Kubeintel Logo

Kubeintel

  • Search
  • Pods
  • Deployments
  • Statefulsets
  • Jobs
  • Daemonsets
  • Namespaces
  • Nodes
  • Services
  • Configmaps
  1. Home
  2. /
  3. namespaces
  4. /
  5. kube-system
  6. /
  7. pods
  8. /
  9. cilium-g7zrz
  10. /
  11. logs
Summary
Metadata
Containers
Spec
Status
All
Events
Logs
Investigator
Pod Details

Name: cilium-g7zrz

Namespace: kube-system

Status: Running

IP: 10.108.0.2

Node: system-0-655pn

Ready: 1/1

Kubectl Commands
  • View
  • Delete
  • Describe
  • Debug
Containers
Name
Image
Ready
Restarts
...
cilium-agent | ghcr.io/digitalocean-packages/cilium:v1.... | Ready | -
  • 1
Init Containers
Name
Image
Ready
Restarts
...
delay-cilium-for-ccm | ghcr.io/digitalocean-packages/cilium:v1.... | Completed | -
config | ghcr.io/digitalocean-packages/cilium:v1.... | Completed | -
mount-cgroup | ghcr.io/digitalocean-packages/cilium:v1.... | Completed | -
apply-sysctl-overwrites | ghcr.io/digitalocean-packages/cilium:v1.... | Completed | -
mount-bpf-fs | ghcr.io/digitalocean-packages/cilium:v1.... | Completed | -
  • 1
  • 2
Metadata

Creation Time: 2025-04-17T22:04:46Z

Labels:

  • app.kubernetes.io/name: cilium-agent...
  • app.kubernetes.io/part-of: cilium...
  • controller-revision-hash: 79f45cdb77...
  • doks.digitalocean.com/managed: true...
  • k8s-app: cilium
  • kubernetes.io/cluster-service: true...
  • pod-template-generation: 6...

Annotations:

  • clusterlint.digitalocean.com/disabled-checks: privileged-container...
  • container.apparmor.security.beta.kubernetes.io/apply-sysctl-overwrites: unconfined...
  • container.apparmor.security.beta.kubernetes.io/cilium-agent: unconfined...
  • container.apparmor.security.beta.kubernetes.io/clean-cilium-state: unconfined...
  • container.apparmor.security.beta.kubernetes.io/mount-cgroup: unconfined...
  • kubectl.kubernetes.io/default-container: cilium-agent...
  • prometheus.io/port: 9090...
  • prometheus.io/scrape: true...
name: cilium-g7zrz
generateName: cilium-
namespace: kube-system
uid: b737ef68-c16e-4507-ac42-c65a5eab8d0a
resourceVersion: '92140142'
creationTimestamp: '2025-04-17T22:04:46Z'
labels:
app.kubernetes.io/name: cilium-agent
app.kubernetes.io/part-of: cilium
controller-revision-hash: 79f45cdb77
doks.digitalocean.com/managed: 'true'
k8s-app: cilium
kubernetes.io/cluster-service: 'true'
pod-template-generation: '6'
annotations:
clusterlint.digitalocean.com/disabled-checks: privileged-containers,non-root-user,resource-requirements,hostpath-volume
container.apparmor.security.beta.kubernetes.io/apply-sysctl-overwrites: unconfined
container.apparmor.security.beta.kubernetes.io/cilium-agent: unconfined
container.apparmor.security.beta.kubernetes.io/clean-cilium-state: unconfined
container.apparmor.security.beta.kubernetes.io/mount-cgroup: unconfined
kubectl.kubernetes.io/default-container: cilium-agent
prometheus.io/port: '9090'
prometheus.io/scrape: 'true'
ownerReferences:
- apiVersion: apps/v1
kind: DaemonSet
name: cilium
uid: f644a837-ae29-48a0-89c7-2d886e50903e
controller: true
blockOwnerDeletion: true
- name: cilium-agent
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- cilium-agent
args:
- '--config-dir=/tmp/cilium/config-map'
- >-
--k8s-api-server=https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- '--ipv4-native-routing-cidr=10.244.0.0/16'
ports:
- name: peer-service
hostPort: 4244
containerPort: 4244
protocol: TCP
- name: prometheus
hostPort: 9090
containerPort: 9090
protocol: TCP
env:
- name: K8S_NODE_NAME
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: spec.nodeName
- name: CILIUM_K8S_NAMESPACE
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
- name: CILIUM_CLUSTERMESH_CONFIG
value: /var/lib/cilium/clustermesh/
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources:
requests:
cpu: 300m
memory: 300Mi
volumeMounts:
- name: host-proc-sys-net
mountPath: /host/proc/sys/net
- name: host-proc-sys-kernel
mountPath: /host/proc/sys/kernel
- name: bpf-maps
mountPath: /sys/fs/bpf
mountPropagation: HostToContainer
- name: cilium-run
mountPath: /var/run/cilium
- name: etc-cni-netd
mountPath: /host/etc/cni/net.d
- name: clustermesh-secrets
readOnly: true
mountPath: /var/lib/cilium/clustermesh
- name: lib-modules
readOnly: true
mountPath: /lib/modules
- name: xtables-lock
mountPath: /run/xtables.lock
- name: hubble-tls
readOnly: true
mountPath: /var/lib/cilium/tls/hubble
- name: tmp
mountPath: /tmp
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
livenessProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
initialDelaySeconds: 120
timeoutSeconds: 5
periodSeconds: 30
successThreshold: 1
failureThreshold: 10
readinessProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
timeoutSeconds: 5
periodSeconds: 30
successThreshold: 1
failureThreshold: 3
startupProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
timeoutSeconds: 1
periodSeconds: 2
successThreshold: 1
failureThreshold: 105
lifecycle:
postStart:
exec:
command:
- bash
- '-c'
- >
set -o errexit
set -o pipefail
set -o nounset
# When running in AWS ENI mode, it's likely that 'aws-node' has
# had a chance to install SNAT iptables rules. These can result
# in dropped traffic, so we should attempt to remove them.
# We do it using a 'postStart' hook since this may need to run
# for nodes which might have already been init'ed but may still
# have dangling rules. This is safe because there are no
# dependencies on anything that is part of the startup script
# itself, and can be safely run multiple times per node (e.g. in
# case of a restart).
if [[ "$(iptables-save | grep -E -c
'AWS-SNAT-CHAIN|AWS-CONNMARK-CHAIN')" != "0" ]];
then
echo 'Deleting iptables rules created by the AWS CNI VPC plugin'
iptables-save | grep -E -v 'AWS-SNAT-CHAIN|AWS-CONNMARK-CHAIN' | iptables-restore
fi
echo 'Done!'
preStop:
exec:
command:
- /cni-uninstall.sh
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- CHOWN
- KILL
- NET_ADMIN
- NET_RAW
- IPC_LOCK
- SYS_MODULE
- SYS_ADMIN
- SYS_RESOURCE
- DAC_OVERRIDE
- FOWNER
- SETGID
- SETUID
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
volumes:
- name: host-kubectl
hostPath:
path: /usr/bin/kubectl
type: File
- name: tmp
emptyDir: {}
- name: cilium-run
hostPath:
path: /var/run/cilium
type: DirectoryOrCreate
- name: bpf-maps
hostPath:
path: /sys/fs/bpf
type: DirectoryOrCreate
- name: hostproc
hostPath:
path: /proc
type: Directory
- name: cilium-cgroup
hostPath:
path: /run/cilium/cgroupv2
type: DirectoryOrCreate
- name: cni-path
hostPath:
path: /opt/cni/bin
type: DirectoryOrCreate
- name: etc-cni-netd
hostPath:
path: /etc/cni/net.d
type: DirectoryOrCreate
- name: lib-modules
hostPath:
path: /lib/modules
type: ''
- name: xtables-lock
hostPath:
path: /run/xtables.lock
type: FileOrCreate
- name: clustermesh-secrets
projected:
sources:
- secret:
name: cilium-clustermesh
optional: true
- secret:
name: clustermesh-apiserver-remote-cert
items:
- key: tls.key
path: common-etcd-client.key
- key: tls.crt
path: common-etcd-client.crt
- key: ca.crt
path: common-etcd-client-ca.crt
optional: true
defaultMode: 256
- name: host-proc-sys-net
hostPath:
path: /proc/sys/net
type: Directory
- name: host-proc-sys-kernel
hostPath:
path: /proc/sys/kernel
type: Directory
- name: hubble-tls
projected:
sources:
- secret:
name: hubble-server-certs
items:
- key: tls.crt
path: server.crt
- key: tls.key
path: server.key
- key: ca.crt
path: client-ca.crt
optional: true
defaultMode: 256
- name: kube-api-access-t7zzb
projected:
sources:
- serviceAccountToken:
expirationSeconds: 3607
path: token
- configMap:
name: kube-root-ca.crt
items:
- key: ca.crt
path: ca.crt
- downwardAPI:
items:
- path: namespace
fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
defaultMode: 420
initContainers:
- name: delay-cilium-for-ccm
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- bash
- '-e'
- '-c'
- >
# This will get the node object for the local node and search through
# the assigned addresses in the object in order to check whether CCM
# already set the internal AND external IP since cilium needs both
# for a clean startup.
# The grep matches regardless of the order of IPs.
until /host/usr/bin/kubectl get node ${HOSTNAME} -o
jsonpath="{.status.addresses[*].type}" | grep -E
"InternalIP.*ExternalIP|ExternalIP.*InternalIP"; do echo "waiting for
CCM to store internal and external IP addresses in node object:
${HOSTNAME}" && sleep 3; done;
env:
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources:
requests:
cpu: 100m
memory: 100Mi
volumeMounts:
- name: host-kubectl
mountPath: /host/usr/bin/kubectl
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: File
imagePullPolicy: IfNotPresent
- name: config
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- cilium
- build-config
- '--source=config-map:cilium-config'
env:
- name: K8S_NODE_NAME
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: spec.nodeName
- name: CILIUM_K8S_NAMESPACE
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources: {}
volumeMounts:
- name: tmp
mountPath: /tmp
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
- name: mount-cgroup
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- sh
- '-ec'
- >
cp /usr/bin/cilium-mount /hostbin/cilium-mount;
nsenter --cgroup=/hostproc/1/ns/cgroup --mount=/hostproc/1/ns/mnt
"${BIN_PATH}/cilium-mount" $CGROUP_ROOT;
rm /hostbin/cilium-mount
env:
- name: CGROUP_ROOT
value: /run/cilium/cgroupv2
- name: BIN_PATH
value: /opt/cni/bin
resources: {}
volumeMounts:
- name: hostproc
mountPath: /hostproc
- name: cni-path
mountPath: /hostbin
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- SYS_ADMIN
- SYS_CHROOT
- SYS_PTRACE
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
- name: apply-sysctl-overwrites
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- sh
- '-ec'
- |
cp /usr/bin/cilium-sysctlfix /hostbin/cilium-sysctlfix;
nsenter --mount=/hostproc/1/ns/mnt "${BIN_PATH}/cilium-sysctlfix";
rm /hostbin/cilium-sysctlfix
env:
- name: BIN_PATH
value: /opt/cni/bin
resources: {}
volumeMounts:
- name: hostproc
mountPath: /hostproc
- name: cni-path
mountPath: /hostbin
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- SYS_ADMIN
- SYS_CHROOT
- SYS_PTRACE
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
- name: mount-bpf-fs
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- /bin/bash
- '-c'
- '--'
args:
- mount | grep "/sys/fs/bpf type bpf" || mount -t bpf bpf /sys/fs/bpf
resources: {}
volumeMounts:
- name: bpf-maps
mountPath: /sys/fs/bpf
mountPropagation: Bidirectional
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
privileged: true
- name: clean-cilium-state
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- /init-container.sh
env:
- name: CILIUM_ALL_STATE
valueFrom:
configMapKeyRef:
name: cilium-config
key: clean-cilium-state
optional: true
- name: CILIUM_BPF_STATE
valueFrom:
configMapKeyRef:
name: cilium-config
key: clean-cilium-bpf-state
optional: true
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources: {}
volumeMounts:
- name: bpf-maps
mountPath: /sys/fs/bpf
- name: cilium-cgroup
mountPath: /run/cilium/cgroupv2
mountPropagation: HostToContainer
- name: cilium-run
mountPath: /var/run/cilium
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- NET_ADMIN
- SYS_MODULE
- SYS_ADMIN
- SYS_RESOURCE
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
- name: install-cni-binaries
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- /install-plugin.sh
resources:
requests:
cpu: 100m
memory: 10Mi
volumeMounts:
- name: cni-path
mountPath: /host/opt/cni/bin
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
containers:
- name: cilium-agent
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- cilium-agent
args:
- '--config-dir=/tmp/cilium/config-map'
- >-
--k8s-api-server=https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- '--ipv4-native-routing-cidr=10.244.0.0/16'
ports:
- name: peer-service
hostPort: 4244
containerPort: 4244
protocol: TCP
- name: prometheus
hostPort: 9090
containerPort: 9090
protocol: TCP
env:
- name: K8S_NODE_NAME
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: spec.nodeName
- name: CILIUM_K8S_NAMESPACE
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
- name: CILIUM_CLUSTERMESH_CONFIG
value: /var/lib/cilium/clustermesh/
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources:
requests:
cpu: 300m
memory: 300Mi
volumeMounts:
- name: host-proc-sys-net
mountPath: /host/proc/sys/net
- name: host-proc-sys-kernel
mountPath: /host/proc/sys/kernel
- name: bpf-maps
mountPath: /sys/fs/bpf
mountPropagation: HostToContainer
- name: cilium-run
mountPath: /var/run/cilium
- name: etc-cni-netd
mountPath: /host/etc/cni/net.d
- name: clustermesh-secrets
readOnly: true
mountPath: /var/lib/cilium/clustermesh
- name: lib-modules
readOnly: true
mountPath: /lib/modules
- name: xtables-lock
mountPath: /run/xtables.lock
- name: hubble-tls
readOnly: true
mountPath: /var/lib/cilium/tls/hubble
- name: tmp
mountPath: /tmp
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
livenessProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
initialDelaySeconds: 120
timeoutSeconds: 5
periodSeconds: 30
successThreshold: 1
failureThreshold: 10
readinessProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
timeoutSeconds: 5
periodSeconds: 30
successThreshold: 1
failureThreshold: 3
startupProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
timeoutSeconds: 1
periodSeconds: 2
successThreshold: 1
failureThreshold: 105
lifecycle:
postStart:
exec:
command:
- bash
- '-c'
- >
set -o errexit
set -o pipefail
set -o nounset
# When running in AWS ENI mode, it's likely that 'aws-node' has
# had a chance to install SNAT iptables rules. These can result
# in dropped traffic, so we should attempt to remove them.
# We do it using a 'postStart' hook since this may need to run
# for nodes which might have already been init'ed but may still
# have dangling rules. This is safe because there are no
# dependencies on anything that is part of the startup script
# itself, and can be safely run multiple times per node (e.g. in
# case of a restart).
if [[ "$(iptables-save | grep -E -c
'AWS-SNAT-CHAIN|AWS-CONNMARK-CHAIN')" != "0" ]];
then
echo 'Deleting iptables rules created by the AWS CNI VPC plugin'
iptables-save | grep -E -v 'AWS-SNAT-CHAIN|AWS-CONNMARK-CHAIN' | iptables-restore
fi
echo 'Done!'
preStop:
exec:
command:
- /cni-uninstall.sh
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- CHOWN
- KILL
- NET_ADMIN
- NET_RAW
- IPC_LOCK
- SYS_MODULE
- SYS_ADMIN
- SYS_RESOURCE
- DAC_OVERRIDE
- FOWNER
- SETGID
- SETUID
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
restartPolicy: Always
terminationGracePeriodSeconds: 1
dnsPolicy: ClusterFirst
nodeSelector:
kubernetes.io/os: linux
serviceAccountName: cilium
serviceAccount: cilium
automountServiceAccountToken: true
nodeName: system-0-655pn
hostNetwork: true
securityContext: {}
affinity:
nodeAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
nodeSelectorTerms:
- matchFields:
- key: metadata.name
operator: In
values:
- system-0-655pn
podAntiAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
- labelSelector:
matchLabels:
k8s-app: cilium
topologyKey: kubernetes.io/hostname
schedulerName: default-scheduler
tolerations:
- operator: Exists
- key: node.kubernetes.io/not-ready
operator: Exists
effect: NoExecute
- key: node.kubernetes.io/unreachable
operator: Exists
effect: NoExecute
- key: node.kubernetes.io/disk-pressure
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/memory-pressure
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/pid-pressure
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/unschedulable
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/network-unavailable
operator: Exists
effect: NoSchedule
priorityClassName: system-node-critical
priority: 2000001000
enableServiceLinks: true
preemptionPolicy: PreemptLowerPriority
phase: Running
conditions:
- type: PodReadyToStartContainers
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:05:06Z'
- type: Initialized
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:05:13Z'
- type: Ready
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:05:18Z'
- type: ContainersReady
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:05:18Z'
- type: PodScheduled
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:04:46Z'
hostIP: 10.108.0.2
podIP: 10.108.0.2
podIPs:
- ip: 10.108.0.2
startTime: '2025-04-17T22:04:47Z'
initContainerStatuses:
- name: delay-cilium-for-ccm
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:05:05Z'
finishedAt: '2025-04-17T22:05:05Z'
containerID: >-
containerd://5cad42abc2d5f864fde4735e377461c0630af08b7a56c2e7e91c8de7681105a4
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://5cad42abc2d5f864fde4735e377461c0630af08b7a56c2e7e91c8de7681105a4
started: false
- name: config
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:05:07Z'
finishedAt: '2025-04-17T22:05:07Z'
containerID: >-
containerd://6956cf408e4724970ee6a52486ed925caa1e779dca23dbb997446f0558de2fe9
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://6956cf408e4724970ee6a52486ed925caa1e779dca23dbb997446f0558de2fe9
started: false
- name: mount-cgroup
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:05:08Z'
finishedAt: '2025-04-17T22:05:08Z'
containerID: >-
containerd://5d78c27fa86987f0a3fa51a536be849d057c2ddeff25c5382b06498fb0b4b05c
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://5d78c27fa86987f0a3fa51a536be849d057c2ddeff25c5382b06498fb0b4b05c
started: false
- name: apply-sysctl-overwrites
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:05:09Z'
finishedAt: '2025-04-17T22:05:09Z'
containerID: >-
containerd://50258722f5c9aaaeb030257bdf6d61fce449308930782a15657e3d9dbf420e98
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://50258722f5c9aaaeb030257bdf6d61fce449308930782a15657e3d9dbf420e98
started: false
- name: mount-bpf-fs
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:05:10Z'
finishedAt: '2025-04-17T22:05:10Z'
containerID: >-
containerd://2fc78efb0180960b0ea71c0a19d39f5b8f3b29a3087b4a32f5ad9ae3039ad418
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://2fc78efb0180960b0ea71c0a19d39f5b8f3b29a3087b4a32f5ad9ae3039ad418
started: false
- name: clean-cilium-state
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:05:11Z'
finishedAt: '2025-04-17T22:05:11Z'
containerID: >-
containerd://3acd0b1f35a64d1fd8c17f350c1165c7b1a908667734aa0bd2f254cc04525481
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://3acd0b1f35a64d1fd8c17f350c1165c7b1a908667734aa0bd2f254cc04525481
started: false
- name: install-cni-binaries
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:05:12Z'
finishedAt: '2025-04-17T22:05:12Z'
containerID: >-
containerd://126ad01811023bd6393d2541419d645821f6050eb31df73c2ee348d9e5b79291
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://126ad01811023bd6393d2541419d645821f6050eb31df73c2ee348d9e5b79291
started: false
containerStatuses:
- name: cilium-agent
state:
running:
startedAt: '2025-04-17T22:05:13Z'
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://ead05b0607a380bcd9968c83eba8979fc46495fc068594ef988d2c253f1cf132
started: true
qosClass: Burstable
metadata:
name: cilium-g7zrz
generateName: cilium-
namespace: kube-system
uid: b737ef68-c16e-4507-ac42-c65a5eab8d0a
resourceVersion: '92140142'
creationTimestamp: '2025-04-17T22:04:46Z'
labels:
app.kubernetes.io/name: cilium-agent
app.kubernetes.io/part-of: cilium
controller-revision-hash: 79f45cdb77
doks.digitalocean.com/managed: 'true'
k8s-app: cilium
kubernetes.io/cluster-service: 'true'
pod-template-generation: '6'
annotations:
clusterlint.digitalocean.com/disabled-checks: privileged-containers,non-root-user,resource-requirements,hostpath-volume
container.apparmor.security.beta.kubernetes.io/apply-sysctl-overwrites: unconfined
container.apparmor.security.beta.kubernetes.io/cilium-agent: unconfined
container.apparmor.security.beta.kubernetes.io/clean-cilium-state: unconfined
container.apparmor.security.beta.kubernetes.io/mount-cgroup: unconfined
kubectl.kubernetes.io/default-container: cilium-agent
prometheus.io/port: '9090'
prometheus.io/scrape: 'true'
ownerReferences:
- apiVersion: apps/v1
kind: DaemonSet
name: cilium
uid: f644a837-ae29-48a0-89c7-2d886e50903e
controller: true
blockOwnerDeletion: true
spec:
volumes:
- name: host-kubectl
hostPath:
path: /usr/bin/kubectl
type: File
- name: tmp
emptyDir: {}
- name: cilium-run
hostPath:
path: /var/run/cilium
type: DirectoryOrCreate
- name: bpf-maps
hostPath:
path: /sys/fs/bpf
type: DirectoryOrCreate
- name: hostproc
hostPath:
path: /proc
type: Directory
- name: cilium-cgroup
hostPath:
path: /run/cilium/cgroupv2
type: DirectoryOrCreate
- name: cni-path
hostPath:
path: /opt/cni/bin
type: DirectoryOrCreate
- name: etc-cni-netd
hostPath:
path: /etc/cni/net.d
type: DirectoryOrCreate
- name: lib-modules
hostPath:
path: /lib/modules
type: ''
- name: xtables-lock
hostPath:
path: /run/xtables.lock
type: FileOrCreate
- name: clustermesh-secrets
projected:
sources:
- secret:
name: cilium-clustermesh
optional: true
- secret:
name: clustermesh-apiserver-remote-cert
items:
- key: tls.key
path: common-etcd-client.key
- key: tls.crt
path: common-etcd-client.crt
- key: ca.crt
path: common-etcd-client-ca.crt
optional: true
defaultMode: 256
- name: host-proc-sys-net
hostPath:
path: /proc/sys/net
type: Directory
- name: host-proc-sys-kernel
hostPath:
path: /proc/sys/kernel
type: Directory
- name: hubble-tls
projected:
sources:
- secret:
name: hubble-server-certs
items:
- key: tls.crt
path: server.crt
- key: tls.key
path: server.key
- key: ca.crt
path: client-ca.crt
optional: true
defaultMode: 256
- name: kube-api-access-t7zzb
projected:
sources:
- serviceAccountToken:
expirationSeconds: 3607
path: token
- configMap:
name: kube-root-ca.crt
items:
- key: ca.crt
path: ca.crt
- downwardAPI:
items:
- path: namespace
fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
defaultMode: 420
initContainers:
- name: delay-cilium-for-ccm
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- bash
- '-e'
- '-c'
- >
# This will get the node object for the local node and search through
# the assigned addresses in the object in order to check whether CCM
# already set the internal AND external IP since cilium needs both
# for a clean startup.
# The grep matches regardless of the order of IPs.
until /host/usr/bin/kubectl get node ${HOSTNAME} -o
jsonpath="{.status.addresses[*].type}" | grep -E
"InternalIP.*ExternalIP|ExternalIP.*InternalIP"; do echo "waiting for
CCM to store internal and external IP addresses in node object:
${HOSTNAME}" && sleep 3; done;
env:
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources:
requests:
cpu: 100m
memory: 100Mi
volumeMounts:
- name: host-kubectl
mountPath: /host/usr/bin/kubectl
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: File
imagePullPolicy: IfNotPresent
- name: config
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- cilium
- build-config
- '--source=config-map:cilium-config'
env:
- name: K8S_NODE_NAME
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: spec.nodeName
- name: CILIUM_K8S_NAMESPACE
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources: {}
volumeMounts:
- name: tmp
mountPath: /tmp
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
- name: mount-cgroup
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- sh
- '-ec'
- >
cp /usr/bin/cilium-mount /hostbin/cilium-mount;
nsenter --cgroup=/hostproc/1/ns/cgroup --mount=/hostproc/1/ns/mnt
"${BIN_PATH}/cilium-mount" $CGROUP_ROOT;
rm /hostbin/cilium-mount
env:
- name: CGROUP_ROOT
value: /run/cilium/cgroupv2
- name: BIN_PATH
value: /opt/cni/bin
resources: {}
volumeMounts:
- name: hostproc
mountPath: /hostproc
- name: cni-path
mountPath: /hostbin
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- SYS_ADMIN
- SYS_CHROOT
- SYS_PTRACE
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
- name: apply-sysctl-overwrites
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- sh
- '-ec'
- |
cp /usr/bin/cilium-sysctlfix /hostbin/cilium-sysctlfix;
nsenter --mount=/hostproc/1/ns/mnt "${BIN_PATH}/cilium-sysctlfix";
rm /hostbin/cilium-sysctlfix
env:
- name: BIN_PATH
value: /opt/cni/bin
resources: {}
volumeMounts:
- name: hostproc
mountPath: /hostproc
- name: cni-path
mountPath: /hostbin
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- SYS_ADMIN
- SYS_CHROOT
- SYS_PTRACE
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
- name: mount-bpf-fs
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- /bin/bash
- '-c'
- '--'
args:
- mount | grep "/sys/fs/bpf type bpf" || mount -t bpf bpf /sys/fs/bpf
resources: {}
volumeMounts:
- name: bpf-maps
mountPath: /sys/fs/bpf
mountPropagation: Bidirectional
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
privileged: true
- name: clean-cilium-state
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- /init-container.sh
env:
- name: CILIUM_ALL_STATE
valueFrom:
configMapKeyRef:
name: cilium-config
key: clean-cilium-state
optional: true
- name: CILIUM_BPF_STATE
valueFrom:
configMapKeyRef:
name: cilium-config
key: clean-cilium-bpf-state
optional: true
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources: {}
volumeMounts:
- name: bpf-maps
mountPath: /sys/fs/bpf
- name: cilium-cgroup
mountPath: /run/cilium/cgroupv2
mountPropagation: HostToContainer
- name: cilium-run
mountPath: /var/run/cilium
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- NET_ADMIN
- SYS_MODULE
- SYS_ADMIN
- SYS_RESOURCE
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
- name: install-cni-binaries
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- /install-plugin.sh
resources:
requests:
cpu: 100m
memory: 10Mi
volumeMounts:
- name: cni-path
mountPath: /host/opt/cni/bin
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
containers:
- name: cilium-agent
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
command:
- cilium-agent
args:
- '--config-dir=/tmp/cilium/config-map'
- >-
--k8s-api-server=https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- '--ipv4-native-routing-cidr=10.244.0.0/16'
ports:
- name: peer-service
hostPort: 4244
containerPort: 4244
protocol: TCP
- name: prometheus
hostPort: 9090
containerPort: 9090
protocol: TCP
env:
- name: K8S_NODE_NAME
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: spec.nodeName
- name: CILIUM_K8S_NAMESPACE
valueFrom:
fieldRef:
apiVersion: v1
fieldPath: metadata.namespace
- name: CILIUM_CLUSTERMESH_CONFIG
value: /var/lib/cilium/clustermesh/
- name: KUBERNETES_SERVICE_HOST
value: f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com
- name: KUBERNETES_SERVICE_PORT
value: '443'
resources:
requests:
cpu: 300m
memory: 300Mi
volumeMounts:
- name: host-proc-sys-net
mountPath: /host/proc/sys/net
- name: host-proc-sys-kernel
mountPath: /host/proc/sys/kernel
- name: bpf-maps
mountPath: /sys/fs/bpf
mountPropagation: HostToContainer
- name: cilium-run
mountPath: /var/run/cilium
- name: etc-cni-netd
mountPath: /host/etc/cni/net.d
- name: clustermesh-secrets
readOnly: true
mountPath: /var/lib/cilium/clustermesh
- name: lib-modules
readOnly: true
mountPath: /lib/modules
- name: xtables-lock
mountPath: /run/xtables.lock
- name: hubble-tls
readOnly: true
mountPath: /var/lib/cilium/tls/hubble
- name: tmp
mountPath: /tmp
- name: kube-api-access-t7zzb
readOnly: true
mountPath: /var/run/secrets/kubernetes.io/serviceaccount
livenessProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
initialDelaySeconds: 120
timeoutSeconds: 5
periodSeconds: 30
successThreshold: 1
failureThreshold: 10
readinessProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
timeoutSeconds: 5
periodSeconds: 30
successThreshold: 1
failureThreshold: 3
startupProbe:
httpGet:
path: /healthz
port: 9879
host: 127.0.0.1
scheme: HTTP
httpHeaders:
- name: brief
value: 'true'
timeoutSeconds: 1
periodSeconds: 2
successThreshold: 1
failureThreshold: 105
lifecycle:
postStart:
exec:
command:
- bash
- '-c'
- |
  set -o errexit
  set -o pipefail
  set -o nounset
  # When running in AWS ENI mode, it's likely that 'aws-node' has
  # had a chance to install SNAT iptables rules. These can result
  # in dropped traffic, so we should attempt to remove them.
  # We do it using a 'postStart' hook since this may need to run
  # for nodes which might have already been init'ed but may still
  # have dangling rules. This is safe because there are no
  # dependencies on anything that is part of the startup script
  # itself, and can be safely run multiple times per node (e.g. in
  # case of a restart).
  if [[ "$(iptables-save | grep -E -c 'AWS-SNAT-CHAIN|AWS-CONNMARK-CHAIN')" != "0" ]];
  then
    echo 'Deleting iptables rules created by the AWS CNI VPC plugin'
    iptables-save | grep -E -v 'AWS-SNAT-CHAIN|AWS-CONNMARK-CHAIN' | iptables-restore
  fi
  echo 'Done!'
preStop:
exec:
command:
- /cni-uninstall.sh
terminationMessagePath: /dev/termination-log
terminationMessagePolicy: FallbackToLogsOnError
imagePullPolicy: IfNotPresent
securityContext:
capabilities:
add:
- CHOWN
- KILL
- NET_ADMIN
- NET_RAW
- IPC_LOCK
- SYS_MODULE
- SYS_ADMIN
- SYS_RESOURCE
- DAC_OVERRIDE
- FOWNER
- SETGID
- SETUID
drop:
- ALL
seLinuxOptions:
type: spc_t
level: s0
restartPolicy: Always
terminationGracePeriodSeconds: 1
dnsPolicy: ClusterFirst
nodeSelector:
kubernetes.io/os: linux
serviceAccountName: cilium
serviceAccount: cilium
automountServiceAccountToken: true
nodeName: system-0-655pn
hostNetwork: true
securityContext: {}
affinity:
nodeAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
nodeSelectorTerms:
- matchFields:
- key: metadata.name
operator: In
values:
- system-0-655pn
podAntiAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
- labelSelector:
matchLabels:
k8s-app: cilium
topologyKey: kubernetes.io/hostname
schedulerName: default-scheduler
tolerations:
- operator: Exists
- key: node.kubernetes.io/not-ready
operator: Exists
effect: NoExecute
- key: node.kubernetes.io/unreachable
operator: Exists
effect: NoExecute
- key: node.kubernetes.io/disk-pressure
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/memory-pressure
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/pid-pressure
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/unschedulable
operator: Exists
effect: NoSchedule
- key: node.kubernetes.io/network-unavailable
operator: Exists
effect: NoSchedule
priorityClassName: system-node-critical
priority: 2000001000
enableServiceLinks: true
preemptionPolicy: PreemptLowerPriority
status:
phase: Running
conditions:
- type: PodReadyToStartContainers
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:05:06Z'
- type: Initialized
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:05:13Z'
- type: Ready
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:05:18Z'
- type: ContainersReady
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:05:18Z'
- type: PodScheduled
status: 'True'
lastProbeTime: null
lastTransitionTime: '2025-04-17T22:04:46Z'
hostIP: 10.108.0.2
podIP: 10.108.0.2
podIPs:
- ip: 10.108.0.2
startTime: '2025-04-17T22:04:47Z'
initContainerStatuses:
- name: delay-cilium-for-ccm
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:05:05Z'
finishedAt: '2025-04-17T22:05:05Z'
containerID: >-
containerd://5cad42abc2d5f864fde4735e377461c0630af08b7a56c2e7e91c8de7681105a4
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://5cad42abc2d5f864fde4735e377461c0630af08b7a56c2e7e91c8de7681105a4
started: false
- name: config
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:05:07Z'
finishedAt: '2025-04-17T22:05:07Z'
containerID: >-
containerd://6956cf408e4724970ee6a52486ed925caa1e779dca23dbb997446f0558de2fe9
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://6956cf408e4724970ee6a52486ed925caa1e779dca23dbb997446f0558de2fe9
started: false
- name: mount-cgroup
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:05:08Z'
finishedAt: '2025-04-17T22:05:08Z'
containerID: >-
containerd://5d78c27fa86987f0a3fa51a536be849d057c2ddeff25c5382b06498fb0b4b05c
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://5d78c27fa86987f0a3fa51a536be849d057c2ddeff25c5382b06498fb0b4b05c
started: false
- name: apply-sysctl-overwrites
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:05:09Z'
finishedAt: '2025-04-17T22:05:09Z'
containerID: >-
containerd://50258722f5c9aaaeb030257bdf6d61fce449308930782a15657e3d9dbf420e98
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://50258722f5c9aaaeb030257bdf6d61fce449308930782a15657e3d9dbf420e98
started: false
- name: mount-bpf-fs
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:05:10Z'
finishedAt: '2025-04-17T22:05:10Z'
containerID: >-
containerd://2fc78efb0180960b0ea71c0a19d39f5b8f3b29a3087b4a32f5ad9ae3039ad418
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://2fc78efb0180960b0ea71c0a19d39f5b8f3b29a3087b4a32f5ad9ae3039ad418
started: false
- name: clean-cilium-state
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:05:11Z'
finishedAt: '2025-04-17T22:05:11Z'
containerID: >-
containerd://3acd0b1f35a64d1fd8c17f350c1165c7b1a908667734aa0bd2f254cc04525481
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://3acd0b1f35a64d1fd8c17f350c1165c7b1a908667734aa0bd2f254cc04525481
started: false
- name: install-cni-binaries
state:
terminated:
exitCode: 0
reason: Completed
startedAt: '2025-04-17T22:05:12Z'
finishedAt: '2025-04-17T22:05:12Z'
containerID: >-
containerd://126ad01811023bd6393d2541419d645821f6050eb31df73c2ee348d9e5b79291
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://126ad01811023bd6393d2541419d645821f6050eb31df73c2ee348d9e5b79291
started: false
containerStatuses:
- name: cilium-agent
state:
running:
startedAt: '2025-04-17T22:05:13Z'
lastState: {}
ready: true
restartCount: 0
image: ghcr.io/digitalocean-packages/cilium:v1.14.18-conformance-fix
imageID: >-
ghcr.io/digitalocean-packages/cilium@sha256:2466e77785d14d01810bd8d9907893fbd5163460b966912ff1972219fb2a21a2
containerID: >-
containerd://ead05b0607a380bcd9968c83eba8979fc46495fc068594ef988d2c253f1cf132
started: true
qosClass: Burstable
time="2025-04-17T22:05:13Z" level=info msg="Memory available for map entries (0.003% of 4105375744B): 10263439B" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-ct-global-tcp-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-ct-global-any-max set by dynamic sizing to 65536" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-nat-global-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-neigh-global-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-sock-rev-map-max set by dynamic sizing to 65536" subsys=config
time="2025-04-17T22:05:13Z" level=info msg=" --agent-health-port='9879'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --agent-labels=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --agent-liveness-update-interval='1s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --agent-not-ready-taint-key='node.cilium.io/agent-not-ready'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --allocator-list-timeout='3m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --allow-icmp-frag-needed='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --allow-localhost='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --annotate-k8s-node='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --api-rate-limit=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --arping-refresh-period='30s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --auto-create-cilium-node-resource='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --auto-direct-node-routes='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bgp-announce-lb-ip='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bgp-announce-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bgp-config-path='/var/lib/cilium/bgp/config.yaml'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-auth-map-max='524288'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-global-any-max='262144'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-global-tcp-max='524288'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-regular-any='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-regular-tcp='6h0m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-regular-tcp-fin='10s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-regular-tcp-syn='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-service-any='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-service-tcp='6h0m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-service-tcp-grace='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-filter-priority='1'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-fragments-map-max='8192'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-acceleration='disabled'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-affinity-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-algorithm='random'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-dev-ip-addr-inherit=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-dsr-dispatch='opt'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-dsr-l4-xlate='frontend'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-external-clusterip='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-maglev-hash-seed='JLfvgnHc2kaSUFaI'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-maglev-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-maglev-table-size='16381'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-map-max='65536'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-mode='snat'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-proto-diff='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-rev-nat-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-rss-ipv4-src-cidr=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-rss-ipv6-src-cidr=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-service-backend-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-service-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-sock='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-sock-hostns-only='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-source-range-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-map-dynamic-size-ratio='0.0025'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-map-event-buffers=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-nat-global-max='524288'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-neigh-global-max='524288'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-policy-map-full-reconciliation-interval='15m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-policy-map-max='16384'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-root='/sys/fs/bpf'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-sock-rev-map-max='262144'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bypass-ip-availability-upon-restore='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --certificates-directory='/var/run/cilium/certs'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cflags=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cgroup-root='/run/cilium/cgroupv2'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cilium-endpoint-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cluster-health-port='4240'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cluster-id='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cluster-name='default'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cluster-pool-ipv4-mask-size='25'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --clustermesh-config='/var/lib/cilium/clustermesh/'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --clustermesh-ip-identities-sync-timeout='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cmdref=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-chaining-mode='portmap'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-chaining-target=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-exclusive='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-external-routing='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-log-file='/var/run/cilium/cilium-cni.log'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --config=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --config-dir='/tmp/cilium/config-map'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --config-sources='config-map:kube-system/cilium-config'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --conntrack-gc-interval='0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --conntrack-gc-max-interval='0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --container-ip-local-reserved-ports='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --crd-wait-timeout='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --custom-cni-conf='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --datapath-mode='veth'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --debug='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --debug-verbose=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --derive-masquerade-ip-addr-from-device=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --devices=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --direct-routing-device=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --disable-cnp-status-updates='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --disable-endpoint-crd='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --disable-envoy-version-check='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --disable-iptables-feeder-rules=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dns-max-ips-per-restored-rule='1000'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dns-policy-unload-on-shutdown='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-concurrency-limit='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-concurrency-processing-grace-period='0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-enable-transparent-mode='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-insecure-skip-transparent-mode-check='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-lock-count='128'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-lock-timeout='500ms'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-socket-linger-timeout='10'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --egress-gateway-policy-map-max='16384'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --egress-gateway-reconciliation-trigger-interval='1s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --egress-masquerade-interfaces=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --egress-multi-home-ip-rule-compat='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-auto-protect-node-port-range='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bandwidth-manager='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bbr='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bgp-control-plane='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bpf-clock-probe='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bpf-masquerade='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bpf-tproxy='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-cilium-api-server-access='*'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-cilium-endpoint-slice='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-cilium-health-api-server-access='*'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-custom-calls='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-endpoint-health-checking='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-endpoint-routes='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-envoy-config='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-external-ips='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-health-check-nodeport='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-health-checking='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-high-scale-ipcache='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-host-firewall='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-host-legacy-routing='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-host-port='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-hubble='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-hubble-recorder-api='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-icmp-rules='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-identity-mark='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ip-masq-agent='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipsec='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipsec-key-watcher='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipsec-xfrm-state-caching='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4-big-tcp='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4-egress-gateway='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4-fragment-tracking='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4-masquerade='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv6='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv6-big-tcp='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv6-masquerade='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv6-ndp='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-api-discovery='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-endpoint-slice='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-event-handover='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-networkpolicy='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-terminating-endpoint='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-l2-announcements='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-l2-neigh-discovery='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-l2-pod-announcements='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-l7-proxy='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-local-node-route='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-local-redirect-policy='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-mke='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-monitor='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-nat46x64-gateway='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-node-port='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-pmtu-discovery='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-policy='default'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-recorder='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-remote-node-identity='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-runtime-device-detection='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-sctp='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-service-topology='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-session-affinity='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-srv6='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-stale-cilium-endpoint-cleanup='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-svc-source-range-check='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-tracing='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-unreachable-routes='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-vtep='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-well-known-identities='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-wireguard='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-wireguard-userspace-fallback='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-xdp-prefilter='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-xt-socket-fallback='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --encrypt-interface=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --encrypt-node='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --endpoint-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --endpoint-queue-size='25'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --endpoint-status=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --envoy-config-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --envoy-log=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --exclude-local-address=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --external-envoy-proxy='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --fixed-identity-mapping=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --fqdn-regex-compile-lru-size='1024'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --gops-port='9890'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-403-msg=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-idle-timeout='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-max-grpc-timeout='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-normalize-path='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-request-timeout='3600'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-retry-count='3'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-retry-timeout='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-disable-tls='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-event-buffer-capacity='4095'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-event-queue-size='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-export-file-compress='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-export-file-max-backups='5'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-export-file-max-size-mb='10'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-export-file-path=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-listen-address=':4244'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-metrics=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-metrics-server=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-monitor-events=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-prefer-ipv6='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-recorder-sink-queue-size='1024'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-recorder-storage-path='/var/run/cilium/pcaps'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-skip-unknown-cgroup-ids='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-socket-path='/var/run/cilium/hubble.sock'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-tls-cert-file='/var/lib/cilium/tls/hubble/server.crt'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-tls-client-ca-files='/var/lib/cilium/tls/hubble/client-ca.crt'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-tls-key-file='/var/lib/cilium/tls/hubble/server.key'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-allocation-mode='crd'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-change-grace-period='5s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-gc-interval='5m'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-heartbeat-timeout='15m'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-restore-grace-period='10m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --install-egress-gateway-routes='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --install-iptables-rules='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --install-no-conntrack-iptables-rules='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ip-allocation-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ip-masq-agent-config-path='/etc/config/ip-masq-agent'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipam='cluster-pool'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipam-cilium-node-update-rate='15s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipam-multi-pool-pre-allocation='default=8'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipsec-key-file=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipsec-key-rotation-duration='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --iptables-lock-timeout='5s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --iptables-random-fully='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-native-routing-cidr='10.244.0.0/16'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-node='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-pod-subnets=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-range='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-service-loopback-address='169.254.42.1'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-service-range='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-cluster-alloc-cidr='f00d::/64'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-mcast-device=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-native-routing-cidr=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-node='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-pod-subnets=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-range='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-service-range='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --join-cluster='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-api-server='https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-client-burst='10'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-client-qps='5'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-heartbeat-timeout='30s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-kubeconfig-path=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-namespace='kube-system'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-require-ipv4-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-require-ipv6-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-service-cache-size='128'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-service-proxy-name=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-sync-timeout='3m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-watcher-endpoint-selector='metadata.name!=kube-scheduler,metadata.name!=kube-controller-manager,metadata.name!=etcd-operator,metadata.name!=gcp-controller-manager'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --keep-config='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kube-proxy-replacement='partial'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kube-proxy-replacement-healthz-bind-address=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-connectivity-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-lease-ttl='15m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-max-consecutive-quorum-errors='2'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-opt=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-periodic-sync='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --l2-announcements-lease-duration='15s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --l2-announcements-renew-deadline='5s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --l2-announcements-retry-period='2s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --l2-pod-announcements-interface=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --label-prefix-file=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --labels=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --lib-dir='/var/lib/cilium'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --local-max-addr-scope='252'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --local-router-ipv4=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --local-router-ipv6=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --log-driver=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --log-opt=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --log-system-load='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --max-controller-interval='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-enabled='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-mutual-listener-port='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-queue-size='1024'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-rotated-identities-queue-size='1024'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-signal-backoff-duration='1s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-spiffe-trust-domain='spiffe.cilium'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-spire-admin-socket=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --metrics='+cilium_bpf_map_pressure'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mke-cgroup-mount=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --monitor-aggregation='medium'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --monitor-aggregation-flags='all'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --monitor-aggregation-interval='5s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --monitor-queue-size='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mtu='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-encryption-opt-out-labels='node-role.kubernetes.io/control-plane'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-acceleration='disabled'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-algorithm='random'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-bind-protection='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-mode='snat'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-range='30000,32767'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --nodes-gc-interval='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --operator-api-serve-addr='127.0.0.1:9234'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --policy-audit-mode='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --policy-queue-size='100'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --policy-trigger-interval='1s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --pprof='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --pprof-address='localhost'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --pprof-port='6060'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --preallocate-bpf-maps='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --prepend-iptables-chains='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --procfs='/host/proc'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --prometheus-serve-addr=':9090'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-connect-timeout='2'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-gid='1337'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-idle-timeout-seconds='60'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-max-connection-duration-seconds='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-max-requests-per-connection='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-prometheus-port='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-xff-num-trusted-hops-egress='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-xff-num-trusted-hops-ingress='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --read-cni-conf=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --remove-cilium-node-taints='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --restore='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --restored-proxy-ports-age-limit='15'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --route-metric='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --routing-mode='native'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --set-cilium-is-up-condition='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --set-cilium-node-taints='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --sidecar-istio-proxy-image='cilium/istio_proxy'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --single-cluster-route='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --skip-cnp-status-startup-clean='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --socket-path='/var/run/cilium/cilium.sock'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --srv6-encap-mode='reduced'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --state-dir='/var/run/cilium'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --synchronize-k8s-nodes='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-dns-reject-response-code='refused'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-enable-dns-compression='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-endpoint-max-ip-per-hostname='50'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-idle-connection-grace-period='0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-max-deferred-connection-deletes='10000'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-min-ttl='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-pre-cache=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-proxy-port='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-proxy-response-max-delay='100ms'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --trace-payloadlen='128'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --trace-sock='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tunnel=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tunnel-port='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tunnel-protocol='vxlan'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --unmanaged-pod-watcher-interval='15'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --use-cilium-internal-ip-for-ipsec='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --version='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vlan-bpf-bypass=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vtep-cidr=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vtep-endpoint=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vtep-mac=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vtep-mask=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --wireguard-encapsulate='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --write-cni-conf-when-ready='/host/etc/cni/net.d/05-cilium.conflist'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" _ _ _" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" ___|_| |_|_ _ _____" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg="| _| | | | | | |" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg="|___|_|_|_|___|_|_|_|" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg="Cilium 1.14.18 5418622a22 2024-07-03T11:57:56+02:00 go version go1.22.10 linux/amd64" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg="clang (10.0.0) and kernel (6.1.0) versions: OK!" subsys=linux-datapath
time="2025-04-17T22:05:13Z" level=info msg="linking environment: OK!" subsys=linux-datapath
time="2025-04-17T22:05:13Z" level=info msg="Kernel config file not found: if the agent fails to start, check the system requirements at https://docs.cilium.io/en/stable/operations/system_requirements" subsys=probes
time="2025-04-17T22:05:13Z" level=info msg="Detected mounted BPF filesystem at /sys/fs/bpf" subsys=bpf
time="2025-04-17T22:05:13Z" level=info msg="Mounted cgroupv2 filesystem at /run/cilium/cgroupv2" subsys=cgroups
time="2025-04-17T22:05:13Z" level=info msg="Parsing base label prefixes from default label list" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg="Parsing additional label prefixes from user inputs: []" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg="Final label prefixes to be used for identity evaluation:" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - reserved:.*" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - :io\\.kubernetes\\.pod\\.namespace" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - :io\\.cilium\\.k8s\\.namespace\\.labels" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - :app\\.kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:io\\.kubernetes" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:.*beta\\.kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:k8s\\.io" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:pod-template-generation" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:pod-template-hash" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:controller-revision-hash" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:annotation.*" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:etcd_node" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration=1.035509ms function="pprof.init.func1 (cell.go:50)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="321.276µs" function="gops.registerGopsHooks (cell.go:38)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration=1.139622ms function="metrics.init.func1 (cell.go:11)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="12.681µs" function="metrics.init.func2 (cell.go:14)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Spire Delegate API Client is disabled as no socket path is configured" subsys=spire-delegate
time="2025-04-17T22:05:13Z" level=info msg="Mutual authentication handler is disabled as no port is configured" subsys=auth
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration=117.484699ms function="cmd.init.func3 (daemon_main.go:1638)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="42.14µs" function="bgpv1.init.func1 (cell.go:46)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="224.508µs" function="metrics.RegisterCollector (metrics.go:56)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="12.17µs" function="gc.registerSignalHandler (cell.go:47)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="83.243µs" function="utime.initUtimeSync (cell.go:29)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="208.071µs" function="agentliveness.newAgentLivenessUpdater (agent_liveness.go:43)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="200.473µs" function="l2responder.NewL2ResponderReconciler (l2responder.go:63)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="202.202µs" function="garp.newGARPProcessor (processor.go:27)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Starting subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Started gops server" address="127.0.0.1:9890" subsys=gops
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="916.22µs" function="gops.registerGopsHooks.func1 (cell.go:43)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="188.629µs" function="metrics.NewRegistry.func1 (registry.go:86)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Establishing connection to apiserver" host="https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com" subsys=k8s-client
time="2025-04-17T22:05:13Z" level=info msg="Serving prometheus metrics on :9090" subsys=metrics
time="2025-04-17T22:05:13Z" level=info msg="Connected to apiserver" subsys=k8s-client
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration=23.698859ms function="client.(*compositeClientset).onStart" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration=4.182395ms function="authmap.newAuthMap.func1 (cell.go:27)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="41.206µs" function="configmap.newMap.func1 (cell.go:23)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="207.231µs" function="signalmap.newMap.func1 (cell.go:44)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="293.972µs" function="nodemap.newNodeMap.func1 (cell.go:23)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="208.526µs" function="eventsmap.newEventsMap.func1 (cell.go:35)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Generating CNI configuration file with mode portmap" subsys=cni-config
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="189.175µs" function="*cni.cniConfigManager.Start" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Wrote CNI configuration file to /host/etc/cni/net.d/05-cilium.conflist" subsys=cni-config
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=31.520894ms function="datapath.newDatapath.func1 (cells.go:113)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Restored 0 node IDs from the BPF map" subsys=linux-datapath
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="70.695µs" function="datapath.newDatapath.func2 (cells.go:126)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="10.122µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Node].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="2.676µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumNode].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Using autogenerated IPv4 allocation range" subsys=node v4Prefix=10.89.0.0/16
time="2025-04-17T22:05:14Z" level=info msg="no local ciliumnode found, will not restore cilium internal ips from k8s" subsys=daemon
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=107.278973ms function="node.NewLocalNodeStore.func1 (local_node_store.go:76)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="2.44µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Service].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=100.556663ms function="*manager.diffStore[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Service].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="2.286µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s.Endpoints].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Using discoveryv1.EndpointSlice" subsys=k8s
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=200.665286ms function="*manager.diffStore[*github.com/cilium/cilium/pkg/k8s.Endpoints].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="10.687µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Pod].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="1.369µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Namespace].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="1.345µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumNetworkPolicy].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="1.24µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumClusterwideNetworkPolicy].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="4.194µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2alpha1.CiliumCIDRGroup].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="41.475µs" function="endpointmanager.newDefaultEndpointManager.func1 (cell.go:201)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="38.347µs" function="cmd.newPolicyTrifecta.func1 (policy.go:135)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="48.114µs" function="*manager.manager.Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Serving cilium node monitor v1.2 API at unix:///var/run/cilium/monitor1_2.sock" subsys=monitor-agent
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="563.032µs" function="agent.newMonitorAgent.func1 (cell.go:61)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="2.707µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2alpha1.CiliumL2AnnouncementPolicy].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="8.079µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Resoring proxy ports from file failed, falling back to restoring from iptables rules" error="stat /var/run/cilium/state/proxy_ports_state.json: no such file or directory" file-path=/var/run/cilium/state/proxy_ports_state.json subsys=proxy
time="2025-04-17T22:05:14Z" level=info msg="Envoy: Starting xDS gRPC server listening on /var/run/cilium/envoy/sockets/xds.sock" subsys=envoy-manager
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=2.80848ms function="proxy.newProxy.func1 (cell.go:63)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="314.303µs" function="signal.provideSignalManager.func1 (cell.go:25)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Datapath signal listener running" subsys=signal
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=1.386562ms function="auth.registerAuthManager.func1 (cell.go:109)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="3.136µs" function="auth.registerGCJobs.func1 (cell.go:158)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="12.564µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Using Managed Neighbor Kernel support" subsys=daemon
time="2025-04-17T22:05:14Z" level=warning msg="Deprecated value for --kube-proxy-replacement: partial (use either \"true\", or \"false\")" subsys=daemon
time="2025-04-17T22:05:14Z" level=info msg="Inheriting MTU from external network interface" device=eth1 ipAddr=10.108.0.2 mtu=1500 subsys=mtu
time="2025-04-17T22:05:14Z" level=info msg="Local boot ID is \"7f7f7913-4eb2-4025-983e-df322a37a2b5\"" subsys=node
time="2025-04-17T22:05:14Z" level=info msg="Removed map pin at /sys/fs/bpf/tc/globals/cilium_ipcache, recreating and re-pinning map cilium_ipcache" file-path=/sys/fs/bpf/tc/globals/cilium_ipcache name=cilium_ipcache subsys=bpf
time="2025-04-17T22:05:14Z" level=info msg="Restored services from maps" failedServices=0 restoredServices=0 subsys=service
time="2025-04-17T22:05:14Z" level=info msg="Restored backends from maps" failedBackends=0 restoredBackends=0 skippedBackends=0 subsys=service
time="2025-04-17T22:05:14Z" level=info msg="Reading old endpoints..." subsys=daemon
time="2025-04-17T22:05:14Z" level=info msg="No old endpoints found." subsys=daemon
time="2025-04-17T22:05:14Z" level=info msg="Waiting until all Cilium CRDs are available" subsys=k8s
time="2025-04-17T22:05:15Z" level=info msg="All Cilium CRDs have been found and are available" subsys=k8s
time="2025-04-17T22:05:15Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-655pn subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-655pn subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=warning msg="Unable to get node resource" error="ciliumnodes.cilium.io \"system-0-655pn\" not found" subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=warning msg="Unable to get node resource" error="ciliumnodes.cilium.io \"system-0-655pn\" not found" subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=info msg="Successfully created CiliumNode resource" subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=warning msg="Unable to create CiliumNode resource, will retry" error="ciliumnodes.cilium.io \"system-0-655pn\" already exists" subsys=nodediscovery
time="2025-04-17T22:05:16Z" level=info msg="Retrieved node information from cilium node" nodeName=system-0-655pn subsys=k8s
time="2025-04-17T22:05:16Z" level=info msg="Received own node information from API server" ipAddr.ipv4=10.108.0.2 ipAddr.ipv6="<nil>" k8sNodeIP=10.108.0.2 labels="map[beta.kubernetes.io/arch:amd64 beta.kubernetes.io/instance-type:s-2vcpu-4gb beta.kubernetes.io/os:linux doks.digitalocean.com/managed:true doks.digitalocean.com/node-id:0c59dc13-5e8b-4d6a-9057-4cbcf64aeb3f doks.digitalocean.com/node-pool:system-0 doks.digitalocean.com/node-pool-id:73347348-171d-4091-a3c9-1d4b0945c964 doks.digitalocean.com/version:1.30.10-do.0 failure-domain.beta.kubernetes.io/region:nyc3 kubernetes.io/arch:amd64 kubernetes.io/hostname:system-0-655pn kubernetes.io/os:linux node.kubernetes.io/instance-type:s-2vcpu-4gb region:nyc3 topology.kubernetes.io/region:nyc3]" nodeName=system-0-655pn subsys=k8s v4Prefix=10.244.1.128/25 v6Prefix="<nil>"
time="2025-04-17T22:05:16Z" level=info msg="k8s mode: Allowing localhost to reach local endpoints" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Detected devices" devices="[]" subsys=linux-datapath
time="2025-04-17T22:05:16Z" level=info msg="Enabling k8s event listener" subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Removing stale endpoint interfaces" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Waiting until local node addressing before starting watchers depending on it" subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Skipping kvstore configuration" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Initializing node addressing" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Initializing cluster-pool IPAM" subsys=ipam v4Prefix=10.244.1.128/25 v6Prefix="<nil>"
time="2025-04-17T22:05:16Z" level=info msg="Restoring endpoints..." subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Endpoints restored" failed=0 restored=0 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Addressing information:" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Cluster-Name: default" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Cluster-ID: 0" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Local node-name: system-0-655pn" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Node-IPv6: <nil>" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" External-Node IPv4: 10.108.0.2" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Internal-Node IPv4: 10.244.1.165" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" IPv4 allocation prefix: 10.244.1.128/25" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" IPv4 native routing prefix: 10.244.0.0/16" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Loopback IPv4: 169.254.42.1" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=040b8479-1053-4ec0-97c9-265119521782 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=040b8479-1053-4ec0-97c9-265119521782 policyRevision=2 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=66809df7-c24d-4610-8d65-488b27d23a9e subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=66809df7-c24d-4610-8d65-488b27d23a9e policyRevision=3 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Local IPv4 addresses:" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" - 64.225.60.89" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" - 10.17.0.6" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" - 10.108.0.2" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" - 64.225.60.89" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-655pn subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-65529 subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=6415158a-1311-4121-9735-85c0b7e33fe3 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6415158a-1311-4121-9735-85c0b7e33fe3 policyRevision=4 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Adding local node to cluster" node="{system-0-655pn default [{ExternalIP 64.225.60.89} {InternalIP 10.108.0.2} {CiliumInternalIP 10.244.1.165} {ExternalIP 64.225.60.89}] 10.244.1.128/25 [] <nil> [] 10.244.1.222 <nil> <nil> <nil> 0 local 0 map[beta.kubernetes.io/arch:amd64 beta.kubernetes.io/instance-type:s-2vcpu-4gb beta.kubernetes.io/os:linux doks.digitalocean.com/managed:true doks.digitalocean.com/node-id:0c59dc13-5e8b-4d6a-9057-4cbcf64aeb3f doks.digitalocean.com/node-pool:system-0 doks.digitalocean.com/node-pool-id:73347348-171d-4091-a3c9-1d4b0945c964 doks.digitalocean.com/version:1.30.10-do.0 failure-domain.beta.kubernetes.io/region:nyc3 kubernetes.io/arch:amd64 kubernetes.io/hostname:system-0-655pn kubernetes.io/os:linux node.kubernetes.io/instance-type:s-2vcpu-4gb region:nyc3 topology.kubernetes.io/region:nyc3] map[] 1 7f7f7913-4eb2-4025-983e-df322a37a2b5}" subsys=nodediscovery
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=9c50c780-5e51-481e-a8e7-dd3fad62b4b2 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=9c50c780-5e51-481e-a8e7-dd3fad62b4b2 policyRevision=5 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=03c20995-2a4f-47c8-9529-395b38d9d919 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=03c20995-2a4f-47c8-9529-395b38d9d919 policyRevision=6 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=2756fcc8-31bc-4236-8f97-16d28cd34cf3 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2756fcc8-31bc-4236-8f97-16d28cd34cf3 policyRevision=7 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=7d1bb8e4-1969-4671-a7ac-2aca76969c7b subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7d1bb8e4-1969-4671-a7ac-2aca76969c7b policyRevision=8 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-655pn subsys=nodediscovery
time="2025-04-17T22:05:16Z" level=info msg="Waiting until all pre-existing resources have been received" subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-6tk3b subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-6tk3r subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="Annotating k8s node" subsys=daemon v4CiliumHostIP.IPv4=10.244.1.165 v4IngressIP.IPv4="<nil>" v4Prefix=10.244.1.128/25 v4healthIP.IPv4=10.244.1.222 v6CiliumHostIP.IPv6="<nil>" v6IngressIP.IPv6="<nil>" v6Prefix="<nil>" v6healthIP.IPv6="<nil>"
time="2025-04-17T22:05:16Z" level=info msg="Initializing identity allocator" subsys=identity-cache
time="2025-04-17T22:05:16Z" level=info msg="Allocating identities between range" cluster-id=0 max=65535 min=256 subsys=identity-cache
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.forwarding sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.rp_filter sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.accept_local sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.send_redirects sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.forwarding sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.rp_filter sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.accept_local sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.send_redirects sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.core.bpf_jit_enable sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.all.rp_filter sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.fib_multipath_use_neigh sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=kernel.unprivileged_bpf_disabled sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=kernel.timer_migration sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting up BPF datapath" bpfClockSource=ktime bpfInsnSet="<nil>" subsys=datapath-loader
time="2025-04-17T22:05:16Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-17T22:05:16Z" level=info msg="Iptables rules installed" subsys=iptables
time="2025-04-17T22:05:16Z" level=info msg="Adding new proxy port rules for cilium-dns-egress:45473" id=cilium-dns-egress subsys=proxy
time="2025-04-17T22:05:16Z" level=info msg="Iptables proxy rules installed" subsys=iptables
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration=2.455332543s function="cmd.newDaemonPromise.func1 (daemon_main.go:1694)" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration="157.644µs" function="utime.initUtimeSync.func1 (cell.go:33)" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration="6.008µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration="247.07µs" function="l2respondermap.newMap.func1 (l2_responder_map4.go:44)" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration="4.871µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Starting IP identity watcher" subsys=ipcache
time="2025-04-17T22:05:16Z" level=info msg="Initializing daemon" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Validating configured node address ranges" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Starting connection tracking garbage collector" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Initial scan of connection tracking completed" subsys=ct-gc
time="2025-04-17T22:05:16Z" level=info msg="Regenerating restored endpoints" numRestored=0 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Creating host endpoint" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="New endpoint" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=753 ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:16Z" level=info msg="Resolving identity labels (blocking)" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=753 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:doks.digitalocean.com/node-id=0c59dc13-5e8b-4d6a-9057-4cbcf64aeb3f,k8s:doks.digitalocean.com/node-pool-id=73347348-171d-4091-a3c9-1d4b0945c964,k8s:doks.digitalocean.com/node-pool=system-0,k8s:doks.digitalocean.com/version=1.30.10-do.0,k8s:node.kubernetes.io/instance-type=s-2vcpu-4gb,k8s:region=nyc3,k8s:topology.kubernetes.io/region=nyc3,reserved:host" ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:16Z" level=info msg="Identity of endpoint changed" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=753 identity=1 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:doks.digitalocean.com/node-id=0c59dc13-5e8b-4d6a-9057-4cbcf64aeb3f,k8s:doks.digitalocean.com/node-pool-id=73347348-171d-4091-a3c9-1d4b0945c964,k8s:doks.digitalocean.com/node-pool=system-0,k8s:doks.digitalocean.com/version=1.30.10-do.0,k8s:node.kubernetes.io/instance-type=s-2vcpu-4gb,k8s:region=nyc3,k8s:topology.kubernetes.io/region=nyc3,reserved:host" ipv4= ipv6= k8sPodName=/ oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:05:16Z" level=info msg="Launching Cilium health daemon" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Launching Cilium health endpoint" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Started healthz status API server" address="127.0.0.1:9879" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Processing queued endpoint deletion requests from /var/run/cilium/deleteQueue" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="processing 0 queued deletion requests" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Initializing Cilium API" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Finished regenerating restored endpoints" regenerated=0 subsys=daemon total=0
time="2025-04-17T22:05:16Z" level=info msg="Deleted orphan backends" orphanBackends=0 subsys=service
time="2025-04-17T22:05:16Z" level=info msg="Cleaning up Cilium health endpoint" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Removed stale bpf map" file-path=/sys/fs/bpf/tc/globals/cilium_lb4_source_range subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Daemon initialization completed" bootstrapTime=3.694082514s subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Configuring Hubble server" eventQueueSize=2048 maxFlows=4095 subsys=hubble
time="2025-04-17T22:05:16Z" level=info msg="Serving cilium API at unix:///var/run/cilium/cilium.sock" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Beginning to read perf buffer" startTime="2025-04-17 22:05:16.9790939 +0000 UTC m=+3.777584086" subsys=monitor-agent
time="2025-04-17T22:05:16Z" level=info msg="Starting local Hubble server" address="unix:///var/run/cilium/hubble.sock" subsys=hubble
time="2025-04-17T22:05:16Z" level=info msg="Starting Hubble server" address=":4244" subsys=hubble
time="2025-04-17T22:05:17Z" level=info msg="Compiled new BPF template" BPFCompilationTime=505.310648ms file-path=/var/run/cilium/state/templates/5aaf125271bf9916651b97512cb053ef03b67f2b3978c7d7c70a926d9bb7357b/bpf_host.o subsys=datapath-loader
time="2025-04-17T22:05:17Z" level=info msg="Create endpoint request" addressing="&{10.244.1.205 7d10d271-a1dc-4291-a571-564a5409e74f default }" containerID=53e45f9f6a10b8b2b62a9ab11f70e73ef31eccabf1d339d90b22569ef50d8d9d datapathConfiguration="&{false false false false false <nil>}" interface=lxc7f8654bb9ddf k8sPodName=kube-system/konnectivity-agent-d54mg labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:05:17Z" level=info msg="New endpoint" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=51 ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Resolving identity labels (blocking)" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=51 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=konnectivity-agent" ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=konnectivity-agent;" subsys=allocator
time="2025-04-17T22:05:17Z" level=info msg="Identity of endpoint changed" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=51 identity=32430 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=konnectivity-agent" ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Waiting for endpoint to be generated" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=51 identity=32430 ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-17T22:05:17Z" level=info msg="Rewrote endpoint BPF program" containerID= datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=753 identity=1 ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="New endpoint" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=136 ipv4=10.244.1.222 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Resolving identity labels (blocking)" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=136 identityLabels="reserved:health" ipv4=10.244.1.222 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Identity of endpoint changed" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=136 identity=4 identityLabels="reserved:health" ipv4=10.244.1.222 ipv6= k8sPodName=/ oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:05:18Z" level=info msg="Serving cilium health API at unix:///var/run/cilium/health.sock" subsys=health-server
time="2025-04-17T22:05:19Z" level=info msg="Compiled new BPF template" BPFCompilationTime=1.817672457s file-path=/var/run/cilium/state/templates/66fcee6b127281654b77182ef809a08d06c4bda30601191c13385ae4192cb86e/bpf_lxc.o subsys=datapath-loader
time="2025-04-17T22:05:19Z" level=info msg="Rewrote endpoint BPF program" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=51 identity=32430 ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=endpoint
time="2025-04-17T22:05:19Z" level=info msg="Successful endpoint creation" containerID=53e45f9f6a datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=51 identity=32430 ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=daemon
time="2025-04-17T22:05:19Z" level=info msg="Rewrote endpoint BPF program" containerID= datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=136 identity=4 ipv4=10.244.1.222 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.196 62e942ce-66c5-4806-b859-bc8ba747cc19 default }" containerID=8b854afabcebb3bbc0f24c53651f0d10f7845c0fc063a065b5a5f9393c95015e datapathConfiguration="&{false false false false false <nil>}" interface=lxc72bf4f377db8 k8sPodName=kube-system/coredns-854895db77-2x6tn labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:05:40Z" level=info msg="New endpoint" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=935 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Resolving identity labels (blocking)" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=935 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=coredns;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=kube-dns;" subsys=allocator
time="2025-04-17T22:05:40Z" level=info msg="Identity of endpoint changed" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=935 identity=30020 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Waiting for endpoint to be generated" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=935 identity=30020 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Rewrote endpoint BPF program" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=935 identity=30020 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Successful endpoint creation" containerID=8b854afabc datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=935 identity=30020 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=daemon
time="2025-04-17T22:06:16Z" level=info msg="Node deleted" clusterName=default nodeName=system-0-6tk3b subsys=nodemanager
time="2025-04-17T22:06:56Z" level=info msg="Node deleted" clusterName=default nodeName=system-0-6tk3r subsys=nodemanager
time="2025-04-17T22:10:16Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0005950927734375 newInterval=7m30s subsys=map-ct
time="2025-04-17T22:17:46Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.00102996826171875 newInterval=11m15s subsys=map-ct
time="2025-04-17T22:29:01Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0015411376953125 newInterval=16m53s subsys=map-ct
time="2025-04-17T22:45:54Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0023040771484375 newInterval=25m20s subsys=map-ct
time="2025-04-17T23:11:14Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.00347137451171875 newInterval=38m0s subsys=map-ct
time="2025-04-17T23:49:14Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.005157470703125 newInterval=57m0s subsys=map-ct
time="2025-04-18T00:46:14Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.007720947265625 newInterval=1h25m30s subsys=map-ct
time="2025-04-18T02:11:44Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.01155853271484375 newInterval=2h8m15s subsys=map-ct
time="2025-04-18T04:19:59Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.01715850830078125 newInterval=3h12m23s subsys=map-ct
time="2025-04-18T07:32:23Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0255126953125 newInterval=4h48m35s subsys=map-ct
time="2025-04-18T12:20:58Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.03720855712890625 newInterval=7h12m53s subsys=map-ct
time="2025-04-19T07:37:19Z" level=warning msg="UpdateIdentities: Skipping Delete of a non-existing identity" identity=16777217 subsys=policy
time="2025-04-19T07:37:19Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-19T07:37:20Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=6dc3d794-8c3b-4f41-a3a9-80039d5dd099 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6dc3d794-8c3b-4f41-a3a9-80039d5dd099 policyRevision=10 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=d8fcfd77-a41f-4bf6-9e15-0761d8646b13 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d8fcfd77-a41f-4bf6-9e15-0761d8646b13 policyRevision=12 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=a54a422d-3344-4ddb-b735-f3d1a419b92f subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a54a422d-3344-4ddb-b735-f3d1a419b92f policyRevision=14 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=ab898772-f5c9-4e04-9f06-74f83ef5a562 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=ab898772-f5c9-4e04-9f06-74f83ef5a562 policyRevision=16 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=f555af24-f092-4040-8028-fb956bb9d41b subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=f555af24-f092-4040-8028-fb956bb9d41b policyRevision=18 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=c3ac0f15-63f8-4b2e-a1c9-eaa7be076039 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c3ac0f15-63f8-4b2e-a1c9-eaa7be076039 policyRevision=20 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=1d14c6f1-e698-4fec-8b90-3285a9555ee1 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1d14c6f1-e698-4fec-8b90-3285a9555ee1 policyRevision=22 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=55e0468e-9fb9-42d1-866f-94c482737362 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=55e0468e-9fb9-42d1-866f-94c482737362 policyRevision=24 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=33865cba-4a2b-4a97-8fbe-bb5433e34a7c subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=33865cba-4a2b-4a97-8fbe-bb5433e34a7c policyRevision=26 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=098726c6-5820-431b-a507-6705f5f9bd77 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=098726c6-5820-431b-a507-6705f5f9bd77 policyRevision=28 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=dc9228f1-3a2f-40e0-8bc3-37289428db53 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dc9228f1-3a2f-40e0-8bc3-37289428db53 policyRevision=30 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=d1b4ad49-a81c-4ce8-8ea7-0f678fa9a8f5 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d1b4ad49-a81c-4ce8-8ea7-0f678fa9a8f5 policyRevision=32 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=6b267068-7ab4-4890-93be-69b5f1f59354 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6b267068-7ab4-4890-93be-69b5f1f59354 policyRevision=34 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=74435a11-d798-4d20-9137-091dd8fc35f7 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=74435a11-d798-4d20-9137-091dd8fc35f7 policyRevision=36 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-26T18:05:51Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:05:51Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:06:52Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:06:52Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:27:31Z" level=warning msg="service not found" k8sNamespace=ingress-nginx k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:27:31Z" level=warning msg="service not found" k8sNamespace=ingress-nginx k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-27T20:18:38Z" level=info msg="Create endpoint request" addressing="&{10.244.1.223 bd160c34-044b-4807-90c1-064865370e03 default }" containerID=b537ca15ed4b1101be846c970cb24a9cfe1a5305a7ee47e8e267b7981e872fc7 datapathConfiguration="&{false false false false false <nil>}" interface=lxccb2cec03d915 k8sPodName=kubeintel/cm-acme-http-solver-dfvjs labels="[]" subsys=daemon sync-build=true
time="2025-04-27T20:18:38Z" level=info msg="New endpoint" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3857 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:18:38Z" level=info msg="Resolving identity labels (blocking)" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3857 identityLabels="k8s:acme.cert-manager.io/http-domain=1001522845,k8s:acme.cert-manager.io/http-token=812125645,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:18:38Z" level=info msg="Skipped non-kubernetes labels when labelling ciliumidentity. All labels will still be used in identity determination" labels="map[k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name:kubeintel]" subsys=crd-allocator
time="2025-04-27T20:18:38Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-27T20:18:38Z" level=info msg="Allocated new global key" key="k8s:acme.cert-manager.io/http-domain=1001522845;k8s:acme.cert-manager.io/http-token=812125645;k8s:acme.cert-manager.io/http01-solver=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=kubeintel;" subsys=allocator
time="2025-04-27T20:18:38Z" level=info msg="Identity of endpoint changed" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3857 identity=34403 identityLabels="k8s:acme.cert-manager.io/http-domain=1001522845,k8s:acme.cert-manager.io/http-token=812125645,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs oldIdentity="no identity" subsys=endpoint
time="2025-04-27T20:18:38Z" level=info msg="Waiting for endpoint to be generated" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3857 identity=34403 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:18:39Z" level=info msg="Rewrote endpoint BPF program" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=36 endpointID=3857 identity=34403 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:18:39Z" level=info msg="Successful endpoint creation" containerID=b537ca15ed datapathPolicyRevision=36 desiredPolicyRevision=36 endpointID=3857 identity=34403 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=daemon
time="2025-04-27T20:18:39Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-27T20:18:59Z" level=warning msg="service not found" k8sNamespace=kubeintel k8sSvcName=cm-acme-http-solver-cmhbq obj="10.245.224.188:8089/ANY" subsys=k8s-watcher
time="2025-04-27T20:18:59Z" level=info msg="Delete endpoint request" containerID=b537ca15ed endpointID=3857 k8sNamespace=kubeintel k8sPodName=cm-acme-http-solver-dfvjs subsys=daemon
time="2025-04-27T20:18:59Z" level=info msg="Releasing key" key="[k8s:acme.cert-manager.io/http-domain=1001522845 k8s:acme.cert-manager.io/http-token=812125645 k8s:acme.cert-manager.io/http01-solver=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=kubeintel]" subsys=allocator
time="2025-04-27T20:18:59Z" level=info msg="Removed endpoint" containerID=b537ca15ed datapathPolicyRevision=36 desiredPolicyRevision=36 endpointID=3857 identity=34403 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:51:16Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=2b3d26b6-3f49-4c04-a7ad-2f66e6b0c367 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2b3d26b6-3f49-4c04-a7ad-2f66e6b0c367 policyRevision=38 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=0a865511-47ed-4634-8cf3-0f7525a0e5cc subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0a865511-47ed-4634-8cf3-0f7525a0e5cc policyRevision=40 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=4f462fcc-6cce-4776-bb1e-a805b777a7da subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4f462fcc-6cce-4776-bb1e-a805b777a7da policyRevision=42 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=4ea53fc9-11be-4a31-9515-83715cfec69a subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4ea53fc9-11be-4a31-9515-83715cfec69a policyRevision=44 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=c8fc6f9f-d896-4333-a947-b2829ddfc661 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c8fc6f9f-d896-4333-a947-b2829ddfc661 policyRevision=46 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=35ae6b41-78e8-4c5d-814e-3793da4a716f subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=35ae6b41-78e8-4c5d-814e-3793da4a716f policyRevision=48 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=c54698d1-1621-4768-bfb2-8a5cdfea66f7 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c54698d1-1621-4768-bfb2-8a5cdfea66f7 policyRevision=50 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-04T17:34:05Z" level=info msg="Create endpoint request" addressing="&{10.244.1.190 5136d175-4ed5-42a3-96e6-b9454e63c119 default }" containerID=ae7e28295da4c07b493635b5e30c0d0d6970296baf09923a31687b6a789f9194 datapathConfiguration="&{false false false false false <nil>}" interface=lxca76f31062f2c k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz labels="[]" subsys=daemon sync-build=true
time="2025-05-04T17:34:05Z" level=info msg="New endpoint" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1537 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Resolving identity labels (blocking)" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1537 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Reusing existing global key" key="k8s:app=nginx;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;" subsys=allocator
time="2025-05-04T17:34:05Z" level=info msg="Identity of endpoint changed" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1537 identity=60447 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz oldIdentity="no identity" subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Waiting for endpoint to be generated" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1537 identity=60447 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Rewrote endpoint BPF program" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=1537 identity=60447 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Successful endpoint creation" containerID=ae7e28295d datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=1537 identity=60447 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=daemon
time="2025-05-05T00:12:01Z" level=info msg="Create endpoint request" addressing="&{10.244.1.157 e7daef0e-04d2-44ff-b77e-81a1d9a0a4ab default }" containerID=91efb54a92e333c39dfe14c34684d67d9a42be8522258b160baa8a0a2190ec16 datapathConfiguration="&{false false false false false <nil>}" interface=lxc62302748819b k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp labels="[]" subsys=daemon sync-build=true
time="2025-05-05T00:12:01Z" level=info msg="New endpoint" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=471 ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Resolving identity labels (blocking)" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=471 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/instance=kubeintel;k8s:app.kubernetes.io/name=kubeintel;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=kubeintel;k8s:io.kubernetes.pod.namespace=kubeintel;" subsys=allocator
time="2025-05-05T00:12:01Z" level=info msg="Identity of endpoint changed" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=471 identity=23569 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp oldIdentity="no identity" subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Waiting for endpoint to be generated" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=471 identity=23569 ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Rewrote endpoint BPF program" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=471 identity=23569 ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Successful endpoint creation" containerID=91efb54a92 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=471 identity=23569 ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=daemon
time="2025-05-06T03:53:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.189 b8b18320-ac3e-40b4-881d-e811dab2b460 default }" containerID=76de9f3a728fda8007ff2a915648ba69f6a7d61cc1925195734f47095d8d35a3 datapathConfiguration="&{false false false false false <nil>}" interface=lxc0d71c0b24747 k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh labels="[]" subsys=daemon sync-build=true
time="2025-05-06T03:53:41Z" level=info msg="New endpoint" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=713 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Resolving identity labels (blocking)" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=713 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/instance=bikexbike;k8s:app.kubernetes.io/name=bikexbike;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=bikexbike;" subsys=allocator
time="2025-05-06T03:53:41Z" level=info msg="Identity of endpoint changed" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=713 identity=29331 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh oldIdentity="no identity" subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Waiting for endpoint to be generated" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=713 identity=29331 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Rewrote endpoint BPF program" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=713 identity=29331 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Successful endpoint creation" containerID=76de9f3a72 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=713 identity=29331 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=daemon
time="2025-05-06T15:12:32Z" level=info msg="Create endpoint request" addressing="&{10.244.1.173 37e60dcf-d105-43fb-9d1d-8f24cd310df6 default }" containerID=2e4196cc018db244fcdbb9693ae97273c8e95016049d430e4735bf321cc11062 datapathConfiguration="&{false false false false false <nil>}" interface=lxcee23b4ccdc87 k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn labels="[]" subsys=daemon sync-build=true
time="2025-05-06T15:12:32Z" level=info msg="New endpoint" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2200 ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Resolving identity labels (blocking)" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2200 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Identity of endpoint changed" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2200 identity=29331 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn oldIdentity="no identity" subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Waiting for endpoint to be generated" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2200 identity=29331 ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Rewrote endpoint BPF program" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=2200 identity=29331 ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Successful endpoint creation" containerID=2e4196cc01 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=2200 identity=29331 ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=daemon
time="2025-05-06T15:13:02Z" level=info msg="Delete endpoint request" containerID=76de9f3a72 endpointID=713 k8sNamespace=bikexbike k8sPodName=bikexbike-588cddd898-fxlnh subsys=daemon
time="2025-05-06T15:13:02Z" level=info msg="Releasing key" key="[k8s:app.kubernetes.io/instance=bikexbike k8s:app.kubernetes.io/name=bikexbike k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=bikexbike]" subsys=allocator
time="2025-05-06T15:13:02Z" level=info msg="Removed endpoint" containerID=76de9f3a72 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=713 identity=29331 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T19:15:14Z" level=info msg="Delete endpoint request" containerID=ae7e28295d endpointID=1537 k8sNamespace=demo k8sPodName=nginx-deployment-86dcfdf4c6-mt7gz subsys=daemon
time="2025-05-06T19:15:14Z" level=info msg="Releasing key" key="[k8s:app=nginx k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=demo]" subsys=allocator
time="2025-05-06T19:15:14Z" level=info msg="Removed endpoint" containerID=ae7e28295d datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=1537 identity=60447 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Create endpoint request" addressing="&{10.244.1.236 03120237-78f2-439d-bd10-09556e2ba49c default }" containerID=ccb4035800d5f85d9593f911b15878f4335a4da20da7412d7978ee6e6a051413 datapathConfiguration="&{false false false false false <nil>}" interface=lxc3719859f07d9 k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f labels="[]" subsys=daemon sync-build=true
time="2025-05-06T19:16:49Z" level=info msg="New endpoint" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=256 ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Resolving identity labels (blocking)" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=256 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Reusing existing global key" key="k8s:app=nginx;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;" subsys=allocator
time="2025-05-06T19:16:49Z" level=info msg="Identity of endpoint changed" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=256 identity=60447 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f oldIdentity="no identity" subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Waiting for endpoint to be generated" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=256 identity=60447 ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Rewrote endpoint BPF program" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=256 identity=60447 ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Successful endpoint creation" containerID=ccb4035800 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=256 identity=60447 ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=daemon
time="2025-05-08T22:09:27Z" level=info msg="Create endpoint request" addressing="&{10.244.1.225 948ba0d3-18d3-436b-b236-96cbe37459ed default }" containerID=38c1670b13928c712aeb5522f0d8d47e84c4c53d80f03f95465ce1ffb4b29b6f datapathConfiguration="&{false false false false false <nil>}" interface=lxc58f952d2e9d8 k8sPodName=demo/pi-2-2ks2s labels="[]" subsys=daemon sync-build=true
time="2025-05-08T22:09:27Z" level=info msg="New endpoint" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=211 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="Resolving identity labels (blocking)" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=211 identityLabels="k8s:batch.kubernetes.io/controller-uid=0317f955-b62d-4825-8cbf-76309ce47142,k8s:batch.kubernetes.io/job-name=pi-2,k8s:controller-uid=0317f955-b62d-4825-8cbf-76309ce47142,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo,k8s:job-name=pi-2" ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="Skipped non-kubernetes labels when labelling ciliumidentity. All labels will still be used in identity determination" labels="map[k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name:demo]" subsys=crd-allocator
time="2025-05-08T22:09:27Z" level=info msg="Allocated new global key" key="k8s:batch.kubernetes.io/controller-uid=0317f955-b62d-4825-8cbf-76309ce47142;k8s:batch.kubernetes.io/job-name=pi-2;k8s:controller-uid=0317f955-b62d-4825-8cbf-76309ce47142;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;k8s:job-name=pi-2;" subsys=allocator
time="2025-05-08T22:09:27Z" level=info msg="Identity of endpoint changed" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=211 identity=2226 identityLabels="k8s:batch.kubernetes.io/controller-uid=0317f955-b62d-4825-8cbf-76309ce47142,k8s:batch.kubernetes.io/job-name=pi-2,k8s:controller-uid=0317f955-b62d-4825-8cbf-76309ce47142,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo,k8s:job-name=pi-2" ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s oldIdentity="no identity" subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="Waiting for endpoint to be generated" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=211 identity=2226 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-08T22:09:27Z" level=info msg="Rewrote endpoint BPF program" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=211 identity=2226 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="Successful endpoint creation" containerID=38c1670b13 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=211 identity=2226 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=daemon
time="2025-05-08T22:09:53Z" level=info msg="Delete endpoint request" containerID=38c1670b13 endpointID=211 k8sNamespace=demo k8sPodName=pi-2-2ks2s subsys=daemon
time="2025-05-08T22:09:53Z" level=info msg="Releasing key" key="[k8s:batch.kubernetes.io/controller-uid=0317f955-b62d-4825-8cbf-76309ce47142 k8s:batch.kubernetes.io/job-name=pi-2 k8s:controller-uid=0317f955-b62d-4825-8cbf-76309ce47142 k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=demo k8s:job-name=pi-2]" subsys=allocator
time="2025-05-08T22:09:53Z" level=info msg="Removed endpoint" containerID=38c1670b13 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=211 identity=2226 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:41:19Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=b7ad43b9-f312-485c-8c3f-7ad5327a160d subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=b7ad43b9-f312-485c-8c3f-7ad5327a160d policyRevision=52 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=e53e63a4-7df3-4e4b-80c0-ac707b9b916d subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e53e63a4-7df3-4e4b-80c0-ac707b9b916d policyRevision=54 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=f9733521-c7be-4054-afa5-279f2c4b2137 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=f9733521-c7be-4054-afa5-279f2c4b2137 policyRevision=56 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=21c67fb2-89fd-4de2-9a05-63f85bf4b0b2 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=21c67fb2-89fd-4de2-9a05-63f85bf4b0b2 policyRevision=58 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=b96cd4f2-28dd-45a6-a1c3-002dd89ac752 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=b96cd4f2-28dd-45a6-a1c3-002dd89ac752 policyRevision=60 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=a6fbe11e-a123-4a05-bbc6-3b3f87b30b41 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a6fbe11e-a123-4a05-bbc6-3b3f87b30b41 policyRevision=62 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=35dc9be9-c917-4f13-ad65-0010bbbad3bd subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=35dc9be9-c917-4f13-ad65-0010bbbad3bd policyRevision=64 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:28:19Z" level=warning msg="UpdateIdentities: Skipping Delete of a non-existing identity" identity=16777218 subsys=policy
time="2025-05-16T08:28:19Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-16T08:28:21Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=fec02dfa-6368-44ac-922b-d03a2e1603ab subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=fec02dfa-6368-44ac-922b-d03a2e1603ab policyRevision=66 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=f144a001-8965-4a5a-84fb-3aa602e59c75 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=f144a001-8965-4a5a-84fb-3aa602e59c75 policyRevision=68 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=2ace3607-16b9-4047-a9ac-d8d12c8f6de9 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2ace3607-16b9-4047-a9ac-d8d12c8f6de9 policyRevision=70 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=bfdb4d99-3c7d-4682-b887-181ff2cf6564 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=bfdb4d99-3c7d-4682-b887-181ff2cf6564 policyRevision=72 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=25d6f8c4-9d3d-4c59-a21e-7701a73ed64d subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=25d6f8c4-9d3d-4c59-a21e-7701a73ed64d policyRevision=74 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=84cbbf6a-186d-413e-91ad-bc2f53476946 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=84cbbf6a-186d-413e-91ad-bc2f53476946 policyRevision=76 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=4fc67321-c661-4fd9-8d93-55b29325182d subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4fc67321-c661-4fd9-8d93-55b29325182d policyRevision=78 subsys=daemon
time="2025-05-20T22:52:49Z" level=info msg="Create endpoint request" addressing="&{10.244.1.227 008d307d-564f-49f6-a23d-b9e90a0090ef default }" containerID=8e639bae91df353a607f319e014432f280015f4c9a54a9c94a6bca43ef6a7d7c datapathConfiguration="&{false false false false false <nil>}" interface=lxc5e900a634a2d k8sPodName=bikexbike/cm-acme-http-solver-g4dth labels="[]" subsys=daemon sync-build=true
time="2025-05-20T22:52:49Z" level=info msg="New endpoint" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1088 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Resolving identity labels (blocking)" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1088 identityLabels="k8s:acme.cert-manager.io/http-domain=608961820,k8s:acme.cert-manager.io/http-token=1208684116,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Skipped non-kubernetes labels when labelling ciliumidentity. All labels will still be used in identity determination" labels="map[k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name:bikexbike]" subsys=crd-allocator
time="2025-05-20T22:52:49Z" level=info msg="Allocated new global key" key="k8s:acme.cert-manager.io/http-domain=608961820;k8s:acme.cert-manager.io/http-token=1208684116;k8s:acme.cert-manager.io/http01-solver=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=bikexbike;" subsys=allocator
time="2025-05-20T22:52:49Z" level=info msg="Identity of endpoint changed" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1088 identity=940 identityLabels="k8s:acme.cert-manager.io/http-domain=608961820,k8s:acme.cert-manager.io/http-token=1208684116,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth oldIdentity="no identity" subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Waiting for endpoint to be generated" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1088 identity=940 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T22:52:49Z" level=info msg="Create endpoint request" addressing="&{10.244.1.197 dbbcb16d-f23b-4870-af7b-a3a7a4384dc2 default }" containerID=1c80f3d7cf9d6bb07e2517d122506e9b40804945769a0a34e2f3ffca68acfd55 datapathConfiguration="&{false false false false false <nil>}" interface=lxc9e8ad6d199d4 k8sPodName=bikexbike/cm-acme-http-solver-7l7xp labels="[]" subsys=daemon sync-build=true
time="2025-05-20T22:52:49Z" level=info msg="New endpoint" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=227 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Resolving identity labels (blocking)" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=227 identityLabels="k8s:acme.cert-manager.io/http-domain=1025771183,k8s:acme.cert-manager.io/http-token=939200182,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Skipped non-kubernetes labels when labelling ciliumidentity. All labels will still be used in identity determination" labels="map[k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name:bikexbike]" subsys=crd-allocator
time="2025-05-20T22:52:49Z" level=info msg="Allocated new global key" key="k8s:acme.cert-manager.io/http-domain=1025771183;k8s:acme.cert-manager.io/http-token=939200182;k8s:acme.cert-manager.io/http01-solver=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=bikexbike;" subsys=allocator
time="2025-05-20T22:52:49Z" level=info msg="Identity of endpoint changed" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=227 identity=11396 identityLabels="k8s:acme.cert-manager.io/http-domain=1025771183,k8s:acme.cert-manager.io/http-token=939200182,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp oldIdentity="no identity" subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Waiting for endpoint to be generated" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=227 identity=11396 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:52:50Z" level=info msg="Rewrote endpoint BPF program" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=78 endpointID=227 identity=11396 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:52:50Z" level=info msg="Successful endpoint creation" containerID=1c80f3d7cf datapathPolicyRevision=78 desiredPolicyRevision=78 endpointID=227 identity=11396 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=daemon
time="2025-05-20T22:52:50Z" level=info msg="Rewrote endpoint BPF program" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=78 endpointID=1088 identity=940 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T22:52:50Z" level=info msg="Successful endpoint creation" containerID=8e639bae91 datapathPolicyRevision=78 desiredPolicyRevision=78 endpointID=1088 identity=940 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=daemon
time="2025-05-20T22:52:50Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T22:52:51Z" level=info msg="regenerating all endpoints" reason= subsys=endpoint-manager
time="2025-05-20T22:53:10Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=cm-acme-http-solver-h5gc9 obj="10.245.119.204:8089/ANY" subsys=k8s-watcher
time="2025-05-20T22:53:10Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=cm-acme-http-solver-d8z46 obj="10.245.21.144:8089/ANY" subsys=k8s-watcher
time="2025-05-20T22:53:10Z" level=info msg="Delete endpoint request" containerID=1c80f3d7cf endpointID=227 k8sNamespace=bikexbike k8sPodName=cm-acme-http-solver-7l7xp subsys=daemon
time="2025-05-20T22:53:10Z" level=info msg="Releasing key" key="[k8s:acme.cert-manager.io/http-domain=1025771183 k8s:acme.cert-manager.io/http-token=939200182 k8s:acme.cert-manager.io/http01-solver=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=bikexbike]" subsys=allocator
time="2025-05-20T22:53:10Z" level=info msg="Delete endpoint request" containerID=8e639bae91 endpointID=1088 k8sNamespace=bikexbike k8sPodName=cm-acme-http-solver-g4dth subsys=daemon
time="2025-05-20T22:53:10Z" level=info msg="Releasing key" key="[k8s:acme.cert-manager.io/http-domain=608961820 k8s:acme.cert-manager.io/http-token=1208684116 k8s:acme.cert-manager.io/http01-solver=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=bikexbike]" subsys=allocator
time="2025-05-20T22:53:10Z" level=info msg="Removed endpoint" containerID=1c80f3d7cf datapathPolicyRevision=78 desiredPolicyRevision=78 endpointID=227 identity=11396 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:53:11Z" level=info msg="Removed endpoint" containerID=8e639bae91 datapathPolicyRevision=78 desiredPolicyRevision=78 endpointID=1088 identity=940 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T23:26:21Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T23:26:22Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=e3089740-11c8-43cd-8db9-7a08030e92af subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e3089740-11c8-43cd-8db9-7a08030e92af policyRevision=80 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=ff88eaca-a834-47f2-81f7-b1a794ebc44f subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=ff88eaca-a834-47f2-81f7-b1a794ebc44f policyRevision=82 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=7358c5f9-b16b-4a6a-b0fb-170cbba3b5c3 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7358c5f9-b16b-4a6a-b0fb-170cbba3b5c3 policyRevision=84 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=a52a23a7-9f13-4955-a5ae-d397074bb257 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a52a23a7-9f13-4955-a5ae-d397074bb257 policyRevision=86 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=a54ecbcf-2bd6-466c-bd10-0d4faa10b351 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a54ecbcf-2bd6-466c-bd10-0d4faa10b351 policyRevision=88 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=928f660a-055c-4c39-be3b-4836da740c3c subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=928f660a-055c-4c39-be3b-4836da740c3c policyRevision=90 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=2b57e550-c919-4916-b4ce-885116c19b48 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2b57e550-c919-4916-b4ce-885116c19b48 policyRevision=92 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-22T22:00:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.208 3bc73c64-f463-4f05-8599-d43a7b4b9a65 default }" containerID=370fd7c6d8c03b733609c2a2494f683699384a0ad21be26a3f3dd02a1d85cdbf datapathConfiguration="&{false false false false false <nil>}" interface=lxc518e75b83fcc k8sPodName=kube-system/coredns-6b79676d8-kx7lx labels="[]" subsys=daemon sync-build=true
time="2025-05-22T22:00:42Z" level=info msg="New endpoint" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=33 ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Resolving identity labels (blocking)" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=33 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Identity of endpoint changed" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=33 identity=30020 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx oldIdentity="no identity" subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Waiting for endpoint to be generated" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=33 identity=30020 ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Rewrote endpoint BPF program" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=92 endpointID=33 identity=30020 ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Successful endpoint creation" containerID=370fd7c6d8 datapathPolicyRevision=92 desiredPolicyRevision=92 endpointID=33 identity=30020 ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=daemon
time="2025-05-22T22:00:43Z" level=info msg="Delete endpoint request" containerID=8b854afabc endpointID=935 k8sNamespace=kube-system k8sPodName=coredns-854895db77-2x6tn subsys=daemon
time="2025-05-22T22:00:43Z" level=info msg="Releasing key" key="[k8s:doks.digitalocean.com/managed=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=coredns k8s:io.kubernetes.pod.namespace=kube-system k8s:k8s-app=kube-dns]" subsys=allocator
time="2025-05-22T22:00:43Z" level=info msg="Removed endpoint" containerID=8b854afabc datapathPolicyRevision=92 desiredPolicyRevision=78 endpointID=935 identity=30020 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=70a7b83f-78e4-4d1f-ae86-3e8b8307204e subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=70a7b83f-78e4-4d1f-ae86-3e8b8307204e policyRevision=94 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=33d97012-ad8b-4a26-b60a-4996d87361c6 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=33d97012-ad8b-4a26-b60a-4996d87361c6 policyRevision=96 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=2bf058b5-6d24-4c81-b8b4-3f47c1e5af0a subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2bf058b5-6d24-4c81-b8b4-3f47c1e5af0a policyRevision=98 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=82e40dcf-d39c-4bb6-8fea-2f88ff3bd62b subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=82e40dcf-d39c-4bb6-8fea-2f88ff3bd62b policyRevision=100 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=1aeb10a9-a550-4638-a8dc-c9d3aae8ff8f subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1aeb10a9-a550-4638-a8dc-c9d3aae8ff8f policyRevision=102 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=5f8d8aff-ff51-45aa-a19a-8045cbb20421 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=5f8d8aff-ff51-45aa-a19a-8045cbb20421 policyRevision=104 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=6cdc2019-52b5-42c9-815e-2dae0a1779c1 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6cdc2019-52b5-42c9-815e-2dae0a1779c1 policyRevision=106 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=a4a0a664-1de3-4ec6-9201-a1383da03db9 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a4a0a664-1de3-4ec6-9201-a1383da03db9 policyRevision=108 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=714287ea-0187-44e5-932e-f6438c6a9adb subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=714287ea-0187-44e5-932e-f6438c6a9adb policyRevision=110 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=fc25f8a8-61a1-4842-94ad-2f9ec63f53bb subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=fc25f8a8-61a1-4842-94ad-2f9ec63f53bb policyRevision=112 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=138dc700-7d6c-477a-8a3c-40861bc2c901 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=138dc700-7d6c-477a-8a3c-40861bc2c901 policyRevision=114 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=1d2205bb-b216-42f6-af3d-6b5d2606e832 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1d2205bb-b216-42f6-af3d-6b5d2606e832 policyRevision=116 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=1e03efb0-697a-425a-9a5b-60346d800a17 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1e03efb0-697a-425a-9a5b-60346d800a17 policyRevision=118 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=490f6703-f8b1-4ec2-8c08-a1a2894eab48 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=490f6703-f8b1-4ec2-8c08-a1a2894eab48 policyRevision=120 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
cilium-agent
time="2025-04-17T22:05:13Z" level=info msg="Memory available for map entries (0.003% of 4105375744B): 10263439B" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-ct-global-tcp-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-ct-global-any-max set by dynamic sizing to 65536" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-nat-global-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-neigh-global-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-sock-rev-map-max set by dynamic sizing to 65536" subsys=config
time="2025-04-17T22:05:13Z" level=info msg=" --agent-health-port='9879'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --agent-labels=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --agent-liveness-update-interval='1s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --agent-not-ready-taint-key='node.cilium.io/agent-not-ready'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --allocator-list-timeout='3m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --allow-icmp-frag-needed='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --allow-localhost='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --annotate-k8s-node='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --api-rate-limit=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --arping-refresh-period='30s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --auto-create-cilium-node-resource='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --auto-direct-node-routes='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bgp-announce-lb-ip='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bgp-announce-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bgp-config-path='/var/lib/cilium/bgp/config.yaml'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-auth-map-max='524288'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-global-any-max='262144'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-global-tcp-max='524288'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-regular-any='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-regular-tcp='6h0m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-regular-tcp-fin='10s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-regular-tcp-syn='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-service-any='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-service-tcp='6h0m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-service-tcp-grace='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-filter-priority='1'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-fragments-map-max='8192'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-acceleration='disabled'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-affinity-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-algorithm='random'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-dev-ip-addr-inherit=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-dsr-dispatch='opt'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-dsr-l4-xlate='frontend'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-external-clusterip='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-maglev-hash-seed='JLfvgnHc2kaSUFaI'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-maglev-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-maglev-table-size='16381'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-map-max='65536'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-mode='snat'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-proto-diff='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-rev-nat-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-rss-ipv4-src-cidr=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-rss-ipv6-src-cidr=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-service-backend-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-service-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-sock='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-sock-hostns-only='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-source-range-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-map-dynamic-size-ratio='0.0025'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-map-event-buffers=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-nat-global-max='524288'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-neigh-global-max='524288'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-policy-map-full-reconciliation-interval='15m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-policy-map-max='16384'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-root='/sys/fs/bpf'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-sock-rev-map-max='262144'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bypass-ip-availability-upon-restore='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --certificates-directory='/var/run/cilium/certs'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cflags=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cgroup-root='/run/cilium/cgroupv2'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cilium-endpoint-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cluster-health-port='4240'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cluster-id='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cluster-name='default'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cluster-pool-ipv4-mask-size='25'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --clustermesh-config='/var/lib/cilium/clustermesh/'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --clustermesh-ip-identities-sync-timeout='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cmdref=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-chaining-mode='portmap'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-chaining-target=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-exclusive='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-external-routing='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-log-file='/var/run/cilium/cilium-cni.log'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --config=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --config-dir='/tmp/cilium/config-map'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --config-sources='config-map:kube-system/cilium-config'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --conntrack-gc-interval='0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --conntrack-gc-max-interval='0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --container-ip-local-reserved-ports='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --crd-wait-timeout='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --custom-cni-conf='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --datapath-mode='veth'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --debug='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --debug-verbose=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --derive-masquerade-ip-addr-from-device=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --devices=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --direct-routing-device=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --disable-cnp-status-updates='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --disable-endpoint-crd='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --disable-envoy-version-check='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --disable-iptables-feeder-rules=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dns-max-ips-per-restored-rule='1000'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dns-policy-unload-on-shutdown='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-concurrency-limit='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-concurrency-processing-grace-period='0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-enable-transparent-mode='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-insecure-skip-transparent-mode-check='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-lock-count='128'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-lock-timeout='500ms'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-socket-linger-timeout='10'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --egress-gateway-policy-map-max='16384'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --egress-gateway-reconciliation-trigger-interval='1s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --egress-masquerade-interfaces=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --egress-multi-home-ip-rule-compat='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-auto-protect-node-port-range='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bandwidth-manager='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bbr='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bgp-control-plane='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bpf-clock-probe='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bpf-masquerade='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bpf-tproxy='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-cilium-api-server-access='*'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-cilium-endpoint-slice='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-cilium-health-api-server-access='*'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-custom-calls='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-endpoint-health-checking='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-endpoint-routes='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-envoy-config='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-external-ips='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-health-check-nodeport='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-health-checking='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-high-scale-ipcache='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-host-firewall='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-host-legacy-routing='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-host-port='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-hubble='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-hubble-recorder-api='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-icmp-rules='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-identity-mark='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ip-masq-agent='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipsec='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipsec-key-watcher='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipsec-xfrm-state-caching='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4-big-tcp='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4-egress-gateway='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4-fragment-tracking='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4-masquerade='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv6='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv6-big-tcp='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv6-masquerade='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv6-ndp='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-api-discovery='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-endpoint-slice='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-event-handover='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-networkpolicy='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-terminating-endpoint='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-l2-announcements='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-l2-neigh-discovery='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-l2-pod-announcements='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-l7-proxy='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-local-node-route='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-local-redirect-policy='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-mke='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-monitor='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-nat46x64-gateway='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-node-port='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-pmtu-discovery='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-policy='default'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-recorder='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-remote-node-identity='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-runtime-device-detection='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-sctp='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-service-topology='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-session-affinity='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-srv6='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-stale-cilium-endpoint-cleanup='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-svc-source-range-check='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-tracing='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-unreachable-routes='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-vtep='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-well-known-identities='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-wireguard='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-wireguard-userspace-fallback='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-xdp-prefilter='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-xt-socket-fallback='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --encrypt-interface=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --encrypt-node='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --endpoint-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --endpoint-queue-size='25'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --endpoint-status=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --envoy-config-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --envoy-log=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --exclude-local-address=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --external-envoy-proxy='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --fixed-identity-mapping=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --fqdn-regex-compile-lru-size='1024'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --gops-port='9890'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-403-msg=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-idle-timeout='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-max-grpc-timeout='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-normalize-path='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-request-timeout='3600'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-retry-count='3'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-retry-timeout='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-disable-tls='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-event-buffer-capacity='4095'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-event-queue-size='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-export-file-compress='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-export-file-max-backups='5'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-export-file-max-size-mb='10'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-export-file-path=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-listen-address=':4244'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-metrics=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-metrics-server=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-monitor-events=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-prefer-ipv6='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-recorder-sink-queue-size='1024'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-recorder-storage-path='/var/run/cilium/pcaps'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-skip-unknown-cgroup-ids='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-socket-path='/var/run/cilium/hubble.sock'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-tls-cert-file='/var/lib/cilium/tls/hubble/server.crt'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-tls-client-ca-files='/var/lib/cilium/tls/hubble/client-ca.crt'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-tls-key-file='/var/lib/cilium/tls/hubble/server.key'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-allocation-mode='crd'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-change-grace-period='5s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-gc-interval='5m'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-heartbeat-timeout='15m'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-restore-grace-period='10m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --install-egress-gateway-routes='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --install-iptables-rules='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --install-no-conntrack-iptables-rules='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ip-allocation-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ip-masq-agent-config-path='/etc/config/ip-masq-agent'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipam='cluster-pool'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipam-cilium-node-update-rate='15s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipam-multi-pool-pre-allocation='default=8'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipsec-key-file=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipsec-key-rotation-duration='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --iptables-lock-timeout='5s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --iptables-random-fully='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-native-routing-cidr='10.244.0.0/16'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-node='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-pod-subnets=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-range='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-service-loopback-address='169.254.42.1'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-service-range='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-cluster-alloc-cidr='f00d::/64'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-mcast-device=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-native-routing-cidr=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-node='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-pod-subnets=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-range='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-service-range='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --join-cluster='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-api-server='https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-client-burst='10'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-client-qps='5'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-heartbeat-timeout='30s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-kubeconfig-path=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-namespace='kube-system'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-require-ipv4-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-require-ipv6-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-service-cache-size='128'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-service-proxy-name=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-sync-timeout='3m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-watcher-endpoint-selector='metadata.name!=kube-scheduler,metadata.name!=kube-controller-manager,metadata.name!=etcd-operator,metadata.name!=gcp-controller-manager'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --keep-config='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kube-proxy-replacement='partial'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kube-proxy-replacement-healthz-bind-address=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-connectivity-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-lease-ttl='15m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-max-consecutive-quorum-errors='2'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-opt=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-periodic-sync='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --l2-announcements-lease-duration='15s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --l2-announcements-renew-deadline='5s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --l2-announcements-retry-period='2s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --l2-pod-announcements-interface=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --label-prefix-file=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --labels=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --lib-dir='/var/lib/cilium'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --local-max-addr-scope='252'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --local-router-ipv4=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --local-router-ipv6=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --log-driver=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --log-opt=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --log-system-load='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --max-controller-interval='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-enabled='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-mutual-listener-port='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-queue-size='1024'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-rotated-identities-queue-size='1024'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-signal-backoff-duration='1s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-spiffe-trust-domain='spiffe.cilium'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-spire-admin-socket=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --metrics='+cilium_bpf_map_pressure'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mke-cgroup-mount=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --monitor-aggregation='medium'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --monitor-aggregation-flags='all'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --monitor-aggregation-interval='5s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --monitor-queue-size='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mtu='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-encryption-opt-out-labels='node-role.kubernetes.io/control-plane'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-acceleration='disabled'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-algorithm='random'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-bind-protection='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-mode='snat'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-range='30000,32767'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --nodes-gc-interval='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --operator-api-serve-addr='127.0.0.1:9234'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --policy-audit-mode='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --policy-queue-size='100'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --policy-trigger-interval='1s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --pprof='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --pprof-address='localhost'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --pprof-port='6060'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --preallocate-bpf-maps='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --prepend-iptables-chains='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --procfs='/host/proc'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --prometheus-serve-addr=':9090'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-connect-timeout='2'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-gid='1337'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-idle-timeout-seconds='60'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-max-connection-duration-seconds='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-max-requests-per-connection='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-prometheus-port='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-xff-num-trusted-hops-egress='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-xff-num-trusted-hops-ingress='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --read-cni-conf=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --remove-cilium-node-taints='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --restore='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --restored-proxy-ports-age-limit='15'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --route-metric='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --routing-mode='native'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --set-cilium-is-up-condition='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --set-cilium-node-taints='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --sidecar-istio-proxy-image='cilium/istio_proxy'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --single-cluster-route='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --skip-cnp-status-startup-clean='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --socket-path='/var/run/cilium/cilium.sock'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --srv6-encap-mode='reduced'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --state-dir='/var/run/cilium'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --synchronize-k8s-nodes='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-dns-reject-response-code='refused'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-enable-dns-compression='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-endpoint-max-ip-per-hostname='50'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-idle-connection-grace-period='0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-max-deferred-connection-deletes='10000'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-min-ttl='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-pre-cache=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-proxy-port='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-proxy-response-max-delay='100ms'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --trace-payloadlen='128'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --trace-sock='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tunnel=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tunnel-port='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tunnel-protocol='vxlan'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --unmanaged-pod-watcher-interval='15'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --use-cilium-internal-ip-for-ipsec='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --version='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vlan-bpf-bypass=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vtep-cidr=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vtep-endpoint=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vtep-mac=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vtep-mask=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --wireguard-encapsulate='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --write-cni-conf-when-ready='/host/etc/cni/net.d/05-cilium.conflist'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" _ _ _" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" ___|_| |_|_ _ _____" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg="| _| | | | | | |" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg="|___|_|_|_|___|_|_|_|" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg="Cilium 1.14.18 5418622a22 2024-07-03T11:57:56+02:00 go version go1.22.10 linux/amd64" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg="clang (10.0.0) and kernel (6.1.0) versions: OK!" subsys=linux-datapath
time="2025-04-17T22:05:13Z" level=info msg="linking environment: OK!" subsys=linux-datapath
time="2025-04-17T22:05:13Z" level=info msg="Kernel config file not found: if the agent fails to start, check the system requirements at https://docs.cilium.io/en/stable/operations/system_requirements" subsys=probes
time="2025-04-17T22:05:13Z" level=info msg="Detected mounted BPF filesystem at /sys/fs/bpf" subsys=bpf
time="2025-04-17T22:05:13Z" level=info msg="Mounted cgroupv2 filesystem at /run/cilium/cgroupv2" subsys=cgroups
time="2025-04-17T22:05:13Z" level=info msg="Parsing base label prefixes from default label list" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg="Parsing additional label prefixes from user inputs: []" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg="Final label prefixes to be used for identity evaluation:" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - reserved:.*" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - :io\\.kubernetes\\.pod\\.namespace" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - :io\\.cilium\\.k8s\\.namespace\\.labels" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - :app\\.kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:io\\.kubernetes" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:.*beta\\.kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:k8s\\.io" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:pod-template-generation" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:pod-template-hash" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:controller-revision-hash" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:annotation.*" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:etcd_node" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration=1.035509ms function="pprof.init.func1 (cell.go:50)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="321.276µs" function="gops.registerGopsHooks (cell.go:38)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration=1.139622ms function="metrics.init.func1 (cell.go:11)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="12.681µs" function="metrics.init.func2 (cell.go:14)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Spire Delegate API Client is disabled as no socket path is configured" subsys=spire-delegate
time="2025-04-17T22:05:13Z" level=info msg="Mutual authentication handler is disabled as no port is configured" subsys=auth
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration=117.484699ms function="cmd.init.func3 (daemon_main.go:1638)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="42.14µs" function="bgpv1.init.func1 (cell.go:46)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="224.508µs" function="metrics.RegisterCollector (metrics.go:56)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="12.17µs" function="gc.registerSignalHandler (cell.go:47)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="83.243µs" function="utime.initUtimeSync (cell.go:29)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="208.071µs" function="agentliveness.newAgentLivenessUpdater (agent_liveness.go:43)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="200.473µs" function="l2responder.NewL2ResponderReconciler (l2responder.go:63)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="202.202µs" function="garp.newGARPProcessor (processor.go:27)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Starting subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Started gops server" address="127.0.0.1:9890" subsys=gops
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="916.22µs" function="gops.registerGopsHooks.func1 (cell.go:43)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="188.629µs" function="metrics.NewRegistry.func1 (registry.go:86)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Establishing connection to apiserver" host="https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com" subsys=k8s-client
time="2025-04-17T22:05:13Z" level=info msg="Serving prometheus metrics on :9090" subsys=metrics
time="2025-04-17T22:05:13Z" level=info msg="Connected to apiserver" subsys=k8s-client
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration=23.698859ms function="client.(*compositeClientset).onStart" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration=4.182395ms function="authmap.newAuthMap.func1 (cell.go:27)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="41.206µs" function="configmap.newMap.func1 (cell.go:23)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="207.231µs" function="signalmap.newMap.func1 (cell.go:44)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="293.972µs" function="nodemap.newNodeMap.func1 (cell.go:23)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="208.526µs" function="eventsmap.newEventsMap.func1 (cell.go:35)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Generating CNI configuration file with mode portmap" subsys=cni-config
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="189.175µs" function="*cni.cniConfigManager.Start" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Wrote CNI configuration file to /host/etc/cni/net.d/05-cilium.conflist" subsys=cni-config
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=31.520894ms function="datapath.newDatapath.func1 (cells.go:113)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Restored 0 node IDs from the BPF map" subsys=linux-datapath
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="70.695µs" function="datapath.newDatapath.func2 (cells.go:126)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="10.122µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Node].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="2.676µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumNode].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Using autogenerated IPv4 allocation range" subsys=node v4Prefix=10.89.0.0/16
time="2025-04-17T22:05:14Z" level=info msg="no local ciliumnode found, will not restore cilium internal ips from k8s" subsys=daemon
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=107.278973ms function="node.NewLocalNodeStore.func1 (local_node_store.go:76)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="2.44µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Service].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=100.556663ms function="*manager.diffStore[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Service].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="2.286µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s.Endpoints].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Using discoveryv1.EndpointSlice" subsys=k8s
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=200.665286ms function="*manager.diffStore[*github.com/cilium/cilium/pkg/k8s.Endpoints].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="10.687µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Pod].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="1.369µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Namespace].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="1.345µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumNetworkPolicy].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="1.24µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumClusterwideNetworkPolicy].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="4.194µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2alpha1.CiliumCIDRGroup].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="41.475µs" function="endpointmanager.newDefaultEndpointManager.func1 (cell.go:201)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="38.347µs" function="cmd.newPolicyTrifecta.func1 (policy.go:135)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="48.114µs" function="*manager.manager.Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Serving cilium node monitor v1.2 API at unix:///var/run/cilium/monitor1_2.sock" subsys=monitor-agent
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="563.032µs" function="agent.newMonitorAgent.func1 (cell.go:61)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="2.707µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2alpha1.CiliumL2AnnouncementPolicy].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="8.079µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Resoring proxy ports from file failed, falling back to restoring from iptables rules" error="stat /var/run/cilium/state/proxy_ports_state.json: no such file or directory" file-path=/var/run/cilium/state/proxy_ports_state.json subsys=proxy
time="2025-04-17T22:05:14Z" level=info msg="Envoy: Starting xDS gRPC server listening on /var/run/cilium/envoy/sockets/xds.sock" subsys=envoy-manager
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=2.80848ms function="proxy.newProxy.func1 (cell.go:63)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="314.303µs" function="signal.provideSignalManager.func1 (cell.go:25)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Datapath signal listener running" subsys=signal
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=1.386562ms function="auth.registerAuthManager.func1 (cell.go:109)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="3.136µs" function="auth.registerGCJobs.func1 (cell.go:158)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="12.564µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Using Managed Neighbor Kernel support" subsys=daemon
time="2025-04-17T22:05:14Z" level=warning msg="Deprecated value for --kube-proxy-replacement: partial (use either \"true\", or \"false\")" subsys=daemon
time="2025-04-17T22:05:14Z" level=info msg="Inheriting MTU from external network interface" device=eth1 ipAddr=10.108.0.2 mtu=1500 subsys=mtu
time="2025-04-17T22:05:14Z" level=info msg="Local boot ID is \"7f7f7913-4eb2-4025-983e-df322a37a2b5\"" subsys=node
time="2025-04-17T22:05:14Z" level=info msg="Removed map pin at /sys/fs/bpf/tc/globals/cilium_ipcache, recreating and re-pinning map cilium_ipcache" file-path=/sys/fs/bpf/tc/globals/cilium_ipcache name=cilium_ipcache subsys=bpf
time="2025-04-17T22:05:14Z" level=info msg="Restored services from maps" failedServices=0 restoredServices=0 subsys=service
time="2025-04-17T22:05:14Z" level=info msg="Restored backends from maps" failedBackends=0 restoredBackends=0 skippedBackends=0 subsys=service
time="2025-04-17T22:05:14Z" level=info msg="Reading old endpoints..." subsys=daemon
time="2025-04-17T22:05:14Z" level=info msg="No old endpoints found." subsys=daemon
time="2025-04-17T22:05:14Z" level=info msg="Waiting until all Cilium CRDs are available" subsys=k8s
time="2025-04-17T22:05:15Z" level=info msg="All Cilium CRDs have been found and are available" subsys=k8s
time="2025-04-17T22:05:15Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-655pn subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-655pn subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=warning msg="Unable to get node resource" error="ciliumnodes.cilium.io \"system-0-655pn\" not found" subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=warning msg="Unable to get node resource" error="ciliumnodes.cilium.io \"system-0-655pn\" not found" subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=info msg="Successfully created CiliumNode resource" subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=warning msg="Unable to create CiliumNode resource, will retry" error="ciliumnodes.cilium.io \"system-0-655pn\" already exists" subsys=nodediscovery
time="2025-04-17T22:05:16Z" level=info msg="Retrieved node information from cilium node" nodeName=system-0-655pn subsys=k8s
time="2025-04-17T22:05:16Z" level=info msg="Received own node information from API server" ipAddr.ipv4=10.108.0.2 ipAddr.ipv6="<nil>" k8sNodeIP=10.108.0.2 labels="map[beta.kubernetes.io/arch:amd64 beta.kubernetes.io/instance-type:s-2vcpu-4gb beta.kubernetes.io/os:linux doks.digitalocean.com/managed:true doks.digitalocean.com/node-id:0c59dc13-5e8b-4d6a-9057-4cbcf64aeb3f doks.digitalocean.com/node-pool:system-0 doks.digitalocean.com/node-pool-id:73347348-171d-4091-a3c9-1d4b0945c964 doks.digitalocean.com/version:1.30.10-do.0 failure-domain.beta.kubernetes.io/region:nyc3 kubernetes.io/arch:amd64 kubernetes.io/hostname:system-0-655pn kubernetes.io/os:linux node.kubernetes.io/instance-type:s-2vcpu-4gb region:nyc3 topology.kubernetes.io/region:nyc3]" nodeName=system-0-655pn subsys=k8s v4Prefix=10.244.1.128/25 v6Prefix="<nil>"
time="2025-04-17T22:05:16Z" level=info msg="k8s mode: Allowing localhost to reach local endpoints" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Detected devices" devices="[]" subsys=linux-datapath
time="2025-04-17T22:05:16Z" level=info msg="Enabling k8s event listener" subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Removing stale endpoint interfaces" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Waiting until local node addressing before starting watchers depending on it" subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Skipping kvstore configuration" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Initializing node addressing" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Initializing cluster-pool IPAM" subsys=ipam v4Prefix=10.244.1.128/25 v6Prefix="<nil>"
time="2025-04-17T22:05:16Z" level=info msg="Restoring endpoints..." subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Endpoints restored" failed=0 restored=0 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Addressing information:" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Cluster-Name: default" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Cluster-ID: 0" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Local node-name: system-0-655pn" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Node-IPv6: <nil>" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" External-Node IPv4: 10.108.0.2" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Internal-Node IPv4: 10.244.1.165" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" IPv4 allocation prefix: 10.244.1.128/25" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" IPv4 native routing prefix: 10.244.0.0/16" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Loopback IPv4: 169.254.42.1" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=040b8479-1053-4ec0-97c9-265119521782 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=040b8479-1053-4ec0-97c9-265119521782 policyRevision=2 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=66809df7-c24d-4610-8d65-488b27d23a9e subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=66809df7-c24d-4610-8d65-488b27d23a9e policyRevision=3 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Local IPv4 addresses:" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" - 64.225.60.89" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" - 10.17.0.6" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" - 10.108.0.2" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" - 64.225.60.89" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-655pn subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-65529 subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=6415158a-1311-4121-9735-85c0b7e33fe3 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6415158a-1311-4121-9735-85c0b7e33fe3 policyRevision=4 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Adding local node to cluster" node="{system-0-655pn default [{ExternalIP 64.225.60.89} {InternalIP 10.108.0.2} {CiliumInternalIP 10.244.1.165} {ExternalIP 64.225.60.89}] 10.244.1.128/25 [] <nil> [] 10.244.1.222 <nil> <nil> <nil> 0 local 0 map[beta.kubernetes.io/arch:amd64 beta.kubernetes.io/instance-type:s-2vcpu-4gb beta.kubernetes.io/os:linux doks.digitalocean.com/managed:true doks.digitalocean.com/node-id:0c59dc13-5e8b-4d6a-9057-4cbcf64aeb3f doks.digitalocean.com/node-pool:system-0 doks.digitalocean.com/node-pool-id:73347348-171d-4091-a3c9-1d4b0945c964 doks.digitalocean.com/version:1.30.10-do.0 failure-domain.beta.kubernetes.io/region:nyc3 kubernetes.io/arch:amd64 kubernetes.io/hostname:system-0-655pn kubernetes.io/os:linux node.kubernetes.io/instance-type:s-2vcpu-4gb region:nyc3 topology.kubernetes.io/region:nyc3] map[] 1 7f7f7913-4eb2-4025-983e-df322a37a2b5}" subsys=nodediscovery
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=9c50c780-5e51-481e-a8e7-dd3fad62b4b2 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=9c50c780-5e51-481e-a8e7-dd3fad62b4b2 policyRevision=5 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=03c20995-2a4f-47c8-9529-395b38d9d919 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=03c20995-2a4f-47c8-9529-395b38d9d919 policyRevision=6 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=2756fcc8-31bc-4236-8f97-16d28cd34cf3 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2756fcc8-31bc-4236-8f97-16d28cd34cf3 policyRevision=7 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=7d1bb8e4-1969-4671-a7ac-2aca76969c7b subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7d1bb8e4-1969-4671-a7ac-2aca76969c7b policyRevision=8 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-655pn subsys=nodediscovery
time="2025-04-17T22:05:16Z" level=info msg="Waiting until all pre-existing resources have been received" subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-6tk3b subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-6tk3r subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="Annotating k8s node" subsys=daemon v4CiliumHostIP.IPv4=10.244.1.165 v4IngressIP.IPv4="<nil>" v4Prefix=10.244.1.128/25 v4healthIP.IPv4=10.244.1.222 v6CiliumHostIP.IPv6="<nil>" v6IngressIP.IPv6="<nil>" v6Prefix="<nil>" v6healthIP.IPv6="<nil>"
time="2025-04-17T22:05:16Z" level=info msg="Initializing identity allocator" subsys=identity-cache
time="2025-04-17T22:05:16Z" level=info msg="Allocating identities between range" cluster-id=0 max=65535 min=256 subsys=identity-cache
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.forwarding sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.rp_filter sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.accept_local sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.send_redirects sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.forwarding sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.rp_filter sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.accept_local sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.send_redirects sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.core.bpf_jit_enable sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.all.rp_filter sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.fib_multipath_use_neigh sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=kernel.unprivileged_bpf_disabled sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=kernel.timer_migration sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting up BPF datapath" bpfClockSource=ktime bpfInsnSet="<nil>" subsys=datapath-loader
time="2025-04-17T22:05:16Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-17T22:05:16Z" level=info msg="Iptables rules installed" subsys=iptables
time="2025-04-17T22:05:16Z" level=info msg="Adding new proxy port rules for cilium-dns-egress:45473" id=cilium-dns-egress subsys=proxy
time="2025-04-17T22:05:16Z" level=info msg="Iptables proxy rules installed" subsys=iptables
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration=2.455332543s function="cmd.newDaemonPromise.func1 (daemon_main.go:1694)" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration="157.644µs" function="utime.initUtimeSync.func1 (cell.go:33)" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration="6.008µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration="247.07µs" function="l2respondermap.newMap.func1 (l2_responder_map4.go:44)" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration="4.871µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Starting IP identity watcher" subsys=ipcache
time="2025-04-17T22:05:16Z" level=info msg="Initializing daemon" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Validating configured node address ranges" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Starting connection tracking garbage collector" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Initial scan of connection tracking completed" subsys=ct-gc
time="2025-04-17T22:05:16Z" level=info msg="Regenerating restored endpoints" numRestored=0 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Creating host endpoint" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="New endpoint" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=753 ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:16Z" level=info msg="Resolving identity labels (blocking)" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=753 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:doks.digitalocean.com/node-id=0c59dc13-5e8b-4d6a-9057-4cbcf64aeb3f,k8s:doks.digitalocean.com/node-pool-id=73347348-171d-4091-a3c9-1d4b0945c964,k8s:doks.digitalocean.com/node-pool=system-0,k8s:doks.digitalocean.com/version=1.30.10-do.0,k8s:node.kubernetes.io/instance-type=s-2vcpu-4gb,k8s:region=nyc3,k8s:topology.kubernetes.io/region=nyc3,reserved:host" ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:16Z" level=info msg="Identity of endpoint changed" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=753 identity=1 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:doks.digitalocean.com/node-id=0c59dc13-5e8b-4d6a-9057-4cbcf64aeb3f,k8s:doks.digitalocean.com/node-pool-id=73347348-171d-4091-a3c9-1d4b0945c964,k8s:doks.digitalocean.com/node-pool=system-0,k8s:doks.digitalocean.com/version=1.30.10-do.0,k8s:node.kubernetes.io/instance-type=s-2vcpu-4gb,k8s:region=nyc3,k8s:topology.kubernetes.io/region=nyc3,reserved:host" ipv4= ipv6= k8sPodName=/ oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:05:16Z" level=info msg="Launching Cilium health daemon" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Launching Cilium health endpoint" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Started healthz status API server" address="127.0.0.1:9879" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Processing queued endpoint deletion requests from /var/run/cilium/deleteQueue" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="processing 0 queued deletion requests" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Initializing Cilium API" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Finished regenerating restored endpoints" regenerated=0 subsys=daemon total=0
time="2025-04-17T22:05:16Z" level=info msg="Deleted orphan backends" orphanBackends=0 subsys=service
time="2025-04-17T22:05:16Z" level=info msg="Cleaning up Cilium health endpoint" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Removed stale bpf map" file-path=/sys/fs/bpf/tc/globals/cilium_lb4_source_range subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Daemon initialization completed" bootstrapTime=3.694082514s subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Configuring Hubble server" eventQueueSize=2048 maxFlows=4095 subsys=hubble
time="2025-04-17T22:05:16Z" level=info msg="Serving cilium API at unix:///var/run/cilium/cilium.sock" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Beginning to read perf buffer" startTime="2025-04-17 22:05:16.9790939 +0000 UTC m=+3.777584086" subsys=monitor-agent
time="2025-04-17T22:05:16Z" level=info msg="Starting local Hubble server" address="unix:///var/run/cilium/hubble.sock" subsys=hubble
time="2025-04-17T22:05:16Z" level=info msg="Starting Hubble server" address=":4244" subsys=hubble
time="2025-04-17T22:05:17Z" level=info msg="Compiled new BPF template" BPFCompilationTime=505.310648ms file-path=/var/run/cilium/state/templates/5aaf125271bf9916651b97512cb053ef03b67f2b3978c7d7c70a926d9bb7357b/bpf_host.o subsys=datapath-loader
time="2025-04-17T22:05:17Z" level=info msg="Create endpoint request" addressing="&{10.244.1.205 7d10d271-a1dc-4291-a571-564a5409e74f default }" containerID=53e45f9f6a10b8b2b62a9ab11f70e73ef31eccabf1d339d90b22569ef50d8d9d datapathConfiguration="&{false false false false false <nil>}" interface=lxc7f8654bb9ddf k8sPodName=kube-system/konnectivity-agent-d54mg labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:05:17Z" level=info msg="New endpoint" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=51 ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Resolving identity labels (blocking)" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=51 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=konnectivity-agent" ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=konnectivity-agent;" subsys=allocator
time="2025-04-17T22:05:17Z" level=info msg="Identity of endpoint changed" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=51 identity=32430 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=konnectivity-agent" ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Waiting for endpoint to be generated" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=51 identity=32430 ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-17T22:05:17Z" level=info msg="Rewrote endpoint BPF program" containerID= datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=753 identity=1 ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="New endpoint" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=136 ipv4=10.244.1.222 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Resolving identity labels (blocking)" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=136 identityLabels="reserved:health" ipv4=10.244.1.222 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Identity of endpoint changed" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=136 identity=4 identityLabels="reserved:health" ipv4=10.244.1.222 ipv6= k8sPodName=/ oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:05:18Z" level=info msg="Serving cilium health API at unix:///var/run/cilium/health.sock" subsys=health-server
time="2025-04-17T22:05:19Z" level=info msg="Compiled new BPF template" BPFCompilationTime=1.817672457s file-path=/var/run/cilium/state/templates/66fcee6b127281654b77182ef809a08d06c4bda30601191c13385ae4192cb86e/bpf_lxc.o subsys=datapath-loader
time="2025-04-17T22:05:19Z" level=info msg="Rewrote endpoint BPF program" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=51 identity=32430 ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=endpoint
time="2025-04-17T22:05:19Z" level=info msg="Successful endpoint creation" containerID=53e45f9f6a datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=51 identity=32430 ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=daemon
time="2025-04-17T22:05:19Z" level=info msg="Rewrote endpoint BPF program" containerID= datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=136 identity=4 ipv4=10.244.1.222 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.196 62e942ce-66c5-4806-b859-bc8ba747cc19 default }" containerID=8b854afabcebb3bbc0f24c53651f0d10f7845c0fc063a065b5a5f9393c95015e datapathConfiguration="&{false false false false false <nil>}" interface=lxc72bf4f377db8 k8sPodName=kube-system/coredns-854895db77-2x6tn labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:05:40Z" level=info msg="New endpoint" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=935 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Resolving identity labels (blocking)" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=935 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=coredns;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=kube-dns;" subsys=allocator
time="2025-04-17T22:05:40Z" level=info msg="Identity of endpoint changed" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=935 identity=30020 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Waiting for endpoint to be generated" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=935 identity=30020 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Rewrote endpoint BPF program" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=935 identity=30020 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Successful endpoint creation" containerID=8b854afabc datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=935 identity=30020 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=daemon
time="2025-04-17T22:06:16Z" level=info msg="Node deleted" clusterName=default nodeName=system-0-6tk3b subsys=nodemanager
time="2025-04-17T22:06:56Z" level=info msg="Node deleted" clusterName=default nodeName=system-0-6tk3r subsys=nodemanager
time="2025-04-17T22:10:16Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0005950927734375 newInterval=7m30s subsys=map-ct
time="2025-04-17T22:17:46Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.00102996826171875 newInterval=11m15s subsys=map-ct
time="2025-04-17T22:29:01Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0015411376953125 newInterval=16m53s subsys=map-ct
time="2025-04-17T22:45:54Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0023040771484375 newInterval=25m20s subsys=map-ct
time="2025-04-17T23:11:14Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.00347137451171875 newInterval=38m0s subsys=map-ct
time="2025-04-17T23:49:14Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.005157470703125 newInterval=57m0s subsys=map-ct
time="2025-04-18T00:46:14Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.007720947265625 newInterval=1h25m30s subsys=map-ct
time="2025-04-18T02:11:44Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.01155853271484375 newInterval=2h8m15s subsys=map-ct
time="2025-04-18T04:19:59Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.01715850830078125 newInterval=3h12m23s subsys=map-ct
time="2025-04-18T07:32:23Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0255126953125 newInterval=4h48m35s subsys=map-ct
time="2025-04-18T12:20:58Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.03720855712890625 newInterval=7h12m53s subsys=map-ct
time="2025-04-19T07:37:19Z" level=warning msg="UpdateIdentities: Skipping Delete of a non-existing identity" identity=16777217 subsys=policy
time="2025-04-19T07:37:19Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-19T07:37:20Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=6dc3d794-8c3b-4f41-a3a9-80039d5dd099 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6dc3d794-8c3b-4f41-a3a9-80039d5dd099 policyRevision=10 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=d8fcfd77-a41f-4bf6-9e15-0761d8646b13 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d8fcfd77-a41f-4bf6-9e15-0761d8646b13 policyRevision=12 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=a54a422d-3344-4ddb-b735-f3d1a419b92f subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a54a422d-3344-4ddb-b735-f3d1a419b92f policyRevision=14 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=ab898772-f5c9-4e04-9f06-74f83ef5a562 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=ab898772-f5c9-4e04-9f06-74f83ef5a562 policyRevision=16 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=f555af24-f092-4040-8028-fb956bb9d41b subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=f555af24-f092-4040-8028-fb956bb9d41b policyRevision=18 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=c3ac0f15-63f8-4b2e-a1c9-eaa7be076039 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c3ac0f15-63f8-4b2e-a1c9-eaa7be076039 policyRevision=20 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=1d14c6f1-e698-4fec-8b90-3285a9555ee1 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1d14c6f1-e698-4fec-8b90-3285a9555ee1 policyRevision=22 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=55e0468e-9fb9-42d1-866f-94c482737362 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=55e0468e-9fb9-42d1-866f-94c482737362 policyRevision=24 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=33865cba-4a2b-4a97-8fbe-bb5433e34a7c subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=33865cba-4a2b-4a97-8fbe-bb5433e34a7c policyRevision=26 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=098726c6-5820-431b-a507-6705f5f9bd77 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=098726c6-5820-431b-a507-6705f5f9bd77 policyRevision=28 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=dc9228f1-3a2f-40e0-8bc3-37289428db53 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dc9228f1-3a2f-40e0-8bc3-37289428db53 policyRevision=30 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=d1b4ad49-a81c-4ce8-8ea7-0f678fa9a8f5 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d1b4ad49-a81c-4ce8-8ea7-0f678fa9a8f5 policyRevision=32 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=6b267068-7ab4-4890-93be-69b5f1f59354 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6b267068-7ab4-4890-93be-69b5f1f59354 policyRevision=34 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=74435a11-d798-4d20-9137-091dd8fc35f7 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=74435a11-d798-4d20-9137-091dd8fc35f7 policyRevision=36 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-26T18:05:51Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:05:51Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:06:52Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:06:52Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:27:31Z" level=warning msg="service not found" k8sNamespace=ingress-nginx k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:27:31Z" level=warning msg="service not found" k8sNamespace=ingress-nginx k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-27T20:18:38Z" level=info msg="Create endpoint request" addressing="&{10.244.1.223 bd160c34-044b-4807-90c1-064865370e03 default }" containerID=b537ca15ed4b1101be846c970cb24a9cfe1a5305a7ee47e8e267b7981e872fc7 datapathConfiguration="&{false false false false false <nil>}" interface=lxccb2cec03d915 k8sPodName=kubeintel/cm-acme-http-solver-dfvjs labels="[]" subsys=daemon sync-build=true
time="2025-04-27T20:18:38Z" level=info msg="New endpoint" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3857 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:18:38Z" level=info msg="Resolving identity labels (blocking)" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3857 identityLabels="k8s:acme.cert-manager.io/http-domain=1001522845,k8s:acme.cert-manager.io/http-token=812125645,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:18:38Z" level=info msg="Skipped non-kubernetes labels when labelling ciliumidentity. All labels will still be used in identity determination" labels="map[k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name:kubeintel]" subsys=crd-allocator
time="2025-04-27T20:18:38Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-27T20:18:38Z" level=info msg="Allocated new global key" key="k8s:acme.cert-manager.io/http-domain=1001522845;k8s:acme.cert-manager.io/http-token=812125645;k8s:acme.cert-manager.io/http01-solver=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=kubeintel;" subsys=allocator
time="2025-04-27T20:18:38Z" level=info msg="Identity of endpoint changed" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3857 identity=34403 identityLabels="k8s:acme.cert-manager.io/http-domain=1001522845,k8s:acme.cert-manager.io/http-token=812125645,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs oldIdentity="no identity" subsys=endpoint
time="2025-04-27T20:18:38Z" level=info msg="Waiting for endpoint to be generated" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3857 identity=34403 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:18:39Z" level=info msg="Rewrote endpoint BPF program" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=36 endpointID=3857 identity=34403 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:18:39Z" level=info msg="Successful endpoint creation" containerID=b537ca15ed datapathPolicyRevision=36 desiredPolicyRevision=36 endpointID=3857 identity=34403 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=daemon
time="2025-04-27T20:18:39Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-27T20:18:59Z" level=warning msg="service not found" k8sNamespace=kubeintel k8sSvcName=cm-acme-http-solver-cmhbq obj="10.245.224.188:8089/ANY" subsys=k8s-watcher
time="2025-04-27T20:18:59Z" level=info msg="Delete endpoint request" containerID=b537ca15ed endpointID=3857 k8sNamespace=kubeintel k8sPodName=cm-acme-http-solver-dfvjs subsys=daemon
time="2025-04-27T20:18:59Z" level=info msg="Releasing key" key="[k8s:acme.cert-manager.io/http-domain=1001522845 k8s:acme.cert-manager.io/http-token=812125645 k8s:acme.cert-manager.io/http01-solver=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=kubeintel]" subsys=allocator
time="2025-04-27T20:18:59Z" level=info msg="Removed endpoint" containerID=b537ca15ed datapathPolicyRevision=36 desiredPolicyRevision=36 endpointID=3857 identity=34403 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:51:16Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=2b3d26b6-3f49-4c04-a7ad-2f66e6b0c367 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2b3d26b6-3f49-4c04-a7ad-2f66e6b0c367 policyRevision=38 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=0a865511-47ed-4634-8cf3-0f7525a0e5cc subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0a865511-47ed-4634-8cf3-0f7525a0e5cc policyRevision=40 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=4f462fcc-6cce-4776-bb1e-a805b777a7da subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4f462fcc-6cce-4776-bb1e-a805b777a7da policyRevision=42 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=4ea53fc9-11be-4a31-9515-83715cfec69a subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4ea53fc9-11be-4a31-9515-83715cfec69a policyRevision=44 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=c8fc6f9f-d896-4333-a947-b2829ddfc661 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c8fc6f9f-d896-4333-a947-b2829ddfc661 policyRevision=46 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=35ae6b41-78e8-4c5d-814e-3793da4a716f subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=35ae6b41-78e8-4c5d-814e-3793da4a716f policyRevision=48 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=c54698d1-1621-4768-bfb2-8a5cdfea66f7 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c54698d1-1621-4768-bfb2-8a5cdfea66f7 policyRevision=50 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-04T17:34:05Z" level=info msg="Create endpoint request" addressing="&{10.244.1.190 5136d175-4ed5-42a3-96e6-b9454e63c119 default }" containerID=ae7e28295da4c07b493635b5e30c0d0d6970296baf09923a31687b6a789f9194 datapathConfiguration="&{false false false false false <nil>}" interface=lxca76f31062f2c k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz labels="[]" subsys=daemon sync-build=true
time="2025-05-04T17:34:05Z" level=info msg="New endpoint" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1537 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Resolving identity labels (blocking)" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1537 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Reusing existing global key" key="k8s:app=nginx;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;" subsys=allocator
time="2025-05-04T17:34:05Z" level=info msg="Identity of endpoint changed" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1537 identity=60447 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz oldIdentity="no identity" subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Waiting for endpoint to be generated" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1537 identity=60447 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Rewrote endpoint BPF program" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=1537 identity=60447 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Successful endpoint creation" containerID=ae7e28295d datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=1537 identity=60447 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=daemon
time="2025-05-05T00:12:01Z" level=info msg="Create endpoint request" addressing="&{10.244.1.157 e7daef0e-04d2-44ff-b77e-81a1d9a0a4ab default }" containerID=91efb54a92e333c39dfe14c34684d67d9a42be8522258b160baa8a0a2190ec16 datapathConfiguration="&{false false false false false <nil>}" interface=lxc62302748819b k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp labels="[]" subsys=daemon sync-build=true
time="2025-05-05T00:12:01Z" level=info msg="New endpoint" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=471 ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Resolving identity labels (blocking)" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=471 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/instance=kubeintel;k8s:app.kubernetes.io/name=kubeintel;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=kubeintel;k8s:io.kubernetes.pod.namespace=kubeintel;" subsys=allocator
time="2025-05-05T00:12:01Z" level=info msg="Identity of endpoint changed" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=471 identity=23569 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp oldIdentity="no identity" subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Waiting for endpoint to be generated" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=471 identity=23569 ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Rewrote endpoint BPF program" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=471 identity=23569 ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Successful endpoint creation" containerID=91efb54a92 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=471 identity=23569 ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=daemon
time="2025-05-06T03:53:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.189 b8b18320-ac3e-40b4-881d-e811dab2b460 default }" containerID=76de9f3a728fda8007ff2a915648ba69f6a7d61cc1925195734f47095d8d35a3 datapathConfiguration="&{false false false false false <nil>}" interface=lxc0d71c0b24747 k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh labels="[]" subsys=daemon sync-build=true
time="2025-05-06T03:53:41Z" level=info msg="New endpoint" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=713 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Resolving identity labels (blocking)" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=713 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/instance=bikexbike;k8s:app.kubernetes.io/name=bikexbike;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=bikexbike;" subsys=allocator
time="2025-05-06T03:53:41Z" level=info msg="Identity of endpoint changed" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=713 identity=29331 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh oldIdentity="no identity" subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Waiting for endpoint to be generated" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=713 identity=29331 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Rewrote endpoint BPF program" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=713 identity=29331 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Successful endpoint creation" containerID=76de9f3a72 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=713 identity=29331 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=daemon
time="2025-05-06T15:12:32Z" level=info msg="Create endpoint request" addressing="&{10.244.1.173 37e60dcf-d105-43fb-9d1d-8f24cd310df6 default }" containerID=2e4196cc018db244fcdbb9693ae97273c8e95016049d430e4735bf321cc11062 datapathConfiguration="&{false false false false false <nil>}" interface=lxcee23b4ccdc87 k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn labels="[]" subsys=daemon sync-build=true
time="2025-05-06T15:12:32Z" level=info msg="New endpoint" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2200 ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Resolving identity labels (blocking)" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2200 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Identity of endpoint changed" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2200 identity=29331 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn oldIdentity="no identity" subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Waiting for endpoint to be generated" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2200 identity=29331 ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Rewrote endpoint BPF program" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=2200 identity=29331 ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Successful endpoint creation" containerID=2e4196cc01 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=2200 identity=29331 ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=daemon
time="2025-05-06T15:13:02Z" level=info msg="Delete endpoint request" containerID=76de9f3a72 endpointID=713 k8sNamespace=bikexbike k8sPodName=bikexbike-588cddd898-fxlnh subsys=daemon
time="2025-05-06T15:13:02Z" level=info msg="Releasing key" key="[k8s:app.kubernetes.io/instance=bikexbike k8s:app.kubernetes.io/name=bikexbike k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=bikexbike]" subsys=allocator
time="2025-05-06T15:13:02Z" level=info msg="Removed endpoint" containerID=76de9f3a72 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=713 identity=29331 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T19:15:14Z" level=info msg="Delete endpoint request" containerID=ae7e28295d endpointID=1537 k8sNamespace=demo k8sPodName=nginx-deployment-86dcfdf4c6-mt7gz subsys=daemon
time="2025-05-06T19:15:14Z" level=info msg="Releasing key" key="[k8s:app=nginx k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=demo]" subsys=allocator
time="2025-05-06T19:15:14Z" level=info msg="Removed endpoint" containerID=ae7e28295d datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=1537 identity=60447 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Create endpoint request" addressing="&{10.244.1.236 03120237-78f2-439d-bd10-09556e2ba49c default }" containerID=ccb4035800d5f85d9593f911b15878f4335a4da20da7412d7978ee6e6a051413 datapathConfiguration="&{false false false false false <nil>}" interface=lxc3719859f07d9 k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f labels="[]" subsys=daemon sync-build=true
time="2025-05-06T19:16:49Z" level=info msg="New endpoint" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=256 ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Resolving identity labels (blocking)" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=256 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Reusing existing global key" key="k8s:app=nginx;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;" subsys=allocator
time="2025-05-06T19:16:49Z" level=info msg="Identity of endpoint changed" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=256 identity=60447 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f oldIdentity="no identity" subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Waiting for endpoint to be generated" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=256 identity=60447 ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Rewrote endpoint BPF program" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=256 identity=60447 ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Successful endpoint creation" containerID=ccb4035800 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=256 identity=60447 ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=daemon
time="2025-05-08T22:09:27Z" level=info msg="Create endpoint request" addressing="&{10.244.1.225 948ba0d3-18d3-436b-b236-96cbe37459ed default }" containerID=38c1670b13928c712aeb5522f0d8d47e84c4c53d80f03f95465ce1ffb4b29b6f datapathConfiguration="&{false false false false false <nil>}" interface=lxc58f952d2e9d8 k8sPodName=demo/pi-2-2ks2s labels="[]" subsys=daemon sync-build=true
time="2025-05-08T22:09:27Z" level=info msg="New endpoint" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=211 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="Resolving identity labels (blocking)" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=211 identityLabels="k8s:batch.kubernetes.io/controller-uid=0317f955-b62d-4825-8cbf-76309ce47142,k8s:batch.kubernetes.io/job-name=pi-2,k8s:controller-uid=0317f955-b62d-4825-8cbf-76309ce47142,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo,k8s:job-name=pi-2" ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="Skipped non-kubernetes labels when labelling ciliumidentity. All labels will still be used in identity determination" labels="map[k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name:demo]" subsys=crd-allocator
time="2025-05-08T22:09:27Z" level=info msg="Allocated new global key" key="k8s:batch.kubernetes.io/controller-uid=0317f955-b62d-4825-8cbf-76309ce47142;k8s:batch.kubernetes.io/job-name=pi-2;k8s:controller-uid=0317f955-b62d-4825-8cbf-76309ce47142;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;k8s:job-name=pi-2;" subsys=allocator
time="2025-05-08T22:09:27Z" level=info msg="Identity of endpoint changed" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=211 identity=2226 identityLabels="k8s:batch.kubernetes.io/controller-uid=0317f955-b62d-4825-8cbf-76309ce47142,k8s:batch.kubernetes.io/job-name=pi-2,k8s:controller-uid=0317f955-b62d-4825-8cbf-76309ce47142,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo,k8s:job-name=pi-2" ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s oldIdentity="no identity" subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="Waiting for endpoint to be generated" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=211 identity=2226 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-08T22:09:27Z" level=info msg="Rewrote endpoint BPF program" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=211 identity=2226 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="Successful endpoint creation" containerID=38c1670b13 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=211 identity=2226 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=daemon
time="2025-05-08T22:09:53Z" level=info msg="Delete endpoint request" containerID=38c1670b13 endpointID=211 k8sNamespace=demo k8sPodName=pi-2-2ks2s subsys=daemon
time="2025-05-08T22:09:53Z" level=info msg="Releasing key" key="[k8s:batch.kubernetes.io/controller-uid=0317f955-b62d-4825-8cbf-76309ce47142 k8s:batch.kubernetes.io/job-name=pi-2 k8s:controller-uid=0317f955-b62d-4825-8cbf-76309ce47142 k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=demo k8s:job-name=pi-2]" subsys=allocator
time="2025-05-08T22:09:53Z" level=info msg="Removed endpoint" containerID=38c1670b13 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=211 identity=2226 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:41:19Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=b7ad43b9-f312-485c-8c3f-7ad5327a160d subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=b7ad43b9-f312-485c-8c3f-7ad5327a160d policyRevision=52 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=e53e63a4-7df3-4e4b-80c0-ac707b9b916d subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e53e63a4-7df3-4e4b-80c0-ac707b9b916d policyRevision=54 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=f9733521-c7be-4054-afa5-279f2c4b2137 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=f9733521-c7be-4054-afa5-279f2c4b2137 policyRevision=56 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=21c67fb2-89fd-4de2-9a05-63f85bf4b0b2 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=21c67fb2-89fd-4de2-9a05-63f85bf4b0b2 policyRevision=58 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=b96cd4f2-28dd-45a6-a1c3-002dd89ac752 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=b96cd4f2-28dd-45a6-a1c3-002dd89ac752 policyRevision=60 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=a6fbe11e-a123-4a05-bbc6-3b3f87b30b41 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a6fbe11e-a123-4a05-bbc6-3b3f87b30b41 policyRevision=62 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=35dc9be9-c917-4f13-ad65-0010bbbad3bd subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=35dc9be9-c917-4f13-ad65-0010bbbad3bd policyRevision=64 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:28:19Z" level=warning msg="UpdateIdentities: Skipping Delete of a non-existing identity" identity=16777218 subsys=policy
time="2025-05-16T08:28:19Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-16T08:28:21Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=fec02dfa-6368-44ac-922b-d03a2e1603ab subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=fec02dfa-6368-44ac-922b-d03a2e1603ab policyRevision=66 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=f144a001-8965-4a5a-84fb-3aa602e59c75 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=f144a001-8965-4a5a-84fb-3aa602e59c75 policyRevision=68 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=2ace3607-16b9-4047-a9ac-d8d12c8f6de9 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2ace3607-16b9-4047-a9ac-d8d12c8f6de9 policyRevision=70 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=bfdb4d99-3c7d-4682-b887-181ff2cf6564 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=bfdb4d99-3c7d-4682-b887-181ff2cf6564 policyRevision=72 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=25d6f8c4-9d3d-4c59-a21e-7701a73ed64d subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=25d6f8c4-9d3d-4c59-a21e-7701a73ed64d policyRevision=74 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=84cbbf6a-186d-413e-91ad-bc2f53476946 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=84cbbf6a-186d-413e-91ad-bc2f53476946 policyRevision=76 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=4fc67321-c661-4fd9-8d93-55b29325182d subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4fc67321-c661-4fd9-8d93-55b29325182d policyRevision=78 subsys=daemon
time="2025-05-20T22:52:49Z" level=info msg="Create endpoint request" addressing="&{10.244.1.227 008d307d-564f-49f6-a23d-b9e90a0090ef default }" containerID=8e639bae91df353a607f319e014432f280015f4c9a54a9c94a6bca43ef6a7d7c datapathConfiguration="&{false false false false false <nil>}" interface=lxc5e900a634a2d k8sPodName=bikexbike/cm-acme-http-solver-g4dth labels="[]" subsys=daemon sync-build=true
time="2025-05-20T22:52:49Z" level=info msg="New endpoint" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1088 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Resolving identity labels (blocking)" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1088 identityLabels="k8s:acme.cert-manager.io/http-domain=608961820,k8s:acme.cert-manager.io/http-token=1208684116,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Skipped non-kubernetes labels when labelling ciliumidentity. All labels will still be used in identity determination" labels="map[k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name:bikexbike]" subsys=crd-allocator
time="2025-05-20T22:52:49Z" level=info msg="Allocated new global key" key="k8s:acme.cert-manager.io/http-domain=608961820;k8s:acme.cert-manager.io/http-token=1208684116;k8s:acme.cert-manager.io/http01-solver=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=bikexbike;" subsys=allocator
time="2025-05-20T22:52:49Z" level=info msg="Identity of endpoint changed" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1088 identity=940 identityLabels="k8s:acme.cert-manager.io/http-domain=608961820,k8s:acme.cert-manager.io/http-token=1208684116,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth oldIdentity="no identity" subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Waiting for endpoint to be generated" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1088 identity=940 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T22:52:49Z" level=info msg="Create endpoint request" addressing="&{10.244.1.197 dbbcb16d-f23b-4870-af7b-a3a7a4384dc2 default }" containerID=1c80f3d7cf9d6bb07e2517d122506e9b40804945769a0a34e2f3ffca68acfd55 datapathConfiguration="&{false false false false false <nil>}" interface=lxc9e8ad6d199d4 k8sPodName=bikexbike/cm-acme-http-solver-7l7xp labels="[]" subsys=daemon sync-build=true
time="2025-05-20T22:52:49Z" level=info msg="New endpoint" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=227 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Resolving identity labels (blocking)" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=227 identityLabels="k8s:acme.cert-manager.io/http-domain=1025771183,k8s:acme.cert-manager.io/http-token=939200182,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Skipped non-kubernetes labels when labelling ciliumidentity. All labels will still be used in identity determination" labels="map[k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name:bikexbike]" subsys=crd-allocator
time="2025-05-20T22:52:49Z" level=info msg="Allocated new global key" key="k8s:acme.cert-manager.io/http-domain=1025771183;k8s:acme.cert-manager.io/http-token=939200182;k8s:acme.cert-manager.io/http01-solver=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=bikexbike;" subsys=allocator
time="2025-05-20T22:52:49Z" level=info msg="Identity of endpoint changed" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=227 identity=11396 identityLabels="k8s:acme.cert-manager.io/http-domain=1025771183,k8s:acme.cert-manager.io/http-token=939200182,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp oldIdentity="no identity" subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Waiting for endpoint to be generated" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=227 identity=11396 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:52:50Z" level=info msg="Rewrote endpoint BPF program" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=78 endpointID=227 identity=11396 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:52:50Z" level=info msg="Successful endpoint creation" containerID=1c80f3d7cf datapathPolicyRevision=78 desiredPolicyRevision=78 endpointID=227 identity=11396 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=daemon
time="2025-05-20T22:52:50Z" level=info msg="Rewrote endpoint BPF program" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=78 endpointID=1088 identity=940 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T22:52:50Z" level=info msg="Successful endpoint creation" containerID=8e639bae91 datapathPolicyRevision=78 desiredPolicyRevision=78 endpointID=1088 identity=940 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=daemon
time="2025-05-20T22:52:50Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T22:52:51Z" level=info msg="regenerating all endpoints" reason= subsys=endpoint-manager
time="2025-05-20T22:53:10Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=cm-acme-http-solver-h5gc9 obj="10.245.119.204:8089/ANY" subsys=k8s-watcher
time="2025-05-20T22:53:10Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=cm-acme-http-solver-d8z46 obj="10.245.21.144:8089/ANY" subsys=k8s-watcher
time="2025-05-20T22:53:10Z" level=info msg="Delete endpoint request" containerID=1c80f3d7cf endpointID=227 k8sNamespace=bikexbike k8sPodName=cm-acme-http-solver-7l7xp subsys=daemon
time="2025-05-20T22:53:10Z" level=info msg="Releasing key" key="[k8s:acme.cert-manager.io/http-domain=1025771183 k8s:acme.cert-manager.io/http-token=939200182 k8s:acme.cert-manager.io/http01-solver=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=bikexbike]" subsys=allocator
time="2025-05-20T22:53:10Z" level=info msg="Delete endpoint request" containerID=8e639bae91 endpointID=1088 k8sNamespace=bikexbike k8sPodName=cm-acme-http-solver-g4dth subsys=daemon
time="2025-05-20T22:53:10Z" level=info msg="Releasing key" key="[k8s:acme.cert-manager.io/http-domain=608961820 k8s:acme.cert-manager.io/http-token=1208684116 k8s:acme.cert-manager.io/http01-solver=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=bikexbike]" subsys=allocator
time="2025-05-20T22:53:10Z" level=info msg="Removed endpoint" containerID=1c80f3d7cf datapathPolicyRevision=78 desiredPolicyRevision=78 endpointID=227 identity=11396 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:53:11Z" level=info msg="Removed endpoint" containerID=8e639bae91 datapathPolicyRevision=78 desiredPolicyRevision=78 endpointID=1088 identity=940 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T23:26:21Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T23:26:22Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=e3089740-11c8-43cd-8db9-7a08030e92af subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e3089740-11c8-43cd-8db9-7a08030e92af policyRevision=80 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=ff88eaca-a834-47f2-81f7-b1a794ebc44f subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=ff88eaca-a834-47f2-81f7-b1a794ebc44f policyRevision=82 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=7358c5f9-b16b-4a6a-b0fb-170cbba3b5c3 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7358c5f9-b16b-4a6a-b0fb-170cbba3b5c3 policyRevision=84 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=a52a23a7-9f13-4955-a5ae-d397074bb257 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a52a23a7-9f13-4955-a5ae-d397074bb257 policyRevision=86 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=a54ecbcf-2bd6-466c-bd10-0d4faa10b351 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a54ecbcf-2bd6-466c-bd10-0d4faa10b351 policyRevision=88 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=928f660a-055c-4c39-be3b-4836da740c3c subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=928f660a-055c-4c39-be3b-4836da740c3c policyRevision=90 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=2b57e550-c919-4916-b4ce-885116c19b48 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2b57e550-c919-4916-b4ce-885116c19b48 policyRevision=92 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-22T22:00:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.208 3bc73c64-f463-4f05-8599-d43a7b4b9a65 default }" containerID=370fd7c6d8c03b733609c2a2494f683699384a0ad21be26a3f3dd02a1d85cdbf datapathConfiguration="&{false false false false false <nil>}" interface=lxc518e75b83fcc k8sPodName=kube-system/coredns-6b79676d8-kx7lx labels="[]" subsys=daemon sync-build=true
time="2025-05-22T22:00:42Z" level=info msg="New endpoint" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=33 ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Resolving identity labels (blocking)" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=33 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Identity of endpoint changed" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=33 identity=30020 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx oldIdentity="no identity" subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Waiting for endpoint to be generated" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=33 identity=30020 ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Rewrote endpoint BPF program" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=92 endpointID=33 identity=30020 ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Successful endpoint creation" containerID=370fd7c6d8 datapathPolicyRevision=92 desiredPolicyRevision=92 endpointID=33 identity=30020 ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=daemon
time="2025-05-22T22:00:43Z" level=info msg="Delete endpoint request" containerID=8b854afabc endpointID=935 k8sNamespace=kube-system k8sPodName=coredns-854895db77-2x6tn subsys=daemon
time="2025-05-22T22:00:43Z" level=info msg="Releasing key" key="[k8s:doks.digitalocean.com/managed=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=coredns k8s:io.kubernetes.pod.namespace=kube-system k8s:k8s-app=kube-dns]" subsys=allocator
time="2025-05-22T22:00:43Z" level=info msg="Removed endpoint" containerID=8b854afabc datapathPolicyRevision=92 desiredPolicyRevision=78 endpointID=935 identity=30020 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=70a7b83f-78e4-4d1f-ae86-3e8b8307204e subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=70a7b83f-78e4-4d1f-ae86-3e8b8307204e policyRevision=94 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=33d97012-ad8b-4a26-b60a-4996d87361c6 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=33d97012-ad8b-4a26-b60a-4996d87361c6 policyRevision=96 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=2bf058b5-6d24-4c81-b8b4-3f47c1e5af0a subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2bf058b5-6d24-4c81-b8b4-3f47c1e5af0a policyRevision=98 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=82e40dcf-d39c-4bb6-8fea-2f88ff3bd62b subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=82e40dcf-d39c-4bb6-8fea-2f88ff3bd62b policyRevision=100 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=1aeb10a9-a550-4638-a8dc-c9d3aae8ff8f subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1aeb10a9-a550-4638-a8dc-c9d3aae8ff8f policyRevision=102 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=5f8d8aff-ff51-45aa-a19a-8045cbb20421 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=5f8d8aff-ff51-45aa-a19a-8045cbb20421 policyRevision=104 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=6cdc2019-52b5-42c9-815e-2dae0a1779c1 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6cdc2019-52b5-42c9-815e-2dae0a1779c1 policyRevision=106 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=a4a0a664-1de3-4ec6-9201-a1383da03db9 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a4a0a664-1de3-4ec6-9201-a1383da03db9 policyRevision=108 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=714287ea-0187-44e5-932e-f6438c6a9adb subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=714287ea-0187-44e5-932e-f6438c6a9adb policyRevision=110 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=fc25f8a8-61a1-4842-94ad-2f9ec63f53bb subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=fc25f8a8-61a1-4842-94ad-2f9ec63f53bb policyRevision=112 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=138dc700-7d6c-477a-8a3c-40861bc2c901 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=138dc700-7d6c-477a-8a3c-40861bc2c901 policyRevision=114 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=1d2205bb-b216-42f6-af3d-6b5d2606e832 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1d2205bb-b216-42f6-af3d-6b5d2606e832 policyRevision=116 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=1e03efb0-697a-425a-9a5b-60346d800a17 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1e03efb0-697a-425a-9a5b-60346d800a17 policyRevision=118 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=490f6703-f8b1-4ec2-8c08-a1a2894eab48 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=490f6703-f8b1-4ec2-8c08-a1a2894eab48 policyRevision=120 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:13Z" level=info msg="Memory available for map entries (0.003% of 4105375744B): 10263439B" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-ct-global-tcp-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-ct-global-any-max set by dynamic sizing to 65536" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-nat-global-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-neigh-global-max set by dynamic sizing to 131072" subsys=config
time="2025-04-17T22:05:13Z" level=info msg="option bpf-sock-rev-map-max set by dynamic sizing to 65536" subsys=config
time="2025-04-17T22:05:13Z" level=info msg=" --agent-health-port='9879'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --agent-labels=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --agent-liveness-update-interval='1s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --agent-not-ready-taint-key='node.cilium.io/agent-not-ready'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --allocator-list-timeout='3m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --allow-icmp-frag-needed='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --allow-localhost='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --annotate-k8s-node='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --api-rate-limit=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --arping-refresh-period='30s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --auto-create-cilium-node-resource='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --auto-direct-node-routes='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bgp-announce-lb-ip='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bgp-announce-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bgp-config-path='/var/lib/cilium/bgp/config.yaml'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-auth-map-max='524288'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-global-any-max='262144'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-global-tcp-max='524288'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-regular-any='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-regular-tcp='6h0m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-regular-tcp-fin='10s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-regular-tcp-syn='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-service-any='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-service-tcp='6h0m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-ct-timeout-service-tcp-grace='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-filter-priority='1'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-fragments-map-max='8192'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-acceleration='disabled'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-affinity-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-algorithm='random'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-dev-ip-addr-inherit=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-dsr-dispatch='opt'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-dsr-l4-xlate='frontend'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-external-clusterip='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-maglev-hash-seed='JLfvgnHc2kaSUFaI'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-maglev-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-maglev-table-size='16381'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-map-max='65536'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-mode='snat'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-proto-diff='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-rev-nat-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-rss-ipv4-src-cidr=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-rss-ipv6-src-cidr=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-service-backend-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-service-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-sock='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-sock-hostns-only='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-lb-source-range-map-max='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-map-dynamic-size-ratio='0.0025'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-map-event-buffers=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-nat-global-max='524288'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-neigh-global-max='524288'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-policy-map-full-reconciliation-interval='15m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-policy-map-max='16384'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-root='/sys/fs/bpf'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bpf-sock-rev-map-max='262144'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --bypass-ip-availability-upon-restore='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --certificates-directory='/var/run/cilium/certs'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cflags=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cgroup-root='/run/cilium/cgroupv2'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cilium-endpoint-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cluster-health-port='4240'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cluster-id='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cluster-name='default'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cluster-pool-ipv4-mask-size='25'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --clustermesh-config='/var/lib/cilium/clustermesh/'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --clustermesh-ip-identities-sync-timeout='1m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cmdref=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-chaining-mode='portmap'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-chaining-target=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-exclusive='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-external-routing='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --cni-log-file='/var/run/cilium/cilium-cni.log'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --config=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --config-dir='/tmp/cilium/config-map'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --config-sources='config-map:kube-system/cilium-config'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --conntrack-gc-interval='0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --conntrack-gc-max-interval='0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --container-ip-local-reserved-ports='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --crd-wait-timeout='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --custom-cni-conf='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --datapath-mode='veth'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --debug='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --debug-verbose=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --derive-masquerade-ip-addr-from-device=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --devices=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --direct-routing-device=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --disable-cnp-status-updates='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --disable-endpoint-crd='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --disable-envoy-version-check='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --disable-iptables-feeder-rules=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dns-max-ips-per-restored-rule='1000'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dns-policy-unload-on-shutdown='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-concurrency-limit='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-concurrency-processing-grace-period='0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-enable-transparent-mode='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-insecure-skip-transparent-mode-check='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-lock-count='128'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-lock-timeout='500ms'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --dnsproxy-socket-linger-timeout='10'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --egress-gateway-policy-map-max='16384'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --egress-gateway-reconciliation-trigger-interval='1s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --egress-masquerade-interfaces=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --egress-multi-home-ip-rule-compat='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-auto-protect-node-port-range='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bandwidth-manager='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bbr='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bgp-control-plane='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bpf-clock-probe='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bpf-masquerade='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-bpf-tproxy='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-cilium-api-server-access='*'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-cilium-endpoint-slice='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-cilium-health-api-server-access='*'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-custom-calls='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-endpoint-health-checking='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-endpoint-routes='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-envoy-config='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-external-ips='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-health-check-nodeport='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-health-checking='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-high-scale-ipcache='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-host-firewall='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-host-legacy-routing='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-host-port='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-hubble='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-hubble-recorder-api='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-icmp-rules='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-identity-mark='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ip-masq-agent='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipsec='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipsec-key-watcher='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipsec-xfrm-state-caching='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4-big-tcp='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4-egress-gateway='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4-fragment-tracking='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv4-masquerade='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv6='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv6-big-tcp='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv6-masquerade='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-ipv6-ndp='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-api-discovery='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-endpoint-slice='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-event-handover='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-networkpolicy='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-k8s-terminating-endpoint='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-l2-announcements='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-l2-neigh-discovery='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-l2-pod-announcements='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-l7-proxy='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-local-node-route='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-local-redirect-policy='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-mke='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-monitor='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-nat46x64-gateway='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-node-port='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-pmtu-discovery='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-policy='default'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-recorder='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-remote-node-identity='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-runtime-device-detection='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-sctp='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-service-topology='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-session-affinity='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-srv6='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-stale-cilium-endpoint-cleanup='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-svc-source-range-check='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-tracing='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-unreachable-routes='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-vtep='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-well-known-identities='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-wireguard='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-wireguard-userspace-fallback='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-xdp-prefilter='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --enable-xt-socket-fallback='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --encrypt-interface=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --encrypt-node='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --endpoint-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --endpoint-queue-size='25'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --endpoint-status=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --envoy-config-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --envoy-log=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --exclude-local-address=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --external-envoy-proxy='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --fixed-identity-mapping=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --fqdn-regex-compile-lru-size='1024'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --gops-port='9890'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-403-msg=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-idle-timeout='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-max-grpc-timeout='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-normalize-path='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-request-timeout='3600'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-retry-count='3'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --http-retry-timeout='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-disable-tls='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-event-buffer-capacity='4095'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-event-queue-size='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-export-file-compress='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-export-file-max-backups='5'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-export-file-max-size-mb='10'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-export-file-path=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-listen-address=':4244'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-metrics=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-metrics-server=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-monitor-events=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-prefer-ipv6='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-recorder-sink-queue-size='1024'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-recorder-storage-path='/var/run/cilium/pcaps'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-skip-unknown-cgroup-ids='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-socket-path='/var/run/cilium/hubble.sock'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-tls-cert-file='/var/lib/cilium/tls/hubble/server.crt'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-tls-client-ca-files='/var/lib/cilium/tls/hubble/client-ca.crt'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --hubble-tls-key-file='/var/lib/cilium/tls/hubble/server.key'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-allocation-mode='crd'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-change-grace-period='5s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-gc-interval='5m'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-heartbeat-timeout='15m'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --identity-restore-grace-period='10m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --install-egress-gateway-routes='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --install-iptables-rules='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --install-no-conntrack-iptables-rules='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ip-allocation-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ip-masq-agent-config-path='/etc/config/ip-masq-agent'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipam='cluster-pool'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipam-cilium-node-update-rate='15s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipam-multi-pool-pre-allocation='default=8'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipsec-key-file=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipsec-key-rotation-duration='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --iptables-lock-timeout='5s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --iptables-random-fully='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-native-routing-cidr='10.244.0.0/16'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-node='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-pod-subnets=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-range='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-service-loopback-address='169.254.42.1'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv4-service-range='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-cluster-alloc-cidr='f00d::/64'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-mcast-device=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-native-routing-cidr=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-node='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-pod-subnets=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-range='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --ipv6-service-range='auto'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --join-cluster='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-api-server='https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-client-burst='10'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-client-qps='5'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-heartbeat-timeout='30s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-kubeconfig-path=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-namespace='kube-system'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-require-ipv4-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-require-ipv6-pod-cidr='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-service-cache-size='128'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-service-proxy-name=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-sync-timeout='3m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --k8s-watcher-endpoint-selector='metadata.name!=kube-scheduler,metadata.name!=kube-controller-manager,metadata.name!=etcd-operator,metadata.name!=gcp-controller-manager'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --keep-config='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kube-proxy-replacement='partial'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kube-proxy-replacement-healthz-bind-address=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-connectivity-timeout='2m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-lease-ttl='15m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-max-consecutive-quorum-errors='2'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-opt=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --kvstore-periodic-sync='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --l2-announcements-lease-duration='15s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --l2-announcements-renew-deadline='5s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --l2-announcements-retry-period='2s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --l2-pod-announcements-interface=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --label-prefix-file=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --labels=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --lib-dir='/var/lib/cilium'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --local-max-addr-scope='252'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --local-router-ipv4=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --local-router-ipv6=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --log-driver=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --log-opt=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --log-system-load='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --max-controller-interval='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-enabled='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-gc-interval='5m0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-mutual-listener-port='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-queue-size='1024'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-rotated-identities-queue-size='1024'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-signal-backoff-duration='1s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-spiffe-trust-domain='spiffe.cilium'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mesh-auth-spire-admin-socket=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --metrics='+cilium_bpf_map_pressure'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mke-cgroup-mount=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --monitor-aggregation='medium'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --monitor-aggregation-flags='all'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --monitor-aggregation-interval='5s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --monitor-queue-size='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --mtu='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-encryption-opt-out-labels='node-role.kubernetes.io/control-plane'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-acceleration='disabled'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-algorithm='random'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-bind-protection='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-mode='snat'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --node-port-range='30000,32767'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --nodes-gc-interval='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --operator-api-serve-addr='127.0.0.1:9234'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --policy-audit-mode='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --policy-queue-size='100'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --policy-trigger-interval='1s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --pprof='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --pprof-address='localhost'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --pprof-port='6060'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --preallocate-bpf-maps='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --prepend-iptables-chains='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --procfs='/host/proc'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --prometheus-serve-addr=':9090'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-connect-timeout='2'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-gid='1337'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-idle-timeout-seconds='60'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-max-connection-duration-seconds='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-max-requests-per-connection='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-prometheus-port='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-xff-num-trusted-hops-egress='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --proxy-xff-num-trusted-hops-ingress='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --read-cni-conf=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --remove-cilium-node-taints='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --restore='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --restored-proxy-ports-age-limit='15'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --route-metric='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --routing-mode='native'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --set-cilium-is-up-condition='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --set-cilium-node-taints='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --sidecar-istio-proxy-image='cilium/istio_proxy'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --single-cluster-route='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --skip-cnp-status-startup-clean='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --socket-path='/var/run/cilium/cilium.sock'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --srv6-encap-mode='reduced'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --state-dir='/var/run/cilium'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --synchronize-k8s-nodes='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-dns-reject-response-code='refused'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-enable-dns-compression='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-endpoint-max-ip-per-hostname='50'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-idle-connection-grace-period='0s'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-max-deferred-connection-deletes='10000'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-min-ttl='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-pre-cache=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-proxy-port='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tofqdns-proxy-response-max-delay='100ms'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --trace-payloadlen='128'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --trace-sock='true'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tunnel=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tunnel-port='0'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --tunnel-protocol='vxlan'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --unmanaged-pod-watcher-interval='15'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --use-cilium-internal-ip-for-ipsec='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --version='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vlan-bpf-bypass=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vtep-cidr=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vtep-endpoint=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vtep-mac=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --vtep-mask=''" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --wireguard-encapsulate='false'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" --write-cni-conf-when-ready='/host/etc/cni/net.d/05-cilium.conflist'" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" _ _ _" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg=" ___|_| |_|_ _ _____" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg="| _| | | | | | |" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg="|___|_|_|_|___|_|_|_|" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg="Cilium 1.14.18 5418622a22 2024-07-03T11:57:56+02:00 go version go1.22.10 linux/amd64" subsys=daemon
time="2025-04-17T22:05:13Z" level=info msg="clang (10.0.0) and kernel (6.1.0) versions: OK!" subsys=linux-datapath
time="2025-04-17T22:05:13Z" level=info msg="linking environment: OK!" subsys=linux-datapath
time="2025-04-17T22:05:13Z" level=info msg="Kernel config file not found: if the agent fails to start, check the system requirements at https://docs.cilium.io/en/stable/operations/system_requirements" subsys=probes
time="2025-04-17T22:05:13Z" level=info msg="Detected mounted BPF filesystem at /sys/fs/bpf" subsys=bpf
time="2025-04-17T22:05:13Z" level=info msg="Mounted cgroupv2 filesystem at /run/cilium/cgroupv2" subsys=cgroups
time="2025-04-17T22:05:13Z" level=info msg="Parsing base label prefixes from default label list" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg="Parsing additional label prefixes from user inputs: []" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg="Final label prefixes to be used for identity evaluation:" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - reserved:.*" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - :io\\.kubernetes\\.pod\\.namespace" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - :io\\.cilium\\.k8s\\.namespace\\.labels" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - :app\\.kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:io\\.kubernetes" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:.*beta\\.kubernetes\\.io" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:k8s\\.io" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:pod-template-generation" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:pod-template-hash" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:controller-revision-hash" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:annotation.*" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=" - !:etcd_node" subsys=labels-filter
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration=1.035509ms function="pprof.init.func1 (cell.go:50)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="321.276µs" function="gops.registerGopsHooks (cell.go:38)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration=1.139622ms function="metrics.init.func1 (cell.go:11)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="12.681µs" function="metrics.init.func2 (cell.go:14)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Spire Delegate API Client is disabled as no socket path is configured" subsys=spire-delegate
time="2025-04-17T22:05:13Z" level=info msg="Mutual authentication handler is disabled as no port is configured" subsys=auth
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration=117.484699ms function="cmd.init.func3 (daemon_main.go:1638)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="42.14µs" function="bgpv1.init.func1 (cell.go:46)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="224.508µs" function="metrics.RegisterCollector (metrics.go:56)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="12.17µs" function="gc.registerSignalHandler (cell.go:47)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="83.243µs" function="utime.initUtimeSync (cell.go:29)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="208.071µs" function="agentliveness.newAgentLivenessUpdater (agent_liveness.go:43)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="200.473µs" function="l2responder.NewL2ResponderReconciler (l2responder.go:63)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Invoked duration="202.202µs" function="garp.newGARPProcessor (processor.go:27)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg=Starting subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Started gops server" address="127.0.0.1:9890" subsys=gops
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="916.22µs" function="gops.registerGopsHooks.func1 (cell.go:43)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="188.629µs" function="metrics.NewRegistry.func1 (registry.go:86)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Establishing connection to apiserver" host="https://f6ce2907-8531-4ab3-861e-4e2affa620b1.k8s.ondigitalocean.com" subsys=k8s-client
time="2025-04-17T22:05:13Z" level=info msg="Serving prometheus metrics on :9090" subsys=metrics
time="2025-04-17T22:05:13Z" level=info msg="Connected to apiserver" subsys=k8s-client
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration=23.698859ms function="client.(*compositeClientset).onStart" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration=4.182395ms function="authmap.newAuthMap.func1 (cell.go:27)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="41.206µs" function="configmap.newMap.func1 (cell.go:23)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="207.231µs" function="signalmap.newMap.func1 (cell.go:44)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="293.972µs" function="nodemap.newNodeMap.func1 (cell.go:23)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="208.526µs" function="eventsmap.newEventsMap.func1 (cell.go:35)" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Generating CNI configuration file with mode portmap" subsys=cni-config
time="2025-04-17T22:05:13Z" level=info msg="Start hook executed" duration="189.175µs" function="*cni.cniConfigManager.Start" subsys=hive
time="2025-04-17T22:05:13Z" level=info msg="Wrote CNI configuration file to /host/etc/cni/net.d/05-cilium.conflist" subsys=cni-config
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=31.520894ms function="datapath.newDatapath.func1 (cells.go:113)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Restored 0 node IDs from the BPF map" subsys=linux-datapath
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="70.695µs" function="datapath.newDatapath.func2 (cells.go:126)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="10.122µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Node].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="2.676µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumNode].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Using autogenerated IPv4 allocation range" subsys=node v4Prefix=10.89.0.0/16
time="2025-04-17T22:05:14Z" level=info msg="no local ciliumnode found, will not restore cilium internal ips from k8s" subsys=daemon
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=107.278973ms function="node.NewLocalNodeStore.func1 (local_node_store.go:76)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="2.44µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Service].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=100.556663ms function="*manager.diffStore[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Service].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="2.286µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s.Endpoints].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Using discoveryv1.EndpointSlice" subsys=k8s
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=200.665286ms function="*manager.diffStore[*github.com/cilium/cilium/pkg/k8s.Endpoints].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="10.687µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Pod].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="1.369µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/slim/k8s/api/core/v1.Namespace].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="1.345µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumNetworkPolicy].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="1.24µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2.CiliumClusterwideNetworkPolicy].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="4.194µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2alpha1.CiliumCIDRGroup].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="41.475µs" function="endpointmanager.newDefaultEndpointManager.func1 (cell.go:201)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="38.347µs" function="cmd.newPolicyTrifecta.func1 (policy.go:135)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="48.114µs" function="*manager.manager.Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Serving cilium node monitor v1.2 API at unix:///var/run/cilium/monitor1_2.sock" subsys=monitor-agent
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="563.032µs" function="agent.newMonitorAgent.func1 (cell.go:61)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="2.707µs" function="*resource.resource[*github.com/cilium/cilium/pkg/k8s/apis/cilium.io/v2alpha1.CiliumL2AnnouncementPolicy].Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="8.079µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Resoring proxy ports from file failed, falling back to restoring from iptables rules" error="stat /var/run/cilium/state/proxy_ports_state.json: no such file or directory" file-path=/var/run/cilium/state/proxy_ports_state.json subsys=proxy
time="2025-04-17T22:05:14Z" level=info msg="Envoy: Starting xDS gRPC server listening on /var/run/cilium/envoy/sockets/xds.sock" subsys=envoy-manager
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=2.80848ms function="proxy.newProxy.func1 (cell.go:63)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="314.303µs" function="signal.provideSignalManager.func1 (cell.go:25)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Datapath signal listener running" subsys=signal
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration=1.386562ms function="auth.registerAuthManager.func1 (cell.go:109)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="3.136µs" function="auth.registerGCJobs.func1 (cell.go:158)" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Start hook executed" duration="12.564µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:05:14Z" level=info msg="Using Managed Neighbor Kernel support" subsys=daemon
time="2025-04-17T22:05:14Z" level=warning msg="Deprecated value for --kube-proxy-replacement: partial (use either \"true\", or \"false\")" subsys=daemon
time="2025-04-17T22:05:14Z" level=info msg="Inheriting MTU from external network interface" device=eth1 ipAddr=10.108.0.2 mtu=1500 subsys=mtu
time="2025-04-17T22:05:14Z" level=info msg="Local boot ID is \"7f7f7913-4eb2-4025-983e-df322a37a2b5\"" subsys=node
time="2025-04-17T22:05:14Z" level=info msg="Removed map pin at /sys/fs/bpf/tc/globals/cilium_ipcache, recreating and re-pinning map cilium_ipcache" file-path=/sys/fs/bpf/tc/globals/cilium_ipcache name=cilium_ipcache subsys=bpf
time="2025-04-17T22:05:14Z" level=info msg="Restored services from maps" failedServices=0 restoredServices=0 subsys=service
time="2025-04-17T22:05:14Z" level=info msg="Restored backends from maps" failedBackends=0 restoredBackends=0 skippedBackends=0 subsys=service
time="2025-04-17T22:05:14Z" level=info msg="Reading old endpoints..." subsys=daemon
time="2025-04-17T22:05:14Z" level=info msg="No old endpoints found." subsys=daemon
time="2025-04-17T22:05:14Z" level=info msg="Waiting until all Cilium CRDs are available" subsys=k8s
time="2025-04-17T22:05:15Z" level=info msg="All Cilium CRDs have been found and are available" subsys=k8s
time="2025-04-17T22:05:15Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-655pn subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-655pn subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=warning msg="Unable to get node resource" error="ciliumnodes.cilium.io \"system-0-655pn\" not found" subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=warning msg="Unable to get node resource" error="ciliumnodes.cilium.io \"system-0-655pn\" not found" subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=info msg="Successfully created CiliumNode resource" subsys=nodediscovery
time="2025-04-17T22:05:15Z" level=warning msg="Unable to create CiliumNode resource, will retry" error="ciliumnodes.cilium.io \"system-0-655pn\" already exists" subsys=nodediscovery
time="2025-04-17T22:05:16Z" level=info msg="Retrieved node information from cilium node" nodeName=system-0-655pn subsys=k8s
time="2025-04-17T22:05:16Z" level=info msg="Received own node information from API server" ipAddr.ipv4=10.108.0.2 ipAddr.ipv6="<nil>" k8sNodeIP=10.108.0.2 labels="map[beta.kubernetes.io/arch:amd64 beta.kubernetes.io/instance-type:s-2vcpu-4gb beta.kubernetes.io/os:linux doks.digitalocean.com/managed:true doks.digitalocean.com/node-id:0c59dc13-5e8b-4d6a-9057-4cbcf64aeb3f doks.digitalocean.com/node-pool:system-0 doks.digitalocean.com/node-pool-id:73347348-171d-4091-a3c9-1d4b0945c964 doks.digitalocean.com/version:1.30.10-do.0 failure-domain.beta.kubernetes.io/region:nyc3 kubernetes.io/arch:amd64 kubernetes.io/hostname:system-0-655pn kubernetes.io/os:linux node.kubernetes.io/instance-type:s-2vcpu-4gb region:nyc3 topology.kubernetes.io/region:nyc3]" nodeName=system-0-655pn subsys=k8s v4Prefix=10.244.1.128/25 v6Prefix="<nil>"
time="2025-04-17T22:05:16Z" level=info msg="k8s mode: Allowing localhost to reach local endpoints" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Detected devices" devices="[]" subsys=linux-datapath
time="2025-04-17T22:05:16Z" level=info msg="Enabling k8s event listener" subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Removing stale endpoint interfaces" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Waiting until local node addressing before starting watchers depending on it" subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Skipping kvstore configuration" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Initializing node addressing" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Initializing cluster-pool IPAM" subsys=ipam v4Prefix=10.244.1.128/25 v6Prefix="<nil>"
time="2025-04-17T22:05:16Z" level=info msg="Restoring endpoints..." subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Endpoints restored" failed=0 restored=0 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Addressing information:" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Cluster-Name: default" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Cluster-ID: 0" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Local node-name: system-0-655pn" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Node-IPv6: <nil>" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" External-Node IPv4: 10.108.0.2" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Internal-Node IPv4: 10.244.1.165" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" IPv4 allocation prefix: 10.244.1.128/25" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" IPv4 native routing prefix: 10.244.0.0/16" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Loopback IPv4: 169.254.42.1" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=040b8479-1053-4ec0-97c9-265119521782 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=040b8479-1053-4ec0-97c9-265119521782 policyRevision=2 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=66809df7-c24d-4610-8d65-488b27d23a9e subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=66809df7-c24d-4610-8d65-488b27d23a9e policyRevision=3 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" Local IPv4 addresses:" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" - 64.225.60.89" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" - 10.17.0.6" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" - 10.108.0.2" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg=" - 64.225.60.89" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-655pn subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-65529 subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=6415158a-1311-4121-9735-85c0b7e33fe3 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6415158a-1311-4121-9735-85c0b7e33fe3 policyRevision=4 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Adding local node to cluster" node="{system-0-655pn default [{ExternalIP 64.225.60.89} {InternalIP 10.108.0.2} {CiliumInternalIP 10.244.1.165} {ExternalIP 64.225.60.89}] 10.244.1.128/25 [] <nil> [] 10.244.1.222 <nil> <nil> <nil> 0 local 0 map[beta.kubernetes.io/arch:amd64 beta.kubernetes.io/instance-type:s-2vcpu-4gb beta.kubernetes.io/os:linux doks.digitalocean.com/managed:true doks.digitalocean.com/node-id:0c59dc13-5e8b-4d6a-9057-4cbcf64aeb3f doks.digitalocean.com/node-pool:system-0 doks.digitalocean.com/node-pool-id:73347348-171d-4091-a3c9-1d4b0945c964 doks.digitalocean.com/version:1.30.10-do.0 failure-domain.beta.kubernetes.io/region:nyc3 kubernetes.io/arch:amd64 kubernetes.io/hostname:system-0-655pn kubernetes.io/os:linux node.kubernetes.io/instance-type:s-2vcpu-4gb region:nyc3 topology.kubernetes.io/region:nyc3] map[] 1 7f7f7913-4eb2-4025-983e-df322a37a2b5}" subsys=nodediscovery
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=9c50c780-5e51-481e-a8e7-dd3fad62b4b2 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=9c50c780-5e51-481e-a8e7-dd3fad62b4b2 policyRevision=5 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=03c20995-2a4f-47c8-9529-395b38d9d919 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=03c20995-2a4f-47c8-9529-395b38d9d919 policyRevision=6 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=2756fcc8-31bc-4236-8f97-16d28cd34cf3 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2756fcc8-31bc-4236-8f97-16d28cd34cf3 policyRevision=7 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=7d1bb8e4-1969-4671-a7ac-2aca76969c7b subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7d1bb8e4-1969-4671-a7ac-2aca76969c7b policyRevision=8 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Creating or updating CiliumNode resource" node=system-0-655pn subsys=nodediscovery
time="2025-04-17T22:05:16Z" level=info msg="Waiting until all pre-existing resources have been received" subsys=k8s-watcher
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-6tk3b subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="Node updated" clusterName=default nodeName=system-0-6tk3r subsys=nodemanager
time="2025-04-17T22:05:16Z" level=info msg="Annotating k8s node" subsys=daemon v4CiliumHostIP.IPv4=10.244.1.165 v4IngressIP.IPv4="<nil>" v4Prefix=10.244.1.128/25 v4healthIP.IPv4=10.244.1.222 v6CiliumHostIP.IPv6="<nil>" v6IngressIP.IPv6="<nil>" v6Prefix="<nil>" v6healthIP.IPv6="<nil>"
time="2025-04-17T22:05:16Z" level=info msg="Initializing identity allocator" subsys=identity-cache
time="2025-04-17T22:05:16Z" level=info msg="Allocating identities between range" cluster-id=0 max=65535 min=256 subsys=identity-cache
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.forwarding sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.rp_filter sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.accept_local sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_host.send_redirects sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.forwarding sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.rp_filter sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.accept_local sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.cilium_net.send_redirects sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.core.bpf_jit_enable sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.conf.all.rp_filter sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=net.ipv4.fib_multipath_use_neigh sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=kernel.unprivileged_bpf_disabled sysParamValue=1
time="2025-04-17T22:05:16Z" level=info msg="Setting sysctl" subsys=sysctl sysParamName=kernel.timer_migration sysParamValue=0
time="2025-04-17T22:05:16Z" level=info msg="Setting up BPF datapath" bpfClockSource=ktime bpfInsnSet="<nil>" subsys=datapath-loader
time="2025-04-17T22:05:16Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-17T22:05:16Z" level=info msg="Iptables rules installed" subsys=iptables
time="2025-04-17T22:05:16Z" level=info msg="Adding new proxy port rules for cilium-dns-egress:45473" id=cilium-dns-egress subsys=proxy
time="2025-04-17T22:05:16Z" level=info msg="Iptables proxy rules installed" subsys=iptables
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration=2.455332543s function="cmd.newDaemonPromise.func1 (daemon_main.go:1694)" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration="157.644µs" function="utime.initUtimeSync.func1 (cell.go:33)" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration="6.008µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration="247.07µs" function="l2respondermap.newMap.func1 (l2_responder_map4.go:44)" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Start hook executed" duration="4.871µs" function="*job.group.Start" subsys=hive
time="2025-04-17T22:05:16Z" level=info msg="Starting IP identity watcher" subsys=ipcache
time="2025-04-17T22:05:16Z" level=info msg="Initializing daemon" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Validating configured node address ranges" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Starting connection tracking garbage collector" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Initial scan of connection tracking completed" subsys=ct-gc
time="2025-04-17T22:05:16Z" level=info msg="Regenerating restored endpoints" numRestored=0 subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Creating host endpoint" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="New endpoint" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=753 ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:16Z" level=info msg="Resolving identity labels (blocking)" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=753 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:doks.digitalocean.com/node-id=0c59dc13-5e8b-4d6a-9057-4cbcf64aeb3f,k8s:doks.digitalocean.com/node-pool-id=73347348-171d-4091-a3c9-1d4b0945c964,k8s:doks.digitalocean.com/node-pool=system-0,k8s:doks.digitalocean.com/version=1.30.10-do.0,k8s:node.kubernetes.io/instance-type=s-2vcpu-4gb,k8s:region=nyc3,k8s:topology.kubernetes.io/region=nyc3,reserved:host" ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:16Z" level=info msg="Identity of endpoint changed" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=753 identity=1 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:doks.digitalocean.com/node-id=0c59dc13-5e8b-4d6a-9057-4cbcf64aeb3f,k8s:doks.digitalocean.com/node-pool-id=73347348-171d-4091-a3c9-1d4b0945c964,k8s:doks.digitalocean.com/node-pool=system-0,k8s:doks.digitalocean.com/version=1.30.10-do.0,k8s:node.kubernetes.io/instance-type=s-2vcpu-4gb,k8s:region=nyc3,k8s:topology.kubernetes.io/region=nyc3,reserved:host" ipv4= ipv6= k8sPodName=/ oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:05:16Z" level=info msg="Launching Cilium health daemon" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Launching Cilium health endpoint" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Started healthz status API server" address="127.0.0.1:9879" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Processing queued endpoint deletion requests from /var/run/cilium/deleteQueue" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="processing 0 queued deletion requests" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Initializing Cilium API" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Finished regenerating restored endpoints" regenerated=0 subsys=daemon total=0
time="2025-04-17T22:05:16Z" level=info msg="Deleted orphan backends" orphanBackends=0 subsys=service
time="2025-04-17T22:05:16Z" level=info msg="Cleaning up Cilium health endpoint" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Removed stale bpf map" file-path=/sys/fs/bpf/tc/globals/cilium_lb4_source_range subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Daemon initialization completed" bootstrapTime=3.694082514s subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Configuring Hubble server" eventQueueSize=2048 maxFlows=4095 subsys=hubble
time="2025-04-17T22:05:16Z" level=info msg="Serving cilium API at unix:///var/run/cilium/cilium.sock" subsys=daemon
time="2025-04-17T22:05:16Z" level=info msg="Beginning to read perf buffer" startTime="2025-04-17 22:05:16.9790939 +0000 UTC m=+3.777584086" subsys=monitor-agent
time="2025-04-17T22:05:16Z" level=info msg="Starting local Hubble server" address="unix:///var/run/cilium/hubble.sock" subsys=hubble
time="2025-04-17T22:05:16Z" level=info msg="Starting Hubble server" address=":4244" subsys=hubble
time="2025-04-17T22:05:17Z" level=info msg="Compiled new BPF template" BPFCompilationTime=505.310648ms file-path=/var/run/cilium/state/templates/5aaf125271bf9916651b97512cb053ef03b67f2b3978c7d7c70a926d9bb7357b/bpf_host.o subsys=datapath-loader
time="2025-04-17T22:05:17Z" level=info msg="Create endpoint request" addressing="&{10.244.1.205 7d10d271-a1dc-4291-a571-564a5409e74f default }" containerID=53e45f9f6a10b8b2b62a9ab11f70e73ef31eccabf1d339d90b22569ef50d8d9d datapathConfiguration="&{false false false false false <nil>}" interface=lxc7f8654bb9ddf k8sPodName=kube-system/konnectivity-agent-d54mg labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:05:17Z" level=info msg="New endpoint" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=51 ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Resolving identity labels (blocking)" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=51 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=konnectivity-agent" ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=konnectivity-agent;" subsys=allocator
time="2025-04-17T22:05:17Z" level=info msg="Identity of endpoint changed" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=51 identity=32430 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=konnectivity-agent,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=konnectivity-agent" ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Waiting for endpoint to be generated" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=51 identity=32430 ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-17T22:05:17Z" level=info msg="Rewrote endpoint BPF program" containerID= datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=753 identity=1 ipv4= ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="New endpoint" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=136 ipv4=10.244.1.222 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Resolving identity labels (blocking)" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=136 identityLabels="reserved:health" ipv4=10.244.1.222 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:17Z" level=info msg="Identity of endpoint changed" containerID= datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=136 identity=4 identityLabels="reserved:health" ipv4=10.244.1.222 ipv6= k8sPodName=/ oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:05:18Z" level=info msg="Serving cilium health API at unix:///var/run/cilium/health.sock" subsys=health-server
time="2025-04-17T22:05:19Z" level=info msg="Compiled new BPF template" BPFCompilationTime=1.817672457s file-path=/var/run/cilium/state/templates/66fcee6b127281654b77182ef809a08d06c4bda30601191c13385ae4192cb86e/bpf_lxc.o subsys=datapath-loader
time="2025-04-17T22:05:19Z" level=info msg="Rewrote endpoint BPF program" containerID=53e45f9f6a datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=51 identity=32430 ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=endpoint
time="2025-04-17T22:05:19Z" level=info msg="Successful endpoint creation" containerID=53e45f9f6a datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=51 identity=32430 ipv4=10.244.1.205 ipv6= k8sPodName=kube-system/konnectivity-agent-d54mg subsys=daemon
time="2025-04-17T22:05:19Z" level=info msg="Rewrote endpoint BPF program" containerID= datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=136 identity=4 ipv4=10.244.1.222 ipv6= k8sPodName=/ subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Create endpoint request" addressing="&{10.244.1.196 62e942ce-66c5-4806-b859-bc8ba747cc19 default }" containerID=8b854afabcebb3bbc0f24c53651f0d10f7845c0fc063a065b5a5f9393c95015e datapathConfiguration="&{false false false false false <nil>}" interface=lxc72bf4f377db8 k8sPodName=kube-system/coredns-854895db77-2x6tn labels="[]" subsys=daemon sync-build=true
time="2025-04-17T22:05:40Z" level=info msg="New endpoint" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=935 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Resolving identity labels (blocking)" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=935 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Reusing existing global key" key="k8s:doks.digitalocean.com/managed=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=coredns;k8s:io.kubernetes.pod.namespace=kube-system;k8s:k8s-app=kube-dns;" subsys=allocator
time="2025-04-17T22:05:40Z" level=info msg="Identity of endpoint changed" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=935 identity=30020 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn oldIdentity="no identity" subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Waiting for endpoint to be generated" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=935 identity=30020 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Rewrote endpoint BPF program" containerID=8b854afabc datapathPolicyRevision=0 desiredPolicyRevision=8 endpointID=935 identity=30020 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-04-17T22:05:40Z" level=info msg="Successful endpoint creation" containerID=8b854afabc datapathPolicyRevision=8 desiredPolicyRevision=8 endpointID=935 identity=30020 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=daemon
time="2025-04-17T22:06:16Z" level=info msg="Node deleted" clusterName=default nodeName=system-0-6tk3b subsys=nodemanager
time="2025-04-17T22:06:56Z" level=info msg="Node deleted" clusterName=default nodeName=system-0-6tk3r subsys=nodemanager
time="2025-04-17T22:10:16Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0005950927734375 newInterval=7m30s subsys=map-ct
time="2025-04-17T22:17:46Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.00102996826171875 newInterval=11m15s subsys=map-ct
time="2025-04-17T22:29:01Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0015411376953125 newInterval=16m53s subsys=map-ct
time="2025-04-17T22:45:54Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0023040771484375 newInterval=25m20s subsys=map-ct
time="2025-04-17T23:11:14Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.00347137451171875 newInterval=38m0s subsys=map-ct
time="2025-04-17T23:49:14Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.005157470703125 newInterval=57m0s subsys=map-ct
time="2025-04-18T00:46:14Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.007720947265625 newInterval=1h25m30s subsys=map-ct
time="2025-04-18T02:11:44Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.01155853271484375 newInterval=2h8m15s subsys=map-ct
time="2025-04-18T04:19:59Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.01715850830078125 newInterval=3h12m23s subsys=map-ct
time="2025-04-18T07:32:23Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.0255126953125 newInterval=4h48m35s subsys=map-ct
time="2025-04-18T12:20:58Z" level=info msg="Conntrack garbage collector interval recalculated" deleteRatio=0.03720855712890625 newInterval=7h12m53s subsys=map-ct
time="2025-04-19T07:37:19Z" level=warning msg="UpdateIdentities: Skipping Delete of a non-existing identity" identity=16777217 subsys=policy
time="2025-04-19T07:37:19Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-19T07:37:20Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=6dc3d794-8c3b-4f41-a3a9-80039d5dd099 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6dc3d794-8c3b-4f41-a3a9-80039d5dd099 policyRevision=10 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=d8fcfd77-a41f-4bf6-9e15-0761d8646b13 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d8fcfd77-a41f-4bf6-9e15-0761d8646b13 policyRevision=12 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=a54a422d-3344-4ddb-b735-f3d1a419b92f subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a54a422d-3344-4ddb-b735-f3d1a419b92f policyRevision=14 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=ab898772-f5c9-4e04-9f06-74f83ef5a562 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=ab898772-f5c9-4e04-9f06-74f83ef5a562 policyRevision=16 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=f555af24-f092-4040-8028-fb956bb9d41b subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=f555af24-f092-4040-8028-fb956bb9d41b policyRevision=18 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=c3ac0f15-63f8-4b2e-a1c9-eaa7be076039 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c3ac0f15-63f8-4b2e-a1c9-eaa7be076039 policyRevision=20 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=1d14c6f1-e698-4fec-8b90-3285a9555ee1 subsys=daemon
time="2025-04-19T07:38:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-19T07:38:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1d14c6f1-e698-4fec-8b90-3285a9555ee1 policyRevision=22 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=55e0468e-9fb9-42d1-866f-94c482737362 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=55e0468e-9fb9-42d1-866f-94c482737362 policyRevision=24 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=33865cba-4a2b-4a97-8fbe-bb5433e34a7c subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=33865cba-4a2b-4a97-8fbe-bb5433e34a7c policyRevision=26 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=098726c6-5820-431b-a507-6705f5f9bd77 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=098726c6-5820-431b-a507-6705f5f9bd77 policyRevision=28 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=dc9228f1-3a2f-40e0-8bc3-37289428db53 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=dc9228f1-3a2f-40e0-8bc3-37289428db53 policyRevision=30 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=d1b4ad49-a81c-4ce8-8ea7-0f678fa9a8f5 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=d1b4ad49-a81c-4ce8-8ea7-0f678fa9a8f5 policyRevision=32 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=6b267068-7ab4-4890-93be-69b5f1f59354 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6b267068-7ab4-4890-93be-69b5f1f59354 policyRevision=34 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-22T07:41:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=74435a11-d798-4d20-9137-091dd8fc35f7 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=74435a11-d798-4d20-9137-091dd8fc35f7 policyRevision=36 subsys=daemon
time="2025-04-22T07:41:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-04-26T18:05:51Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:05:51Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:06:52Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:06:52Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-26T18:27:31Z" level=warning msg="service not found" k8sNamespace=ingress-nginx k8sSvcName=bikexbike obj="10.245.147.216:80/ANY" subsys=k8s-watcher
time="2025-04-26T18:27:31Z" level=warning msg="service not found" k8sNamespace=ingress-nginx k8sSvcName=bikexbike obj="10.245.147.216:443/ANY" subsys=k8s-watcher
time="2025-04-27T20:18:38Z" level=info msg="Create endpoint request" addressing="&{10.244.1.223 bd160c34-044b-4807-90c1-064865370e03 default }" containerID=b537ca15ed4b1101be846c970cb24a9cfe1a5305a7ee47e8e267b7981e872fc7 datapathConfiguration="&{false false false false false <nil>}" interface=lxccb2cec03d915 k8sPodName=kubeintel/cm-acme-http-solver-dfvjs labels="[]" subsys=daemon sync-build=true
time="2025-04-27T20:18:38Z" level=info msg="New endpoint" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3857 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:18:38Z" level=info msg="Resolving identity labels (blocking)" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3857 identityLabels="k8s:acme.cert-manager.io/http-domain=1001522845,k8s:acme.cert-manager.io/http-token=812125645,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:18:38Z" level=info msg="Skipped non-kubernetes labels when labelling ciliumidentity. All labels will still be used in identity determination" labels="map[k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name:kubeintel]" subsys=crd-allocator
time="2025-04-27T20:18:38Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-27T20:18:38Z" level=info msg="Allocated new global key" key="k8s:acme.cert-manager.io/http-domain=1001522845;k8s:acme.cert-manager.io/http-token=812125645;k8s:acme.cert-manager.io/http01-solver=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=kubeintel;" subsys=allocator
time="2025-04-27T20:18:38Z" level=info msg="Identity of endpoint changed" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3857 identity=34403 identityLabels="k8s:acme.cert-manager.io/http-domain=1001522845,k8s:acme.cert-manager.io/http-token=812125645,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs oldIdentity="no identity" subsys=endpoint
time="2025-04-27T20:18:38Z" level=info msg="Waiting for endpoint to be generated" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=3857 identity=34403 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:18:39Z" level=info msg="Rewrote endpoint BPF program" containerID=b537ca15ed datapathPolicyRevision=0 desiredPolicyRevision=36 endpointID=3857 identity=34403 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:18:39Z" level=info msg="Successful endpoint creation" containerID=b537ca15ed datapathPolicyRevision=36 desiredPolicyRevision=36 endpointID=3857 identity=34403 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=daemon
time="2025-04-27T20:18:39Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-27T20:18:59Z" level=warning msg="service not found" k8sNamespace=kubeintel k8sSvcName=cm-acme-http-solver-cmhbq obj="10.245.224.188:8089/ANY" subsys=k8s-watcher
time="2025-04-27T20:18:59Z" level=info msg="Delete endpoint request" containerID=b537ca15ed endpointID=3857 k8sNamespace=kubeintel k8sPodName=cm-acme-http-solver-dfvjs subsys=daemon
time="2025-04-27T20:18:59Z" level=info msg="Releasing key" key="[k8s:acme.cert-manager.io/http-domain=1001522845 k8s:acme.cert-manager.io/http-token=812125645 k8s:acme.cert-manager.io/http01-solver=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=kubeintel]" subsys=allocator
time="2025-04-27T20:18:59Z" level=info msg="Removed endpoint" containerID=b537ca15ed datapathPolicyRevision=36 desiredPolicyRevision=36 endpointID=3857 identity=34403 ipv4=10.244.1.223 ipv6= k8sPodName=kubeintel/cm-acme-http-solver-dfvjs subsys=endpoint
time="2025-04-27T20:51:16Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=2b3d26b6-3f49-4c04-a7ad-2f66e6b0c367 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2b3d26b6-3f49-4c04-a7ad-2f66e6b0c367 policyRevision=38 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=0a865511-47ed-4634-8cf3-0f7525a0e5cc subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=0a865511-47ed-4634-8cf3-0f7525a0e5cc policyRevision=40 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=4f462fcc-6cce-4776-bb1e-a805b777a7da subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4f462fcc-6cce-4776-bb1e-a805b777a7da policyRevision=42 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=4ea53fc9-11be-4a31-9515-83715cfec69a subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4ea53fc9-11be-4a31-9515-83715cfec69a policyRevision=44 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=c8fc6f9f-d896-4333-a947-b2829ddfc661 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c8fc6f9f-d896-4333-a947-b2829ddfc661 policyRevision=46 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=35ae6b41-78e8-4c5d-814e-3793da4a716f subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=35ae6b41-78e8-4c5d-814e-3793da4a716f policyRevision=48 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-04-29T12:38:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=c54698d1-1621-4768-bfb2-8a5cdfea66f7 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=c54698d1-1621-4768-bfb2-8a5cdfea66f7 policyRevision=50 subsys=daemon
time="2025-04-29T12:38:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-04T17:34:05Z" level=info msg="Create endpoint request" addressing="&{10.244.1.190 5136d175-4ed5-42a3-96e6-b9454e63c119 default }" containerID=ae7e28295da4c07b493635b5e30c0d0d6970296baf09923a31687b6a789f9194 datapathConfiguration="&{false false false false false <nil>}" interface=lxca76f31062f2c k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz labels="[]" subsys=daemon sync-build=true
time="2025-05-04T17:34:05Z" level=info msg="New endpoint" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1537 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Resolving identity labels (blocking)" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1537 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Reusing existing global key" key="k8s:app=nginx;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;" subsys=allocator
time="2025-05-04T17:34:05Z" level=info msg="Identity of endpoint changed" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1537 identity=60447 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz oldIdentity="no identity" subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Waiting for endpoint to be generated" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1537 identity=60447 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Rewrote endpoint BPF program" containerID=ae7e28295d datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=1537 identity=60447 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-04T17:34:05Z" level=info msg="Successful endpoint creation" containerID=ae7e28295d datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=1537 identity=60447 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=daemon
time="2025-05-05T00:12:01Z" level=info msg="Create endpoint request" addressing="&{10.244.1.157 e7daef0e-04d2-44ff-b77e-81a1d9a0a4ab default }" containerID=91efb54a92e333c39dfe14c34684d67d9a42be8522258b160baa8a0a2190ec16 datapathConfiguration="&{false false false false false <nil>}" interface=lxc62302748819b k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp labels="[]" subsys=daemon sync-build=true
time="2025-05-05T00:12:01Z" level=info msg="New endpoint" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=471 ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Resolving identity labels (blocking)" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=471 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/instance=kubeintel;k8s:app.kubernetes.io/name=kubeintel;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=kubeintel;k8s:io.kubernetes.pod.namespace=kubeintel;" subsys=allocator
time="2025-05-05T00:12:01Z" level=info msg="Identity of endpoint changed" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=471 identity=23569 identityLabels="k8s:app.kubernetes.io/instance=kubeintel,k8s:app.kubernetes.io/name=kubeintel,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kubeintel,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=kubeintel,k8s:io.kubernetes.pod.namespace=kubeintel" ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp oldIdentity="no identity" subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Waiting for endpoint to be generated" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=471 identity=23569 ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Rewrote endpoint BPF program" containerID=91efb54a92 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=471 identity=23569 ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=endpoint
time="2025-05-05T00:12:01Z" level=info msg="Successful endpoint creation" containerID=91efb54a92 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=471 identity=23569 ipv4=10.244.1.157 ipv6= k8sPodName=kubeintel/kubeintel-55955b8c46-ftprp subsys=daemon
time="2025-05-06T03:53:41Z" level=info msg="Create endpoint request" addressing="&{10.244.1.189 b8b18320-ac3e-40b4-881d-e811dab2b460 default }" containerID=76de9f3a728fda8007ff2a915648ba69f6a7d61cc1925195734f47095d8d35a3 datapathConfiguration="&{false false false false false <nil>}" interface=lxc0d71c0b24747 k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh labels="[]" subsys=daemon sync-build=true
time="2025-05-06T03:53:41Z" level=info msg="New endpoint" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=713 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Resolving identity labels (blocking)" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=713 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Reusing existing global key" key="k8s:app.kubernetes.io/instance=bikexbike;k8s:app.kubernetes.io/name=bikexbike;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=bikexbike;" subsys=allocator
time="2025-05-06T03:53:41Z" level=info msg="Identity of endpoint changed" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=713 identity=29331 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh oldIdentity="no identity" subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Waiting for endpoint to be generated" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=713 identity=29331 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Rewrote endpoint BPF program" containerID=76de9f3a72 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=713 identity=29331 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T03:53:41Z" level=info msg="Successful endpoint creation" containerID=76de9f3a72 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=713 identity=29331 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=daemon
time="2025-05-06T15:12:32Z" level=info msg="Create endpoint request" addressing="&{10.244.1.173 37e60dcf-d105-43fb-9d1d-8f24cd310df6 default }" containerID=2e4196cc018db244fcdbb9693ae97273c8e95016049d430e4735bf321cc11062 datapathConfiguration="&{false false false false false <nil>}" interface=lxcee23b4ccdc87 k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn labels="[]" subsys=daemon sync-build=true
time="2025-05-06T15:12:32Z" level=info msg="New endpoint" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2200 ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Resolving identity labels (blocking)" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2200 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Identity of endpoint changed" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2200 identity=29331 identityLabels="k8s:app.kubernetes.io/instance=bikexbike,k8s:app.kubernetes.io/name=bikexbike,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn oldIdentity="no identity" subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Waiting for endpoint to be generated" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=2200 identity=29331 ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Rewrote endpoint BPF program" containerID=2e4196cc01 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=2200 identity=29331 ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=endpoint
time="2025-05-06T15:12:32Z" level=info msg="Successful endpoint creation" containerID=2e4196cc01 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=2200 identity=29331 ipv4=10.244.1.173 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-5qgwn subsys=daemon
time="2025-05-06T15:13:02Z" level=info msg="Delete endpoint request" containerID=76de9f3a72 endpointID=713 k8sNamespace=bikexbike k8sPodName=bikexbike-588cddd898-fxlnh subsys=daemon
time="2025-05-06T15:13:02Z" level=info msg="Releasing key" key="[k8s:app.kubernetes.io/instance=bikexbike k8s:app.kubernetes.io/name=bikexbike k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=bikexbike]" subsys=allocator
time="2025-05-06T15:13:02Z" level=info msg="Removed endpoint" containerID=76de9f3a72 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=713 identity=29331 ipv4=10.244.1.189 ipv6= k8sPodName=bikexbike/bikexbike-588cddd898-fxlnh subsys=endpoint
time="2025-05-06T19:15:14Z" level=info msg="Delete endpoint request" containerID=ae7e28295d endpointID=1537 k8sNamespace=demo k8sPodName=nginx-deployment-86dcfdf4c6-mt7gz subsys=daemon
time="2025-05-06T19:15:14Z" level=info msg="Releasing key" key="[k8s:app=nginx k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=demo]" subsys=allocator
time="2025-05-06T19:15:14Z" level=info msg="Removed endpoint" containerID=ae7e28295d datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=1537 identity=60447 ipv4=10.244.1.190 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-mt7gz subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Create endpoint request" addressing="&{10.244.1.236 03120237-78f2-439d-bd10-09556e2ba49c default }" containerID=ccb4035800d5f85d9593f911b15878f4335a4da20da7412d7978ee6e6a051413 datapathConfiguration="&{false false false false false <nil>}" interface=lxc3719859f07d9 k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f labels="[]" subsys=daemon sync-build=true
time="2025-05-06T19:16:49Z" level=info msg="New endpoint" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=256 ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Resolving identity labels (blocking)" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=256 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Reusing existing global key" key="k8s:app=nginx;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;" subsys=allocator
time="2025-05-06T19:16:49Z" level=info msg="Identity of endpoint changed" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=256 identity=60447 identityLabels="k8s:app=nginx,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo" ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f oldIdentity="no identity" subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Waiting for endpoint to be generated" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=256 identity=60447 ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Rewrote endpoint BPF program" containerID=ccb4035800 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=256 identity=60447 ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=endpoint
time="2025-05-06T19:16:49Z" level=info msg="Successful endpoint creation" containerID=ccb4035800 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=256 identity=60447 ipv4=10.244.1.236 ipv6= k8sPodName=demo/nginx-deployment-86dcfdf4c6-x7r8f subsys=daemon
time="2025-05-08T22:09:27Z" level=info msg="Create endpoint request" addressing="&{10.244.1.225 948ba0d3-18d3-436b-b236-96cbe37459ed default }" containerID=38c1670b13928c712aeb5522f0d8d47e84c4c53d80f03f95465ce1ffb4b29b6f datapathConfiguration="&{false false false false false <nil>}" interface=lxc58f952d2e9d8 k8sPodName=demo/pi-2-2ks2s labels="[]" subsys=daemon sync-build=true
time="2025-05-08T22:09:27Z" level=info msg="New endpoint" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=211 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="Resolving identity labels (blocking)" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=211 identityLabels="k8s:batch.kubernetes.io/controller-uid=0317f955-b62d-4825-8cbf-76309ce47142,k8s:batch.kubernetes.io/job-name=pi-2,k8s:controller-uid=0317f955-b62d-4825-8cbf-76309ce47142,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo,k8s:job-name=pi-2" ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="Skipped non-kubernetes labels when labelling ciliumidentity. All labels will still be used in identity determination" labels="map[k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name:demo]" subsys=crd-allocator
time="2025-05-08T22:09:27Z" level=info msg="Allocated new global key" key="k8s:batch.kubernetes.io/controller-uid=0317f955-b62d-4825-8cbf-76309ce47142;k8s:batch.kubernetes.io/job-name=pi-2;k8s:controller-uid=0317f955-b62d-4825-8cbf-76309ce47142;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=demo;k8s:job-name=pi-2;" subsys=allocator
time="2025-05-08T22:09:27Z" level=info msg="Identity of endpoint changed" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=211 identity=2226 identityLabels="k8s:batch.kubernetes.io/controller-uid=0317f955-b62d-4825-8cbf-76309ce47142,k8s:batch.kubernetes.io/job-name=pi-2,k8s:controller-uid=0317f955-b62d-4825-8cbf-76309ce47142,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=demo,k8s:job-name=pi-2" ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s oldIdentity="no identity" subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="Waiting for endpoint to be generated" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=211 identity=2226 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-08T22:09:27Z" level=info msg="Rewrote endpoint BPF program" containerID=38c1670b13 datapathPolicyRevision=0 desiredPolicyRevision=50 endpointID=211 identity=2226 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:09:27Z" level=info msg="Successful endpoint creation" containerID=38c1670b13 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=211 identity=2226 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=daemon
time="2025-05-08T22:09:53Z" level=info msg="Delete endpoint request" containerID=38c1670b13 endpointID=211 k8sNamespace=demo k8sPodName=pi-2-2ks2s subsys=daemon
time="2025-05-08T22:09:53Z" level=info msg="Releasing key" key="[k8s:batch.kubernetes.io/controller-uid=0317f955-b62d-4825-8cbf-76309ce47142 k8s:batch.kubernetes.io/job-name=pi-2 k8s:controller-uid=0317f955-b62d-4825-8cbf-76309ce47142 k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=demo k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=demo k8s:job-name=pi-2]" subsys=allocator
time="2025-05-08T22:09:53Z" level=info msg="Removed endpoint" containerID=38c1670b13 datapathPolicyRevision=50 desiredPolicyRevision=50 endpointID=211 identity=2226 ipv4=10.244.1.225 ipv6= k8sPodName=demo/pi-2-2ks2s subsys=endpoint
time="2025-05-08T22:41:19Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=b7ad43b9-f312-485c-8c3f-7ad5327a160d subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=b7ad43b9-f312-485c-8c3f-7ad5327a160d policyRevision=52 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=e53e63a4-7df3-4e4b-80c0-ac707b9b916d subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e53e63a4-7df3-4e4b-80c0-ac707b9b916d policyRevision=54 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=f9733521-c7be-4054-afa5-279f2c4b2137 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=f9733521-c7be-4054-afa5-279f2c4b2137 policyRevision=56 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=21c67fb2-89fd-4de2-9a05-63f85bf4b0b2 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=21c67fb2-89fd-4de2-9a05-63f85bf4b0b2 policyRevision=58 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=b96cd4f2-28dd-45a6-a1c3-002dd89ac752 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=b96cd4f2-28dd-45a6-a1c3-002dd89ac752 policyRevision=60 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=a6fbe11e-a123-4a05-bbc6-3b3f87b30b41 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a6fbe11e-a123-4a05-bbc6-3b3f87b30b41 policyRevision=62 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-13T08:26:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=35dc9be9-c917-4f13-ad65-0010bbbad3bd subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=35dc9be9-c917-4f13-ad65-0010bbbad3bd policyRevision=64 subsys=daemon
time="2025-05-13T08:26:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:28:19Z" level=warning msg="UpdateIdentities: Skipping Delete of a non-existing identity" identity=16777218 subsys=policy
time="2025-05-16T08:28:19Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-16T08:28:21Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=fec02dfa-6368-44ac-922b-d03a2e1603ab subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=fec02dfa-6368-44ac-922b-d03a2e1603ab policyRevision=66 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=f144a001-8965-4a5a-84fb-3aa602e59c75 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=f144a001-8965-4a5a-84fb-3aa602e59c75 policyRevision=68 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=2ace3607-16b9-4047-a9ac-d8d12c8f6de9 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2ace3607-16b9-4047-a9ac-d8d12c8f6de9 policyRevision=70 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=bfdb4d99-3c7d-4682-b887-181ff2cf6564 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=bfdb4d99-3c7d-4682-b887-181ff2cf6564 policyRevision=72 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=25d6f8c4-9d3d-4c59-a21e-7701a73ed64d subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=25d6f8c4-9d3d-4c59-a21e-7701a73ed64d policyRevision=74 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=84cbbf6a-186d-413e-91ad-bc2f53476946 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=84cbbf6a-186d-413e-91ad-bc2f53476946 policyRevision=76 subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=4fc67321-c661-4fd9-8d93-55b29325182d subsys=daemon
time="2025-05-16T08:29:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-16T08:29:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=4fc67321-c661-4fd9-8d93-55b29325182d policyRevision=78 subsys=daemon
time="2025-05-20T22:52:49Z" level=info msg="Create endpoint request" addressing="&{10.244.1.227 008d307d-564f-49f6-a23d-b9e90a0090ef default }" containerID=8e639bae91df353a607f319e014432f280015f4c9a54a9c94a6bca43ef6a7d7c datapathConfiguration="&{false false false false false <nil>}" interface=lxc5e900a634a2d k8sPodName=bikexbike/cm-acme-http-solver-g4dth labels="[]" subsys=daemon sync-build=true
time="2025-05-20T22:52:49Z" level=info msg="New endpoint" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1088 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Resolving identity labels (blocking)" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1088 identityLabels="k8s:acme.cert-manager.io/http-domain=608961820,k8s:acme.cert-manager.io/http-token=1208684116,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Skipped non-kubernetes labels when labelling ciliumidentity. All labels will still be used in identity determination" labels="map[k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name:bikexbike]" subsys=crd-allocator
time="2025-05-20T22:52:49Z" level=info msg="Allocated new global key" key="k8s:acme.cert-manager.io/http-domain=608961820;k8s:acme.cert-manager.io/http-token=1208684116;k8s:acme.cert-manager.io/http01-solver=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=bikexbike;" subsys=allocator
time="2025-05-20T22:52:49Z" level=info msg="Identity of endpoint changed" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1088 identity=940 identityLabels="k8s:acme.cert-manager.io/http-domain=608961820,k8s:acme.cert-manager.io/http-token=1208684116,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth oldIdentity="no identity" subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Waiting for endpoint to be generated" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=1088 identity=940 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T22:52:49Z" level=info msg="Create endpoint request" addressing="&{10.244.1.197 dbbcb16d-f23b-4870-af7b-a3a7a4384dc2 default }" containerID=1c80f3d7cf9d6bb07e2517d122506e9b40804945769a0a34e2f3ffca68acfd55 datapathConfiguration="&{false false false false false <nil>}" interface=lxc9e8ad6d199d4 k8sPodName=bikexbike/cm-acme-http-solver-7l7xp labels="[]" subsys=daemon sync-build=true
time="2025-05-20T22:52:49Z" level=info msg="New endpoint" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=227 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Resolving identity labels (blocking)" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=227 identityLabels="k8s:acme.cert-manager.io/http-domain=1025771183,k8s:acme.cert-manager.io/http-token=939200182,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Skipped non-kubernetes labels when labelling ciliumidentity. All labels will still be used in identity determination" labels="map[k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name:bikexbike]" subsys=crd-allocator
time="2025-05-20T22:52:49Z" level=info msg="Allocated new global key" key="k8s:acme.cert-manager.io/http-domain=1025771183;k8s:acme.cert-manager.io/http-token=939200182;k8s:acme.cert-manager.io/http01-solver=true;k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike;k8s:io.cilium.k8s.policy.cluster=default;k8s:io.cilium.k8s.policy.serviceaccount=default;k8s:io.kubernetes.pod.namespace=bikexbike;" subsys=allocator
time="2025-05-20T22:52:49Z" level=info msg="Identity of endpoint changed" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=227 identity=11396 identityLabels="k8s:acme.cert-manager.io/http-domain=1025771183,k8s:acme.cert-manager.io/http-token=939200182,k8s:acme.cert-manager.io/http01-solver=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=default,k8s:io.kubernetes.pod.namespace=bikexbike" ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp oldIdentity="no identity" subsys=endpoint
time="2025-05-20T22:52:49Z" level=info msg="Waiting for endpoint to be generated" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=227 identity=11396 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:52:50Z" level=info msg="Rewrote endpoint BPF program" containerID=1c80f3d7cf datapathPolicyRevision=0 desiredPolicyRevision=78 endpointID=227 identity=11396 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:52:50Z" level=info msg="Successful endpoint creation" containerID=1c80f3d7cf datapathPolicyRevision=78 desiredPolicyRevision=78 endpointID=227 identity=11396 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=daemon
time="2025-05-20T22:52:50Z" level=info msg="Rewrote endpoint BPF program" containerID=8e639bae91 datapathPolicyRevision=0 desiredPolicyRevision=78 endpointID=1088 identity=940 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T22:52:50Z" level=info msg="Successful endpoint creation" containerID=8e639bae91 datapathPolicyRevision=78 desiredPolicyRevision=78 endpointID=1088 identity=940 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=daemon
time="2025-05-20T22:52:50Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T22:52:51Z" level=info msg="regenerating all endpoints" reason= subsys=endpoint-manager
time="2025-05-20T22:53:10Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=cm-acme-http-solver-h5gc9 obj="10.245.119.204:8089/ANY" subsys=k8s-watcher
time="2025-05-20T22:53:10Z" level=warning msg="service not found" k8sNamespace=bikexbike k8sSvcName=cm-acme-http-solver-d8z46 obj="10.245.21.144:8089/ANY" subsys=k8s-watcher
time="2025-05-20T22:53:10Z" level=info msg="Delete endpoint request" containerID=1c80f3d7cf endpointID=227 k8sNamespace=bikexbike k8sPodName=cm-acme-http-solver-7l7xp subsys=daemon
time="2025-05-20T22:53:10Z" level=info msg="Releasing key" key="[k8s:acme.cert-manager.io/http-domain=1025771183 k8s:acme.cert-manager.io/http-token=939200182 k8s:acme.cert-manager.io/http01-solver=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=bikexbike]" subsys=allocator
time="2025-05-20T22:53:10Z" level=info msg="Delete endpoint request" containerID=8e639bae91 endpointID=1088 k8sNamespace=bikexbike k8sPodName=cm-acme-http-solver-g4dth subsys=daemon
time="2025-05-20T22:53:10Z" level=info msg="Releasing key" key="[k8s:acme.cert-manager.io/http-domain=608961820 k8s:acme.cert-manager.io/http-token=1208684116 k8s:acme.cert-manager.io/http01-solver=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=bikexbike k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=default k8s:io.kubernetes.pod.namespace=bikexbike]" subsys=allocator
time="2025-05-20T22:53:10Z" level=info msg="Removed endpoint" containerID=1c80f3d7cf datapathPolicyRevision=78 desiredPolicyRevision=78 endpointID=227 identity=11396 ipv4=10.244.1.197 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-7l7xp subsys=endpoint
time="2025-05-20T22:53:11Z" level=info msg="Removed endpoint" containerID=8e639bae91 datapathPolicyRevision=78 desiredPolicyRevision=78 endpointID=1088 identity=940 ipv4=10.244.1.227 ipv6= k8sPodName=bikexbike/cm-acme-http-solver-g4dth subsys=endpoint
time="2025-05-20T23:26:21Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-20T23:26:22Z" level=info msg="regenerating all endpoints" reason="one or more identities created or deleted" subsys=endpoint-manager
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=e3089740-11c8-43cd-8db9-7a08030e92af subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=e3089740-11c8-43cd-8db9-7a08030e92af policyRevision=80 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=ff88eaca-a834-47f2-81f7-b1a794ebc44f subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=ff88eaca-a834-47f2-81f7-b1a794ebc44f policyRevision=82 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=7358c5f9-b16b-4a6a-b0fb-170cbba3b5c3 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=7358c5f9-b16b-4a6a-b0fb-170cbba3b5c3 policyRevision=84 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=a52a23a7-9f13-4955-a5ae-d397074bb257 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a52a23a7-9f13-4955-a5ae-d397074bb257 policyRevision=86 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=a54ecbcf-2bd6-466c-bd10-0d4faa10b351 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a54ecbcf-2bd6-466c-bd10-0d4faa10b351 policyRevision=88 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=928f660a-055c-4c39-be3b-4836da740c3c subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=928f660a-055c-4c39-be3b-4836da740c3c policyRevision=90 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-21T02:11:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=2b57e550-c919-4916-b4ce-885116c19b48 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2b57e550-c919-4916-b4ce-885116c19b48 policyRevision=92 subsys=daemon
time="2025-05-21T02:11:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-22T22:00:42Z" level=info msg="Create endpoint request" addressing="&{10.244.1.208 3bc73c64-f463-4f05-8599-d43a7b4b9a65 default }" containerID=370fd7c6d8c03b733609c2a2494f683699384a0ad21be26a3f3dd02a1d85cdbf datapathConfiguration="&{false false false false false <nil>}" interface=lxc518e75b83fcc k8sPodName=kube-system/coredns-6b79676d8-kx7lx labels="[]" subsys=daemon sync-build=true
time="2025-05-22T22:00:42Z" level=info msg="New endpoint" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=33 ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Resolving identity labels (blocking)" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=33 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Identity of endpoint changed" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=33 identity=30020 identityLabels="k8s:doks.digitalocean.com/managed=true,k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system,k8s:io.cilium.k8s.policy.cluster=default,k8s:io.cilium.k8s.policy.serviceaccount=coredns,k8s:io.kubernetes.pod.namespace=kube-system,k8s:k8s-app=kube-dns" ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx oldIdentity="no identity" subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Waiting for endpoint to be generated" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=0 endpointID=33 identity=30020 ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Rewrote endpoint BPF program" containerID=370fd7c6d8 datapathPolicyRevision=0 desiredPolicyRevision=92 endpointID=33 identity=30020 ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=endpoint
time="2025-05-22T22:00:42Z" level=info msg="Successful endpoint creation" containerID=370fd7c6d8 datapathPolicyRevision=92 desiredPolicyRevision=92 endpointID=33 identity=30020 ipv4=10.244.1.208 ipv6= k8sPodName=kube-system/coredns-6b79676d8-kx7lx subsys=daemon
time="2025-05-22T22:00:43Z" level=info msg="Delete endpoint request" containerID=8b854afabc endpointID=935 k8sNamespace=kube-system k8sPodName=coredns-854895db77-2x6tn subsys=daemon
time="2025-05-22T22:00:43Z" level=info msg="Releasing key" key="[k8s:doks.digitalocean.com/managed=true k8s:io.cilium.k8s.namespace.labels.kubernetes.io/metadata.name=kube-system k8s:io.cilium.k8s.policy.cluster=default k8s:io.cilium.k8s.policy.serviceaccount=coredns k8s:io.kubernetes.pod.namespace=kube-system k8s:k8s-app=kube-dns]" subsys=allocator
time="2025-05-22T22:00:43Z" level=info msg="Removed endpoint" containerID=8b854afabc datapathPolicyRevision=92 desiredPolicyRevision=78 endpointID=935 identity=30020 ipv4=10.244.1.196 ipv6= k8sPodName=kube-system/coredns-854895db77-2x6tn subsys=endpoint
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=70a7b83f-78e4-4d1f-ae86-3e8b8307204e subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=70a7b83f-78e4-4d1f-ae86-3e8b8307204e policyRevision=94 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=33d97012-ad8b-4a26-b60a-4996d87361c6 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=33d97012-ad8b-4a26-b60a-4996d87361c6 policyRevision=96 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=2bf058b5-6d24-4c81-b8b4-3f47c1e5af0a subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=2bf058b5-6d24-4c81-b8b4-3f47c1e5af0a policyRevision=98 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=82e40dcf-d39c-4bb6-8fea-2f88ff3bd62b subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=82e40dcf-d39c-4bb6-8fea-2f88ff3bd62b policyRevision=100 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=1aeb10a9-a550-4638-a8dc-c9d3aae8ff8f subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1aeb10a9-a550-4638-a8dc-c9d3aae8ff8f policyRevision=102 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=5f8d8aff-ff51-45aa-a19a-8045cbb20421 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=5f8d8aff-ff51-45aa-a19a-8045cbb20421 policyRevision=104 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-24T02:14:09Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=6cdc2019-52b5-42c9-815e-2dae0a1779c1 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=6cdc2019-52b5-42c9-815e-2dae0a1779c1 policyRevision=106 subsys=daemon
time="2025-05-24T02:14:09Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8082 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-application-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=3c6ecf5a-f85b-4b92-8853-c5144ec36f1d] Description:}]" policyAddRequest=a4a0a664-1de3-4ec6-9201-a1383da03db9 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=a4a0a664-1de3-4ec6-9201-a1383da03db9 policyRevision=108 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-application-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:7000 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:8080 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-applicationset-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=1e1dff1b-259e-44b5-b059-ce1905e9171e] Description:}]" policyAddRequest=714287ea-0187-44e5-932e-f6438c6a9adb subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-applicationset-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=714287ea-0187-44e5-932e-f6438c6a9adb policyRevision=110 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-dex-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5556 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>} {Ports:[{Port:5557 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:5558 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-dex-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=e49e30b6-0973-4561-b6ce-a9a6f0079c8f] Description:}]" policyAddRequest=fc25f8a8-61a1-4842-94ad-2f9ec63f53bb subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-dex-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=fc25f8a8-61a1-4842-94ad-2f9ec63f53bb policyRevision=112 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:9001 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-notifications-controller-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=87ba9e6b-7b27-467e-9e33-cfa0d5d0b215] Description:}]" policyAddRequest=138dc700-7d6c-477a-8a3c-40861bc2c901 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=138dc700-7d6c-477a-8a3c-40861bc2c901 policyRevision=114 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-notifications-controller-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-redis\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:6379 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-redis-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=d1b5ac69-1fdf-4acd-bb8b-d16dfe54fde9] Description:}]" policyAddRequest=1d2205bb-b216-42f6-af3d-6b5d2606e832 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-redis-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1d2205bb-b216-42f6-af3d-6b5d2606e832 policyRevision=116 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-repo-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-application-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-notifications-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} {IngressCommonRule:{FromEndpoints:[{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-applicationset-controller\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8081 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>} 
{IngressCommonRule:{FromEndpoints:[{\"matchExpressions\":[{\"key\":\"k8s:io.kubernetes.pod.namespace\",\"operator\":\"Exists\"}]}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[{Ports:[{Port:8084 Protocol:TCP}] TerminatingTLS:<nil> OriginatingTLS:<nil> ServerNames:[] Listener:<nil> Rules:<nil>}] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-repo-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=9d8395f2-2be4-4c57-b273-30051106facd] Description:}]" policyAddRequest=1e03efb0-697a-425a-9a5b-60346d800a17 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=1e03efb0-697a-425a-9a5b-60346d800a17 policyRevision=118 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-repo-server-network-policy subsys=k8s-watcher
time="2025-05-26T10:47:08Z" level=info msg="Policy Add Request" ciliumNetworkPolicy="[&{EndpointSelector:{\"matchLabels\":{\"k8s:app.kubernetes.io/name\":\"argocd-server\",\"k8s:io.kubernetes.pod.namespace\":\"argocd\"}} NodeSelector:{} Ingress:[{IngressCommonRule:{FromEndpoints:[{}] FromRequires:[] FromCIDR: FromCIDRSet:[] FromEntities:[] aggregatedSelectors:[]} ToPorts:[] ICMPs:[] Authentication:<nil>}] IngressDeny:[] Egress:[] EgressDeny:[] Labels:[k8s:io.cilium.k8s.policy.derived-from=NetworkPolicy k8s:io.cilium.k8s.policy.name=argocd-server-network-policy k8s:io.cilium.k8s.policy.namespace=argocd k8s:io.cilium.k8s.policy.uid=b715e8e4-907c-4ce6-9a27-400c05084a8e] Description:}]" policyAddRequest=490f6703-f8b1-4ec2-8c08-a1a2894eab48 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="Policy imported via API, recalculating..." policyAddRequest=490f6703-f8b1-4ec2-8c08-a1a2894eab48 policyRevision=120 subsys=daemon
time="2025-05-26T10:47:08Z" level=info msg="NetworkPolicy successfully added" k8sApiVersion= k8sNetworkPolicyName=argocd-server-network-policy subsys=k8s-watcher
Kubeintel ©2024