
Kubeintel

  • Search
  • Pods
  • Deployments
  • Statefulsets
  • Jobs
  • Daemonsets
  • Namespaces
  • Nodes
  • Services
  • Configmaps
Tabs: Summary | Metadata | Containers | Status | Spec | All | Pods | Events | Investigator
Deployment Details

Name: hubble-relay

Namespace: kube-system

Replicas: 1/1

Strategy: RollingUpdate

Selector: k8s-app: hubble-relay
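
The selector above is what ties the Deployment to its Pods. A quick way to confirm which Pods it currently matches (a sketch, assuming kubectl access to the same cluster) is:

# List the Pods matched by the Deployment's label selector
kubectl get pods -n kube-system -l k8s-app=hubble-relay -o wide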

Kubectl Commands
  • View
  • Delete
  • Describe
  • Scale
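
These buttons map onto standard kubectl verbs. The exact commands Kubeintel issues are not shown on this page, but equivalent invocations would look roughly like this:

kubectl get deployment hubble-relay -n kube-system -o yaml        # View
kubectl describe deployment hubble-relay -n kube-system           # Describe
kubectl scale deployment hubble-relay -n kube-system --replicas=2 # Scale (example replica count)
kubectl delete deployment hubble-relay -n kube-system             # Delete
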
Containers

Name          Image                                                 Ports
hubble-relay  ghcr.io/digitalocean-packages/hubble-relay:v1.14.18   4245/TCP
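
The single container exposes gRPC on 4245/TCP. To reach that port from a workstation for debugging (for example with the Hubble CLI pointed at localhost:4245), a port-forward sketch such as the following should work, assuming cluster access:

# Forward local port 4245 to the hubble-relay Deployment's grpc port
kubectl port-forward -n kube-system deploy/hubble-relay 4245:4245
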
Metadata

Creation Time: 2024-07-01T18:52:38Z

Labels:

  • app.kubernetes.io/name: hubble-relay
  • app.kubernetes.io/part-of: cilium
  • c3.doks.digitalocean.com/component: cilium
  • c3.doks.digitalocean.com/plane: data
  • doks.digitalocean.com/managed: true
  • k8s-app: hubble-relay

Annotations:

  • deployment.kubernetes.io/revision: 4
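
The part-of and managed labels are useful for finding the other Cilium components that DOKS installs alongside this Deployment; for example (a sketch, assuming the labels are applied consistently across the cluster):

# Find Deployments labelled as part of Cilium
kubectl get deployments -A -l app.kubernetes.io/part-of=cilium
# Or filter on the DOKS-managed label
kubectl get deployments -A -l doks.digitalocean.com/managed=true
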
Full Manifest

metadata:
  name: hubble-relay
  namespace: kube-system
  uid: 082aae43-3384-479a-9c50-db2d1ba05714
  resourceVersion: '92139946'
  generation: 4
  creationTimestamp: '2024-07-01T18:52:38Z'
  labels:
    app.kubernetes.io/name: hubble-relay
    app.kubernetes.io/part-of: cilium
    c3.doks.digitalocean.com/component: cilium
    c3.doks.digitalocean.com/plane: data
    doks.digitalocean.com/managed: 'true'
    k8s-app: hubble-relay
  annotations:
    deployment.kubernetes.io/revision: '4'
spec:
  replicas: 1
  selector:
    matchLabels:
      k8s-app: hubble-relay
  template:
    metadata:
      creationTimestamp: null
      labels:
        app.kubernetes.io/name: hubble-relay
        app.kubernetes.io/part-of: cilium
        doks.digitalocean.com/managed: 'true'
        k8s-app: hubble-relay
      annotations:
        cluster-autoscaler.kubernetes.io/safe-to-evict: 'true'
        clusterlint.digitalocean.com/disabled-checks: resource-requirements
    spec:
      volumes:
        - name: config
          configMap:
            name: hubble-relay-config
            items:
              - key: config.yaml
                path: config.yaml
            defaultMode: 420
        - name: tls
          projected:
            sources:
              - secret:
                  name: hubble-relay-client-certs
                  items:
                    - key: tls.crt
                      path: client.crt
                    - key: tls.key
                      path: client.key
                    - key: ca.crt
                      path: hubble-server-ca.crt
            defaultMode: 256
      containers:
        - name: hubble-relay
          image: ghcr.io/digitalocean-packages/hubble-relay:v1.14.18
          command:
            - hubble-relay
          args:
            - serve
          ports:
            - name: grpc
              containerPort: 4245
              protocol: TCP
          resources: {}
          volumeMounts:
            - name: config
              readOnly: true
              mountPath: /etc/hubble-relay
            - name: tls
              readOnly: true
              mountPath: /var/lib/hubble-relay/tls
          livenessProbe:
            tcpSocket:
              port: grpc
            timeoutSeconds: 1
            periodSeconds: 10
            successThreshold: 1
            failureThreshold: 3
          readinessProbe:
            tcpSocket:
              port: grpc
            timeoutSeconds: 1
            periodSeconds: 10
            successThreshold: 1
            failureThreshold: 3
          terminationMessagePath: /dev/termination-log
          terminationMessagePolicy: FallbackToLogsOnError
          imagePullPolicy: IfNotPresent
          securityContext:
            capabilities:
              drop:
                - ALL
            runAsUser: 65532
            runAsGroup: 65532
            runAsNonRoot: true
      restartPolicy: Always
      terminationGracePeriodSeconds: 1
      dnsPolicy: ClusterFirst
      nodeSelector:
        kubernetes.io/os: linux
      serviceAccountName: hubble-relay
      serviceAccount: hubble-relay
      automountServiceAccountToken: false
      securityContext:
        fsGroup: 65532
      affinity:
        nodeAffinity:
          preferredDuringSchedulingIgnoredDuringExecution:
            - weight: 100
              preference:
                matchExpressions:
                  - key: doks.digitalocean.com/gpu-brand
                    operator: DoesNotExist
        podAffinity:
          requiredDuringSchedulingIgnoredDuringExecution:
            - labelSelector:
                matchLabels:
                  k8s-app: cilium
              topologyKey: kubernetes.io/hostname
      schedulerName: default-scheduler
      tolerations:
        - key: nvidia.com/gpu
          operator: Exists
  strategy:
    type: RollingUpdate
    rollingUpdate:
      maxUnavailable: 1
      maxSurge: 25%
  revisionHistoryLimit: 10
  progressDeadlineSeconds: 600
status:
  observedGeneration: 4
  replicas: 1
  updatedReplicas: 1
  readyReplicas: 1
  availableReplicas: 1
  conditions:
    - type: Available
      status: 'True'
      lastUpdateTime: '2024-07-01T18:52:38Z'
      lastTransitionTime: '2024-07-01T18:52:38Z'
      reason: MinimumReplicasAvailable
      message: Deployment has minimum availability.
    - type: Progressing
      status: 'True'
      lastUpdateTime: '2025-04-17T22:01:30Z'
      lastTransitionTime: '2024-07-01T18:52:38Z'
      reason: NewReplicaSetAvailable
      message: ReplicaSet "hubble-relay-fbcb88677" has successfully progressed.
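
The status block shows generation 4 observed, 1/1 replicas ready, and a Progressing condition pointing at ReplicaSet hubble-relay-fbcb88677. The same information can be checked from the command line; a rough equivalent:

# Confirm the rollout has completed and inspect its revision history
kubectl rollout status deployment/hubble-relay -n kube-system
kubectl rollout history deployment/hubble-relay -n kube-system
# List the ReplicaSets the Deployment currently owns
kubectl get replicasets -n kube-system -l k8s-app=hubble-relay
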
Pods

Name                          Namespace    Status   Created       Restarts  Node            IP Address
hubble-relay-fbcb88677-9p8rl  kube-system  Running  2 months ago  0         system-0-65529  10.244.1.43
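
For the single Pod listed above, the usual next steps when investigating are its logs and events; hedged examples, assuming the Pod name has not changed since this snapshot:

# Tail logs from the Deployment's current Pod(s)
kubectl logs -n kube-system deploy/hubble-relay --tail=100
# Inspect the specific Pod shown in the table, including recent events
kubectl describe pod hubble-relay-fbcb88677-9p8rl -n kube-system
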
Kubeintel ©2024