onyx gist
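The manifests below are the rendered output of the onyx-stack Helm chart for the release onyx-stack-sb-6ck in the namespace sandbox-sb-6ckwah035qr3yrfv. A minimal sketch of how a bundle like this is typically regenerated and applied; the local chart path and values file are assumptions, not part of this gist:

#!/bin/bash
# Render the chart to plain manifests (as captured below) and apply them.
# ./onyx-stack and values.yaml are placeholders for the local chart checkout and overrides.
helm template onyx-stack-sb-6ck ./onyx-stack \
  --namespace sandbox-sb-6ckwah035qr3yrfv \
  -f values.yaml > rendered.yaml

kubectl apply -n sandbox-sb-6ckwah035qr3yrfv -f rendered.yaml

# Verify the workloads come up
kubectl -n sandbox-sb-6ckwah035qr3yrfv get pods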
---
# Source: onyx-stack/charts/minio/templates/console/networkpolicy.yaml
kind: NetworkPolicy
apiVersion: networking.k8s.io/v1
metadata:
name: onyx-stack-sb-6ck-minio-console
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: minio
app.kubernetes.io/version: 2.0.1
helm.sh/chart: minio-17.0.4
app.kubernetes.io/component: console
app.kubernetes.io/part-of: minio
spec:
podSelector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: minio
app.kubernetes.io/component: console
app.kubernetes.io/part-of: minio
policyTypes:
- Ingress
- Egress
egress:
- {}
ingress:
# Allow inbound connections
- ports:
- port: 9090
---
# Source: onyx-stack/charts/minio/templates/networkpolicy.yaml
kind: NetworkPolicy
apiVersion: networking.k8s.io/v1
metadata:
name: onyx-stack-sb-6ck-minio
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: minio
app.kubernetes.io/version: 2025.5.24
helm.sh/chart: minio-17.0.4
app.kubernetes.io/component: minio
app.kubernetes.io/part-of: minio
spec:
podSelector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: minio
app.kubernetes.io/component: minio
app.kubernetes.io/part-of: minio
policyTypes:
- Ingress
- Egress
egress:
- {}
ingress:
# Allow inbound connections
- ports:
- port: 9000
---
# Source: onyx-stack/charts/nginx/templates/networkpolicy.yaml
kind: NetworkPolicy
apiVersion: networking.k8s.io/v1
metadata:
name: onyx-stack-sb-6ck-nginx
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: nginx
app.kubernetes.io/version: 1.25.4
helm.sh/chart: nginx-15.14.0
spec:
podSelector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: nginx
policyTypes:
- Ingress
- Egress
egress:
- {}
ingress:
- ports:
- port: 1024
---
# Source: onyx-stack/charts/postgresql/templates/primary/networkpolicy.yaml
kind: NetworkPolicy
apiVersion: networking.k8s.io/v1
metadata:
name: onyx-stack-sb-6ck-postgresql
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: postgresql
app.kubernetes.io/version: 16.2.0
helm.sh/chart: postgresql-14.3.1
app.kubernetes.io/component: primary
spec:
podSelector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: postgresql
app.kubernetes.io/component: primary
policyTypes:
- Ingress
- Egress
egress:
- {}
ingress:
- ports:
- port: 5432
---
# Source: onyx-stack/charts/redis/templates/networkpolicy.yaml
kind: NetworkPolicy
apiVersion: networking.k8s.io/v1
metadata:
name: onyx-stack-sb-6ck-redis
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: redis
app.kubernetes.io/version: 7.4.0
helm.sh/chart: redis-20.1.0
spec:
podSelector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: redis
policyTypes:
- Ingress
- Egress
egress:
- {}
ingress:
# Allow inbound connections
- ports:
- port: 6379
---
# Source: onyx-stack/charts/minio/templates/console/pdb.yaml
apiVersion: policy/v1
kind: PodDisruptionBudget
metadata:
name: onyx-stack-sb-6ck-minio-console
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: minio
app.kubernetes.io/version: 2.0.1
helm.sh/chart: minio-17.0.4
app.kubernetes.io/component: console
app.kubernetes.io/part-of: minio
spec:
maxUnavailable: 1
selector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: minio
app.kubernetes.io/component: console
app.kubernetes.io/part-of: minio
---
# Source: onyx-stack/charts/minio/templates/pdb.yaml
apiVersion: policy/v1
kind: PodDisruptionBudget
metadata:
name: onyx-stack-sb-6ck-minio
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: minio
app.kubernetes.io/version: 2025.5.24
helm.sh/chart: minio-17.0.4
app.kubernetes.io/component: minio
app.kubernetes.io/part-of: minio
spec:
maxUnavailable: 1
selector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: minio
app.kubernetes.io/component: minio
app.kubernetes.io/part-of: minio
---
# Source: onyx-stack/charts/redis/templates/master/pdb.yaml
apiVersion: policy/v1
kind: PodDisruptionBudget
metadata:
name: onyx-stack-sb-6ck-redis-master
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: redis
app.kubernetes.io/version: 7.4.0
helm.sh/chart: redis-20.1.0
app.kubernetes.io/component: master
spec:
maxUnavailable: 1
selector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: redis
app.kubernetes.io/component: master
---
# Source: onyx-stack/charts/minio/templates/serviceaccount.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
name: onyx-stack-sb-6ck-minio
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: minio
app.kubernetes.io/version: 2025.5.24
helm.sh/chart: minio-17.0.4
app.kubernetes.io/part-of: minio
automountServiceAccountToken: false
secrets:
- name: onyx-stack-sb-6ck-minio
---
# Source: onyx-stack/charts/nginx/templates/serviceaccount.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
name: onyx-stack-sb-6ck-nginx
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: nginx
app.kubernetes.io/version: 1.25.4
helm.sh/chart: nginx-15.14.0
automountServiceAccountToken: false
---
# Source: onyx-stack/charts/postgresql/templates/serviceaccount.yaml
apiVersion: v1
kind: ServiceAccount
metadata:
name: onyx-stack-sb-6ck-postgresql
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: postgresql
app.kubernetes.io/version: 16.2.0
helm.sh/chart: postgresql-14.3.1
automountServiceAccountToken: false
---
# Source: onyx-stack/charts/redis/templates/master/serviceaccount.yaml
apiVersion: v1
kind: ServiceAccount
automountServiceAccountToken: false
metadata:
name: onyx-stack-sb-6ck-redis-master
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: redis
app.kubernetes.io/version: 7.4.0
helm.sh/chart: redis-20.1.0
---
# Source: onyx-stack/templates/onyx-secret.yaml
apiVersion: v1
kind: Secret
metadata:
name: onyx-secrets
type: Opaque
stringData:
oauth_client_id: ""
oauth_client_secret: ""
oauth_cookie_secret: ""
postgres_password: "postgres"
redis_password: "password"
s3_aws_access_key_id: "minioadmin"
s3_aws_secret_access_key: "minioadmin"
smtp_pass: ""
---
# Source: onyx-stack/charts/redis/templates/configmap.yaml
apiVersion: v1
kind: ConfigMap
metadata:
name: onyx-stack-sb-6ck-redis-configuration
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: redis
app.kubernetes.io/version: 7.4.0
helm.sh/chart: redis-20.1.0
data:
redis.conf: |-
# User-supplied common configuration:
# Enable AOF https://redis.io/topics/persistence#append-only-file
appendonly no
# Disable RDB persistence, AOF persistence already enabled.
save ""
# End of common configuration
master.conf: |-
dir /data
# User-supplied master configuration:
rename-command FLUSHDB ""
rename-command FLUSHALL ""
# End of master configuration
replica.conf: |-
dir /data
# User-supplied replica configuration:
rename-command FLUSHDB ""
rename-command FLUSHALL ""
# End of replica configuration
---
# Source: onyx-stack/charts/redis/templates/health-configmap.yaml
apiVersion: v1
kind: ConfigMap
metadata:
name: onyx-stack-sb-6ck-redis-health
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: redis
app.kubernetes.io/version: 7.4.0
helm.sh/chart: redis-20.1.0
data:
ping_readiness_local.sh: |-
#!/bin/bash
[[ -f $REDIS_PASSWORD_FILE ]] && export REDIS_PASSWORD="$(< "${REDIS_PASSWORD_FILE}")"
[[ -n "$REDIS_PASSWORD" ]] && export REDISCLI_AUTH="$REDIS_PASSWORD"
response=$(
timeout -s 15 $1 \
redis-cli \
-h localhost \
-p $REDIS_PORT \
ping
)
if [ "$?" -eq "124" ]; then
echo "Timed out"
exit 1
fi
if [ "$response" != "PONG" ]; then
echo "$response"
exit 1
fi
ping_liveness_local.sh: |-
#!/bin/bash
[[ -f $REDIS_PASSWORD_FILE ]] && export REDIS_PASSWORD="$(< "${REDIS_PASSWORD_FILE}")"
[[ -n "$REDIS_PASSWORD" ]] && export REDISCLI_AUTH="$REDIS_PASSWORD"
response=$(
timeout -s 15 $1 \
redis-cli \
-h localhost \
-p $REDIS_PORT \
ping
)
if [ "$?" -eq "124" ]; then
echo "Timed out"
exit 1
fi
responseFirstWord=$(echo $response | head -n1 | awk '{print $1;}')
if [ "$response" != "PONG" ] && [ "$responseFirstWord" != "LOADING" ] && [ "$responseFirstWord" != "MASTERDOWN" ]; then
echo "$response"
exit 1
fi
ping_readiness_master.sh: |-
#!/bin/bash
[[ -f $REDIS_MASTER_PASSWORD_FILE ]] && export REDIS_MASTER_PASSWORD="$(< "${REDIS_MASTER_PASSWORD_FILE}")"
[[ -n "$REDIS_MASTER_PASSWORD" ]] && export REDISCLI_AUTH="$REDIS_MASTER_PASSWORD"
response=$(
timeout -s 15 $1 \
redis-cli \
-h $REDIS_MASTER_HOST \
-p $REDIS_MASTER_PORT_NUMBER \
ping
)
if [ "$?" -eq "124" ]; then
echo "Timed out"
exit 1
fi
if [ "$response" != "PONG" ]; then
echo "$response"
exit 1
fi
ping_liveness_master.sh: |-
#!/bin/bash
[[ -f $REDIS_MASTER_PASSWORD_FILE ]] && export REDIS_MASTER_PASSWORD="$(< "${REDIS_MASTER_PASSWORD_FILE}")"
[[ -n "$REDIS_MASTER_PASSWORD" ]] && export REDISCLI_AUTH="$REDIS_MASTER_PASSWORD"
response=$(
timeout -s 15 $1 \
redis-cli \
-h $REDIS_MASTER_HOST \
-p $REDIS_MASTER_PORT_NUMBER \
ping
)
if [ "$?" -eq "124" ]; then
echo "Timed out"
exit 1
fi
responseFirstWord=$(echo $response | head -n1 | awk '{print $1;}')
if [ "$response" != "PONG" ] && [ "$responseFirstWord" != "LOADING" ]; then
echo "$response"
exit 1
fi
ping_readiness_local_and_master.sh: |-
script_dir="$(dirname "$0")"
exit_status=0
"$script_dir/ping_readiness_local.sh" $1 || exit_status=$?
"$script_dir/ping_readiness_master.sh" $1 || exit_status=$?
exit $exit_status
ping_liveness_local_and_master.sh: |-
script_dir="$(dirname "$0")"
exit_status=0
"$script_dir/ping_liveness_local.sh" $1 || exit_status=$?
"$script_dir/ping_liveness_master.sh" $1 || exit_status=$?
exit $exit_status
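Each of the scripts above expects a timeout in seconds as its first argument; the Redis StatefulSet (rendered later in the chart, not shown in this excerpt) mounts this ConfigMap and calls them from its probes. A hedged sketch of running the local readiness check by hand — the /health mount path and the -0 pod name follow the usual Bitnami conventions and are assumptions here:

# Run the local readiness check with a 1-second timeout from inside the redis master pod
kubectl -n sandbox-sb-6ckwah035qr3yrfv exec onyx-stack-sb-6ck-redis-master-0 -- \
  /bin/bash -c '/health/ping_readiness_local.sh 1'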
---
# Source: onyx-stack/charts/redis/templates/scripts-configmap.yaml
apiVersion: v1
kind: ConfigMap
metadata:
name: onyx-stack-sb-6ck-redis-scripts
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: redis
app.kubernetes.io/version: 7.4.0
helm.sh/chart: redis-20.1.0
data:
start-master.sh: |
#!/bin/bash
[[ -f $REDIS_PASSWORD_FILE ]] && export REDIS_PASSWORD="$(< "${REDIS_PASSWORD_FILE}")"
if [[ -f /opt/bitnami/redis/mounted-etc/master.conf ]];then
cp /opt/bitnami/redis/mounted-etc/master.conf /opt/bitnami/redis/etc/master.conf
fi
if [[ -f /opt/bitnami/redis/mounted-etc/redis.conf ]];then
cp /opt/bitnami/redis/mounted-etc/redis.conf /opt/bitnami/redis/etc/redis.conf
fi
ARGS=("--port" "${REDIS_PORT}")
ARGS+=("--protected-mode" "no")
ARGS+=("--include" "/opt/bitnami/redis/etc/redis.conf")
ARGS+=("--include" "/opt/bitnami/redis/etc/master.conf")
exec redis-server "${ARGS[@]}"
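With REDIS_PORT set to 6379 (the port exposed by the Redis Services in this bundle), the exec line above expands to roughly the following; shown only to make the flag and include order explicit:

redis-server --port 6379 --protected-mode no \
  --include /opt/bitnami/redis/etc/redis.conf \
  --include /opt/bitnami/redis/etc/master.conf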
---
# Source: onyx-stack/templates/configmap.yaml
apiVersion: v1
kind: ConfigMap
metadata:
name: env-configmap
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
data:
INTERNAL_URL: "http://onyx-stack-sb-6ck-api-service:8080"
POSTGRES_HOST: onyx-stack-sb-6ck-postgresql
VESPA_HOST: da-vespa-0.vespa-service.sandbox-sb-6ckwah035qr3yrfv.svc.cluster.local
REDIS_HOST: onyx-stack-sb-6ck-redis-master
MODEL_SERVER_HOST: "onyx-stack-sb-6ck-inference-model-service"
INDEXING_MODEL_SERVER_HOST: "onyx-stack-sb-6ck-indexing-model-service"
ASYM_PASSAGE_PREFIX: ""
ASYM_QUERY_PREFIX: ""
AUTH_TYPE: "disabled"
BING_API_KEY: ""
CELERY_WORKER_DOCPROCESSING_CONCURRENCY: ""
CELERY_WORKER_LIGHT_CONCURRENCY: ""
CELERY_WORKER_LIGHT_PREFETCH_MULTIPLIER: ""
CONFLUENCE_CONNECTOR_LABELS_TO_SKIP: ""
CONTINUE_ON_CONNECTOR_FAILURE: ""
DANSWER_BOT_DISABLE_COT: ""
DANSWER_BOT_DISABLE_DOCS_ONLY_ANSWER: ""
DANSWER_BOT_DISPLAY_ERROR_MSGS: ""
DANSWER_BOT_RESPOND_EVERY_CHANNEL: ""
DASK_JOB_CLIENT_ENABLED: ""
DISABLE_INDEX_UPDATE_ON_SWAP: ""
DISABLE_LLM_CHOOSE_SEARCH: ""
DISABLE_LLM_DOC_RELEVANCE: ""
DISABLE_LLM_QUERY_REPHRASE: ""
DISABLE_RERANK_FOR_STREAMING: ""
DISABLE_TELEMETRY: ""
DOCUMENT_ENCODER_MODEL: ""
DOC_TIME_DECAY: ""
DOMAIN: "localhost"
EDIT_KEYWORD_QUERY: ""
EMAIL_FROM: ""
EMBEDDING_BATCH_SIZE: ""
EXA_API_KEY: ""
EXPERIMENTAL_CHECKPOINTING_ENABLED: ""
GEN_AI_MAX_TOKENS: ""
GONG_CONNECTOR_START_TIME: ""
HARD_DELETE_CHATS: ""
HYBRID_ALPHA: ""
JIRA_API_VERSION: ""
LANGUAGE_CHAT_NAMING_HINT: ""
LANGUAGE_HINT: ""
LOG_ALL_MODEL_INTERACTIONS: ""
LOG_DANSWER_MODEL_INTERACTIONS: ""
LOG_LEVEL: ""
LOG_VESPA_TIMING_INFORMATION: ""
MAX_CHUNKS_FED_TO_CHAT: ""
MIN_THREADS_ML_MODELS: ""
MODEL_SERVER_PORT: ""
MULTILINGUAL_QUERY_EXPANSION: ""
NORMALIZE_EMBEDDINGS: ""
NOTIFY_SLACKBOT_NO_ANSWER: ""
NOTION_CONNECTOR_ENABLE_RECURSIVE_PAGE_LOOKUP: ""
NUM_INDEXING_WORKERS: ""
QA_PROMPT_OVERRIDE: ""
QA_TIMEOUT: "60"
S3_ENDPOINT_URL: ""
S3_FILE_STORE_BUCKET_NAME: ""
SESSION_EXPIRE_TIME_SECONDS: "86400"
SMTP_PORT: ""
SMTP_SERVER: ""
SMTP_USER: ""
VALID_EMAIL_DOMAINS: ""
VESPA_SEARCHER_THREADS: ""
WEB_DOMAIN: "http://localhost:3000"
S3_ENDPOINT_URL: "http://onyx-stack-sb-6ck-minio:9000"
---
# Source: onyx-stack/templates/nginx-conf.yaml
apiVersion: v1
kind: ConfigMap
metadata:
name: onyx-nginx-conf
data:
nginx.conf: |
upstream api_server {
server onyx-stack-sb-6ck-api-service:8080 fail_timeout=0;
}
upstream web_server {
server onyx-stack-sb-6ck-webserver:3000 fail_timeout=0;
}
server {
listen 1024;
server_name $$DOMAIN;
client_max_body_size 5G; # Maximum upload size
location ~ ^/api(.*)$ {
rewrite ^/api(/.*)$ $1 break;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header Host $host;
proxy_http_version 1.1;
proxy_buffering off;
proxy_redirect off;
proxy_pass http://api_server;
}
location / {
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Host $host;
proxy_set_header Host $host;
proxy_http_version 1.1;
proxy_redirect off;
proxy_pass http://web_server;
}
}
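The server block above strips the /api prefix before proxying, so a request for /api/health reaches the API upstream as /health, while every other path goes to the web server. A quick sketch of exercising both routes from inside the cluster via the nginx Service (port 80); the /health path is only an illustrative example, not a documented Onyx endpoint:

# Web server route (proxied to onyx-stack-sb-6ck-webserver:3000)
curl -s http://onyx-stack-sb-6ck-nginx/

# API route; nginx rewrites /api/health to /health before proxying to the api_server upstream
curl -s http://onyx-stack-sb-6ck-nginx/api/health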
---
# Source: onyx-stack/charts/minio/templates/pvc.yaml
kind: PersistentVolumeClaim
apiVersion: v1
metadata:
name: onyx-stack-sb-6ck-minio
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: minio
app.kubernetes.io/version: 2025.5.24
helm.sh/chart: minio-17.0.4
app.kubernetes.io/component: minio
app.kubernetes.io/part-of: minio
spec:
accessModes:
- "ReadWriteOnce"
resources:
requests:
storage: "5Gi"
storageClassName: standard
---
# Source: onyx-stack/charts/minio/templates/console/service.yaml
apiVersion: v1
kind: Service
metadata:
name: onyx-stack-sb-6ck-minio-console
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: minio
app.kubernetes.io/version: 2.0.1
helm.sh/chart: minio-17.0.4
app.kubernetes.io/component: console
app.kubernetes.io/part-of: minio
spec:
type: ClusterIP
ports:
- name: http
port: 9090
targetPort: http
nodePort: null
selector:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: minio
app.kubernetes.io/component: console
app.kubernetes.io/part-of: minio
---
# Source: onyx-stack/charts/minio/templates/service.yaml
apiVersion: v1
kind: Service
metadata:
name: onyx-stack-sb-6ck-minio
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: minio
app.kubernetes.io/version: 2025.5.24
helm.sh/chart: minio-17.0.4
app.kubernetes.io/component: minio
app.kubernetes.io/part-of: minio
spec:
type: ClusterIP
ports:
- name: tcp-api
port: 9000
targetPort: api
nodePort: null
selector:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: minio
app.kubernetes.io/component: minio
app.kubernetes.io/part-of: minio
---
# Source: onyx-stack/charts/nginx/templates/svc.yaml
apiVersion: v1
kind: Service
metadata:
name: onyx-stack-sb-6ck-nginx
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: nginx
app.kubernetes.io/version: 1.25.4
helm.sh/chart: nginx-15.14.0
annotations:
spec:
type: LoadBalancer
sessionAffinity: None
externalTrafficPolicy: "Cluster"
ports:
- name: http
port: 80
targetPort: http
selector:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: nginx
---
# Source: onyx-stack/charts/postgresql/templates/primary/svc-headless.yaml
apiVersion: v1
kind: Service
metadata:
name: onyx-stack-sb-6ck-postgresql-hl
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: postgresql
app.kubernetes.io/version: 16.2.0
helm.sh/chart: postgresql-14.3.1
app.kubernetes.io/component: primary
annotations:
# Use this annotation in addition to the actual publishNotReadyAddresses
# field below because the annotation will stop being respected soon but the
# field is broken in some versions of Kubernetes:
# https://github.com/kubernetes/kubernetes/issues/58662
service.alpha.kubernetes.io/tolerate-unready-endpoints: "true"
spec:
type: ClusterIP
clusterIP: None
# We want all pods in the StatefulSet to have their addresses published for
# the sake of the other Postgresql pods even before they're ready, since they
# have to be able to talk to each other in order to become ready.
publishNotReadyAddresses: true
ports:
- name: tcp-postgresql
port: 5432
targetPort: tcp-postgresql
selector:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: postgresql
app.kubernetes.io/component: primary
---
# Source: onyx-stack/charts/postgresql/templates/primary/svc.yaml
apiVersion: v1
kind: Service
metadata:
name: onyx-stack-sb-6ck-postgresql
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: postgresql
app.kubernetes.io/version: 16.2.0
helm.sh/chart: postgresql-14.3.1
app.kubernetes.io/component: primary
spec:
type: ClusterIP
sessionAffinity: None
ports:
- name: tcp-postgresql
port: 5432
targetPort: tcp-postgresql
nodePort: null
selector:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: postgresql
app.kubernetes.io/component: primary
---
# Source: onyx-stack/charts/redis/templates/headless-svc.yaml
apiVersion: v1
kind: Service
metadata:
name: onyx-stack-sb-6ck-redis-headless
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: redis
app.kubernetes.io/version: 7.4.0
helm.sh/chart: redis-20.1.0
spec:
type: ClusterIP
clusterIP: None
ports:
- name: tcp-redis
port: 6379
targetPort: redis
selector:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: redis
---
# Source: onyx-stack/charts/redis/templates/master/service.yaml
apiVersion: v1
kind: Service
metadata:
name: onyx-stack-sb-6ck-redis-master
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: redis
app.kubernetes.io/version: 7.4.0
helm.sh/chart: redis-20.1.0
app.kubernetes.io/component: master
spec:
type: ClusterIP
internalTrafficPolicy: Cluster
sessionAffinity: None
ports:
- name: tcp-redis
port: 6379
targetPort: redis
nodePort: null
selector:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: redis
app.kubernetes.io/component: master
---
# Source: onyx-stack/charts/vespa/templates/service.yaml
# a headless service that allows individual access to each pod in the StatefulSet
apiVersion: v1
kind: Service
metadata:
name: vespa-service # This should match statefulset.yaml/spec/serviceName
labels:
app: vespa
spec:
clusterIP: None
ports:
- port: 19071
targetPort: 19071
protocol: TCP
name: vespa-tenant-port
- port: 8081
targetPort: 8081
protocol: TCP
name: vespa-port
selector:
app: vespa
app.kubernetes.io/instance: onyx
app.kubernetes.io/name: vespa
---
# Source: onyx-stack/templates/api-service.yaml
apiVersion: v1
kind: Service
metadata:
# INTERNAL_URL env variable depends on this, don't change without changing INTERNAL_URL
name: onyx-stack-sb-6ck-api-service
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
app: api-server
spec:
type: ClusterIP
ports:
- port: 8080
targetPort: 8080
protocol: TCP
name: api-server-port
selector:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: api-server
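For debugging, the API Service can also be reached without going through nginx; a sketch using kubectl port-forward (standard kubectl usage, not something defined by the chart):

kubectl -n sandbox-sb-6ckwah035qr3yrfv port-forward svc/onyx-stack-sb-6ck-api-service 8080:8080
# In another shell, the API is then available on localhost:8080 (root path used only as a placeholder request)
curl -s http://localhost:8080/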
---
# Source: onyx-stack/templates/indexing-model-service.yaml
apiVersion: v1
kind: Service
metadata:
name: onyx-stack-sb-6ck-indexing-model-service
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
spec:
selector:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: indexing-model-server
ports:
- name: modelserver
protocol: TCP
port: 9000
targetPort: 9000
type: ClusterIP
---
# Source: onyx-stack/templates/inference-model-service.yaml
apiVersion: v1
kind: Service
metadata:
name: onyx-stack-sb-6ck-inference-model-service
spec:
type: ClusterIP
ports:
- port: 9000
targetPort: 9000
protocol: TCP
name: modelserver
selector:
app: inference-model-server
---
# Source: onyx-stack/templates/webserver-service.yaml
apiVersion: v1
kind: Service
metadata:
name: onyx-stack-sb-6ck-webserver
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
app: web-server
spec:
type: ClusterIP
ports:
- port: 3000
targetPort: 3000
protocol: TCP
name: http
selector:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: web-server
---
# Source: onyx-stack/charts/minio/templates/application.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-minio
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: minio
app.kubernetes.io/version: 2025.5.24
helm.sh/chart: minio-17.0.4
app.kubernetes.io/component: minio
app.kubernetes.io/part-of: minio
spec:
selector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: minio
app.kubernetes.io/component: minio
app.kubernetes.io/part-of: minio
strategy:
type: RollingUpdate
template:
metadata:
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: minio
app.kubernetes.io/version: 2025.5.24
helm.sh/chart: minio-17.0.4
app.kubernetes.io/component: minio
app.kubernetes.io/part-of: minio
spec:
serviceAccountName: onyx-stack-sb-6ck-minio
affinity:
podAffinity:
podAntiAffinity:
preferredDuringSchedulingIgnoredDuringExecution:
- podAffinityTerm:
labelSelector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: minio
app.kubernetes.io/component: minio
topologyKey: kubernetes.io/hostname
weight: 1
nodeAffinity:
automountServiceAccountToken: false
securityContext:
fsGroup: 1001
fsGroupChangePolicy: OnRootMismatch
supplementalGroups: []
sysctls: []
initContainers:
containers:
- name: minio
image: docker.io/bitnami/minio:2025.5.24-debian-12-r5
imagePullPolicy: "IfNotPresent"
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
privileged: false
readOnlyRootFilesystem: true
runAsGroup: 1001
runAsNonRoot: true
runAsUser: 1001
seLinuxOptions: {}
seccompProfile:
type: RuntimeDefault
env:
- name: BITNAMI_DEBUG
value: "false"
- name: MINIO_DISTRIBUTED_MODE_ENABLED
value: "no"
- name: MINIO_SCHEME
value: "http"
- name: MINIO_FORCE_NEW_KEYS
value: "no"
- name: MINIO_ROOT_USER_FILE
value: /opt/bitnami/minio/secrets/s3_aws_access_key_id
- name: MINIO_ROOT_PASSWORD_FILE
value: /opt/bitnami/minio/secrets/s3_aws_secret_access_key
- name: MINIO_SKIP_CLIENT
value: "no"
- name: MINIO_DEFAULT_BUCKETS
value: onyx-file-store-bucket
- name: MINIO_API_PORT_NUMBER
value: "9000"
- name: MINIO_BROWSER
value: "off"
- name: MINIO_PROMETHEUS_AUTH_TYPE
value: "public"
- name: MINIO_DATA_DIR
value: "/bitnami/minio/data"
ports:
- name: api
containerPort: 9000
livenessProbe:
httpGet:
path: /minio/health/live
port: api
scheme: "HTTP"
initialDelaySeconds: 5
periodSeconds: 5
timeoutSeconds: 5
successThreshold: 1
failureThreshold: 5
readinessProbe:
tcpSocket:
port: api
initialDelaySeconds: 5
periodSeconds: 5
timeoutSeconds: 1
successThreshold: 1
failureThreshold: 5
resources:
limits:
cpu: 500m
memory: 1Gi
requests:
cpu: 250m
memory: 512Mi
volumeMounts:
- name: empty-dir
mountPath: /tmp
subPath: tmp-dir
- name: empty-dir
mountPath: /opt/bitnami/minio/tmp
subPath: app-tmp-dir
- name: empty-dir
mountPath: /.mc
subPath: app-mc-dir
- name: minio-credentials
mountPath: /opt/bitnami/minio/secrets/
- name: data
mountPath: /bitnami/minio/data
volumes:
- name: empty-dir
emptyDir: {}
- name: minio-credentials
secret:
secretName: onyx-secrets
- name: data
persistentVolumeClaim:
claimName: onyx-stack-sb-6ck-minio
---
# Source: onyx-stack/charts/minio/templates/console/deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-minio-console
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: minio
app.kubernetes.io/version: 2.0.1
helm.sh/chart: minio-17.0.4
app.kubernetes.io/component: console
app.kubernetes.io/part-of: minio
spec:
replicas: 1
strategy:
type: RollingUpdate
selector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: minio
app.kubernetes.io/component: console
app.kubernetes.io/part-of: minio
template:
metadata:
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: minio
app.kubernetes.io/version: 2025.5.24
helm.sh/chart: minio-17.0.4
app.kubernetes.io/component: console
app.kubernetes.io/part-of: minio
spec:
serviceAccountName: onyx-stack-sb-6ck-minio
automountServiceAccountToken: false
affinity:
podAffinity:
podAntiAffinity:
preferredDuringSchedulingIgnoredDuringExecution:
- podAffinityTerm:
labelSelector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: minio
app.kubernetes.io/component: console
topologyKey: kubernetes.io/hostname
weight: 1
nodeAffinity:
securityContext:
fsGroup: 1001
fsGroupChangePolicy: Always
supplementalGroups: []
sysctls: []
containers:
- name: console
image: docker.io/bitnami/minio-object-browser:2.0.1-debian-12-r1
imagePullPolicy: IfNotPresent
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
privileged: false
readOnlyRootFilesystem: true
runAsGroup: 1001
runAsNonRoot: true
runAsUser: 1001
seLinuxOptions: {}
seccompProfile:
type: RuntimeDefault
args:
- server
- --host
- "0.0.0.0"
- --port
- "9090"
env:
- name: CONSOLE_MINIO_SERVER
value: "http://onyx-stack-sb-6ck-minio:9000"
resources:
limits:
cpu: 150m
ephemeral-storage: 2Gi
memory: 192Mi
requests:
cpu: 100m
ephemeral-storage: 50Mi
memory: 128Mi
ports:
- name: http
containerPort: 9090
livenessProbe:
failureThreshold: 5
initialDelaySeconds: 5
periodSeconds: 5
successThreshold: 1
timeoutSeconds: 5
tcpSocket:
port: http
readinessProbe:
failureThreshold: 5
initialDelaySeconds: 5
periodSeconds: 5
successThreshold: 1
timeoutSeconds: 5
httpGet:
path: /minio
port: http
volumeMounts:
- name: empty-dir
mountPath: /tmp
subPath: tmp-dir
- name: empty-dir
mountPath: /.console
subPath: app-console-dir
volumes:
- name: empty-dir
emptyDir: {}
---
# Source: onyx-stack/charts/nginx/templates/deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-nginx
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: nginx
app.kubernetes.io/version: 1.25.4
helm.sh/chart: nginx-15.14.0
spec:
replicas: 1
revisionHistoryLimit: 10
strategy:
rollingUpdate: {}
type: RollingUpdate
selector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: nginx
template:
metadata:
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: nginx
app.kubernetes.io/version: 1.25.4
helm.sh/chart: nginx-15.14.0
annotations:
spec:
shareProcessNamespace: false
serviceAccountName: onyx-stack-sb-6ck-nginx
automountServiceAccountToken: false
affinity:
podAffinity:
podAntiAffinity:
preferredDuringSchedulingIgnoredDuringExecution:
- podAffinityTerm:
labelSelector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: nginx
topologyKey: kubernetes.io/hostname
weight: 1
nodeAffinity:
hostNetwork: false
hostIPC: false
securityContext:
fsGroup: 1001
fsGroupChangePolicy: Always
supplementalGroups: []
sysctls: []
initContainers:
- name: preserve-logs-symlinks
image: docker.io/bitnami/nginx:1.25.4-debian-12-r3
imagePullPolicy: "IfNotPresent"
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
privileged: false
readOnlyRootFilesystem: false
runAsGroup: 0
runAsNonRoot: true
runAsUser: 1001
seccompProfile:
type: RuntimeDefault
resources:
limits:
cpu: 200m
memory: 256Mi
requests:
cpu: 100m
memory: 128Mi
command:
- /bin/bash
args:
- -ec
- |
#!/bin/bash
. /opt/bitnami/scripts/libfs.sh
# We copy the logs folder because it has symlinks to stdout and stderr
if ! is_dir_empty /opt/bitnami/nginx/logs; then
cp -r /opt/bitnami/nginx/logs /emptydir/app-logs-dir
fi
volumeMounts:
- name: empty-dir
mountPath: /emptydir
containers:
- name: nginx
image: docker.io/bitnami/nginx:1.25.4-debian-12-r3
imagePullPolicy: "IfNotPresent"
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
privileged: false
readOnlyRootFilesystem: false
runAsGroup: 0
runAsNonRoot: true
runAsUser: 1001
seccompProfile:
type: RuntimeDefault
env:
- name: BITNAMI_DEBUG
value: "false"
- name: NGINX_HTTP_PORT_NUMBER
value: "1024"
- name: DOMAIN
value: localhost
envFrom:
ports:
- name: http
containerPort: 1024
livenessProbe:
failureThreshold: 6
initialDelaySeconds: 30
periodSeconds: 10
successThreshold: 1
timeoutSeconds: 5
tcpSocket:
port: http
readinessProbe:
failureThreshold: 3
initialDelaySeconds: 5
periodSeconds: 5
successThreshold: 1
timeoutSeconds: 3
tcpSocket:
port: http
resources:
limits:
cpu: 200m
memory: 256Mi
requests:
cpu: 100m
memory: 128Mi
volumeMounts:
- name: empty-dir
mountPath: /tmp
subPath: tmp-dir
- name: empty-dir
mountPath: /opt/bitnami/nginx/conf
subPath: app-conf-dir
- name: empty-dir
mountPath: /opt/bitnami/nginx/logs
subPath: app-logs-dir
- name: empty-dir
mountPath: /opt/bitnami/nginx/tmp
subPath: app-tmp-dir
- name: nginx-server-block
mountPath: /opt/bitnami/nginx/conf/server_blocks
volumes:
- name: empty-dir
emptyDir: {}
- name: nginx-server-block
configMap:
name: onyx-nginx-conf
---
# Source: onyx-stack/templates/api-deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-api-server
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
spec:
replicas: 1
selector:
matchLabels:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: api-server
template:
metadata:
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
app: api-server
scope: onyx-backend
spec:
serviceAccountName: default
securityContext:
{}
containers:
- name: api-server
securityContext:
{}
image: "onyxdotapp/onyx-backend:latest"
imagePullPolicy: IfNotPresent
command:
- "/bin/sh"
- "-c"
- |
alembic upgrade head &&
echo "Starting Onyx Api Server" &&
uvicorn onyx.main:app --host 0.0.0.0 --port 8080
ports:
- name: api-server-port
containerPort: 8080
protocol: TCP
resources:
limits:
cpu: 1000m
memory: 2Gi
requests:
cpu: 500m
memory: 1Gi
envFrom:
- configMapRef:
name: env-configmap
env:
- name: "OAUTH_CLIENT_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_id
- name: "OAUTH_CLIENT_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_secret
- name: "OAUTH_COOKIE_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_cookie_secret
- name: "POSTGRES_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: postgres_password
- name: "REDIS_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: "S3_AWS_ACCESS_KEY_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_access_key_id
- name: "S3_AWS_SECRET_ACCESS_KEY"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_secret_access_key
- name: "SMTP_PASS"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: smtp_pass
---
# Source: onyx-stack/templates/celery-beat.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-celery-beat
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
spec:
replicas: 1
selector:
matchLabels:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: celery-beat
template:
metadata:
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
app: celery-beat
scope: onyx-backend-celery
spec:
serviceAccountName: default
securityContext:
{}
containers:
- name: celery-beat
securityContext:
privileged: true
runAsUser: 0
image: "onyxdotapp/onyx-backend:latest"
imagePullPolicy: IfNotPresent
command:
[
"celery",
"-A",
"onyx.background.celery.versioned_apps.beat",
"beat",
"--loglevel=INFO",
]
resources:
limits:
cpu: 1000m
memory: 1Gi
requests:
cpu: 1000m
memory: 1Gi
envFrom:
- configMapRef:
name: env-configmap
env:
- name: "OAUTH_CLIENT_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_id
- name: "OAUTH_CLIENT_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_secret
- name: "OAUTH_COOKIE_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_cookie_secret
- name: "POSTGRES_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: postgres_password
- name: "REDIS_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: "S3_AWS_ACCESS_KEY_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_access_key_id
- name: "S3_AWS_SECRET_ACCESS_KEY"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_secret_access_key
- name: "SMTP_PASS"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: smtp_pass
startupProbe:
exec:
command:
- test
- -f
- /app/onyx/main.py
failureThreshold: 24
periodSeconds: 5
timeoutSeconds: 3
readinessProbe:
failureThreshold: 24
initialDelaySeconds: 15
periodSeconds: 5
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe readiness
--filename /tmp/onyx_k8s_beat_readiness.txt
livenessProbe:
failureThreshold: 5
initialDelaySeconds: 60
periodSeconds: 60
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe liveness
--filename /tmp/onyx_k8s_beat_liveness.txt
---
# Source: onyx-stack/templates/celery-worker-docfetching.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-celery-worker-docfetching
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
spec:
replicas: 0
selector:
matchLabels:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: celery-worker-docfetching
template:
metadata:
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
app: celery-worker-docfetching
scope: onyx-backend-celery
spec:
serviceAccountName: default
securityContext:
{}
containers:
- name: celery-worker-docfetching
securityContext:
privileged: true
runAsUser: 0
image: "onyxdotapp/onyx-backend:latest"
imagePullPolicy: IfNotPresent
command:
[
"celery",
"-A",
"onyx.background.celery.versioned_apps.docfetching",
"worker",
"--pool=threads",
"--concurrency=2",
"--prefetch-multiplier=1",
"--loglevel=INFO",
"--hostname=docfetching@%n",
"-Q",
"connector_doc_fetching,user_files_indexing",
]
resources:
limits:
cpu: 2000m
memory: 16Gi
requests:
cpu: 500m
memory: 8Gi
envFrom:
- configMapRef:
name: env-configmap
env:
- name: "OAUTH_CLIENT_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_id
- name: "OAUTH_CLIENT_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_secret
- name: "OAUTH_COOKIE_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_cookie_secret
- name: "POSTGRES_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: postgres_password
- name: "REDIS_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: "S3_AWS_ACCESS_KEY_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_access_key_id
- name: "S3_AWS_SECRET_ACCESS_KEY"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_secret_access_key
- name: "SMTP_PASS"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: smtp_pass
startupProbe:
exec:
command:
- test
- -f
- /app/onyx/main.py
failureThreshold: 24
periodSeconds: 5
timeoutSeconds: 3
readinessProbe:
failureThreshold: 24
initialDelaySeconds: 15
periodSeconds: 5
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe readiness
--filename /tmp/onyx_k8s_docfetching_readiness.txt
livenessProbe:
failureThreshold: 5
initialDelaySeconds: 60
periodSeconds: 60
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe liveness
--filename /tmp/onyx_k8s_docfetching_liveness.txt
---
# Source: onyx-stack/templates/celery-worker-docprocessing.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-celery-worker-docprocessing
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
spec:
replicas: 0
selector:
matchLabels:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: celery-worker-docprocessing
template:
metadata:
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
app: celery-worker-docprocessing
scope: onyx-backend-celery
spec:
serviceAccountName: default
securityContext:
{}
containers:
- name: celery-worker-docprocessing
securityContext:
privileged: true
runAsUser: 0
image: "onyxdotapp/onyx-backend:latest"
imagePullPolicy: IfNotPresent
command:
[
"celery",
"-A",
"onyx.background.celery.versioned_apps.docprocessing",
"worker",
"--pool=threads",
"--concurrency=6",
"--prefetch-multiplier=1",
"--loglevel=INFO",
"--hostname=docprocessing@%n",
"-Q",
"docprocessing",
]
resources:
limits:
cpu: 1000m
memory: 12Gi
requests:
cpu: 500m
memory: 4Gi
envFrom:
- configMapRef:
name: env-configmap
env:
- name: ENABLE_MULTIPASS_INDEXING
value: ""
- name: "OAUTH_CLIENT_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_id
- name: "OAUTH_CLIENT_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_secret
- name: "OAUTH_COOKIE_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_cookie_secret
- name: "POSTGRES_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: postgres_password
- name: "REDIS_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: "S3_AWS_ACCESS_KEY_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_access_key_id
- name: "S3_AWS_SECRET_ACCESS_KEY"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_secret_access_key
- name: "SMTP_PASS"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: smtp_pass
startupProbe:
exec:
command:
- test
- -f
- /app/onyx/main.py
failureThreshold: 24
periodSeconds: 5
timeoutSeconds: 3
readinessProbe:
failureThreshold: 24
initialDelaySeconds: 15
periodSeconds: 5
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe readiness
--filename /tmp/onyx_k8s_docprocessing_readiness.txt
livenessProbe:
failureThreshold: 5
initialDelaySeconds: 60
periodSeconds: 60
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe liveness
--filename /tmp/onyx_k8s_docprocessing_liveness.txt
---
# Source: onyx-stack/templates/celery-worker-heavy.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-celery-worker-heavy
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
spec:
replicas: 0
selector:
matchLabels:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: celery-worker-heavy
template:
metadata:
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
app: celery-worker-heavy
scope: onyx-backend-celery
spec:
serviceAccountName: default
securityContext:
{}
containers:
- name: celery-worker-heavy
securityContext:
privileged: true
runAsUser: 0
image: "onyxdotapp/onyx-backend:latest"
imagePullPolicy: IfNotPresent
command:
[
"celery",
"-A",
"onyx.background.celery.versioned_apps.heavy",
"worker",
"--loglevel=INFO",
"--hostname=heavy@%n",
"-Q",
"connector_pruning,connector_doc_permissions_sync,connector_external_group_sync,csv_generation",
]
resources:
limits:
cpu: 2500m
memory: 5Gi
requests:
cpu: 1000m
memory: 2Gi
envFrom:
- configMapRef:
name: env-configmap
env:
- name: "OAUTH_CLIENT_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_id
- name: "OAUTH_CLIENT_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_secret
- name: "OAUTH_COOKIE_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_cookie_secret
- name: "POSTGRES_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: postgres_password
- name: "REDIS_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: "S3_AWS_ACCESS_KEY_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_access_key_id
- name: "S3_AWS_SECRET_ACCESS_KEY"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_secret_access_key
- name: "SMTP_PASS"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: smtp_pass
startupProbe:
exec:
command:
- test
- -f
- /app/onyx/main.py
failureThreshold: 24
periodSeconds: 5
timeoutSeconds: 3
readinessProbe:
failureThreshold: 24
initialDelaySeconds: 15
periodSeconds: 5
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe readiness
--filename /tmp/onyx_k8s_heavy_readiness.txt
livenessProbe:
failureThreshold: 5
initialDelaySeconds: 60
periodSeconds: 60
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe liveness
--filename /tmp/onyx_k8s_heavy_liveness.txt
---
# Source: onyx-stack/templates/celery-worker-light.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-celery-worker-light
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
spec:
replicas: 0
selector:
matchLabels:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: celery-worker-light
template:
metadata:
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
app: celery-worker-light
scope: onyx-backend-celery
spec:
serviceAccountName: default
securityContext:
{}
containers:
- name: celery-worker-light
securityContext:
privileged: true
runAsUser: 0
image: "onyxdotapp/onyx-backend:latest"
imagePullPolicy: IfNotPresent
command:
[
"celery",
"-A",
"onyx.background.celery.versioned_apps.light",
"worker",
"--loglevel=INFO",
"--hostname=light@%n",
"-Q",
"vespa_metadata_sync,connector_deletion,doc_permissions_upsert,checkpoint_cleanup",
]
resources:
limits:
cpu: 2000m
memory: 4Gi
requests:
cpu: 1000m
memory: 1Gi
envFrom:
- configMapRef:
name: env-configmap
env:
- name: "OAUTH_CLIENT_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_id
- name: "OAUTH_CLIENT_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_secret
- name: "OAUTH_COOKIE_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_cookie_secret
- name: "POSTGRES_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: postgres_password
- name: "REDIS_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: "S3_AWS_ACCESS_KEY_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_access_key_id
- name: "S3_AWS_SECRET_ACCESS_KEY"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_secret_access_key
- name: "SMTP_PASS"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: smtp_pass
startupProbe:
exec:
command:
- test
- -f
- /app/onyx/main.py
failureThreshold: 24
periodSeconds: 5
timeoutSeconds: 3
readinessProbe:
failureThreshold: 24
initialDelaySeconds: 15
periodSeconds: 5
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe readiness
--filename /tmp/onyx_k8s_light_readiness.txt
livenessProbe:
failureThreshold: 5
initialDelaySeconds: 60
periodSeconds: 60
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe liveness
--filename /tmp/onyx_k8s_light_liveness.txt
---
# Source: onyx-stack/templates/celery-worker-monitoring.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-celery-worker-monitoring
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
spec:
replicas: 0
selector:
matchLabels:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: celery-worker-monitoring
template:
metadata:
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
app: celery-worker-monitoring
scope: onyx-backend-celery
spec:
serviceAccountName: default
securityContext:
{}
containers:
- name: celery-worker-monitoring
securityContext:
privileged: true
runAsUser: 0
image: "onyxdotapp/onyx-backend:latest"
imagePullPolicy: IfNotPresent
command:
[
"celery",
"-A",
"onyx.background.celery.versioned_apps.monitoring",
"worker",
"--loglevel=INFO",
"--hostname=monitoring@%n",
"-Q",
"monitoring",
]
resources:
limits:
cpu: 2000m
memory: 4Gi
requests:
cpu: 500m
memory: 1Gi
envFrom:
- configMapRef:
name: env-configmap
env:
- name: "OAUTH_CLIENT_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_id
- name: "OAUTH_CLIENT_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_secret
- name: "OAUTH_COOKIE_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_cookie_secret
- name: "POSTGRES_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: postgres_password
- name: "REDIS_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: "S3_AWS_ACCESS_KEY_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_access_key_id
- name: "S3_AWS_SECRET_ACCESS_KEY"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_secret_access_key
- name: "SMTP_PASS"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: smtp_pass
startupProbe:
exec:
command:
- test
- -f
- /app/onyx/main.py
failureThreshold: 24
periodSeconds: 5
timeoutSeconds: 3
readinessProbe:
failureThreshold: 24
initialDelaySeconds: 15
periodSeconds: 5
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe readiness
--filename /tmp/onyx_k8s_monitoring_readiness.txt
livenessProbe:
failureThreshold: 5
initialDelaySeconds: 60
periodSeconds: 60
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe liveness
--filename /tmp/onyx_k8s_monitoring_liveness.txt
---
# Source: onyx-stack/templates/celery-worker-primary.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-celery-worker-primary
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
spec:
replicas: 0
selector:
matchLabels:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: celery-worker-primary
template:
metadata:
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
app: celery-worker-primary
scope: onyx-backend-celery
spec:
serviceAccountName: default
securityContext:
{}
containers:
- name: celery-worker-primary
securityContext:
privileged: true
runAsUser: 0
image: "onyxdotapp/onyx-backend:latest"
imagePullPolicy: IfNotPresent
command:
[
"celery",
"-A",
"onyx.background.celery.versioned_apps.primary",
"worker",
"--loglevel=INFO",
"--hostname=primary@%n",
"-Q",
"celery,periodic_tasks",
]
resources:
limits:
cpu: 2000m
memory: 16Gi
requests:
cpu: 1000m
memory: 8Gi
envFrom:
- configMapRef:
name: env-configmap
env:
- name: "OAUTH_CLIENT_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_id
- name: "OAUTH_CLIENT_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_secret
- name: "OAUTH_COOKIE_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_cookie_secret
- name: "POSTGRES_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: postgres_password
- name: "REDIS_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: "S3_AWS_ACCESS_KEY_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_access_key_id
- name: "S3_AWS_SECRET_ACCESS_KEY"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_secret_access_key
- name: "SMTP_PASS"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: smtp_pass
startupProbe:
exec:
command:
- test
- -f
- /app/onyx/main.py
failureThreshold: 24
periodSeconds: 5
timeoutSeconds: 3
readinessProbe:
failureThreshold: 24
initialDelaySeconds: 15
periodSeconds: 5
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe readiness
--filename /tmp/onyx_k8s_primary_readiness.txt
livenessProbe:
failureThreshold: 5
initialDelaySeconds: 60
periodSeconds: 60
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe liveness
--filename /tmp/onyx_k8s_primary_liveness.txt
---
# Source: onyx-stack/templates/celery-worker-user-files-indexing.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-celery-worker-user-files-indexing
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
spec:
replicas: 0
selector:
matchLabels:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: celery-worker-user-files-indexing
template:
metadata:
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
app: celery-worker-user-files-indexing
scope: onyx-backend-celery
spec:
serviceAccountName: default
securityContext:
{}
containers:
- name: celery-worker-user-files-indexing
securityContext:
privileged: true
runAsUser: 0
image: "onyxdotapp/onyx-backend:latest"
imagePullPolicy: IfNotPresent
command:
[
"celery",
"-A",
"onyx.background.celery.versioned_apps.docprocessing",
"worker",
"--loglevel=INFO",
"--hostname=user-files-indexing@%n",
"-Q",
"user_files_indexing",
]
resources:
limits:
cpu: 4000m
memory: 12Gi
requests:
cpu: 2000m
memory: 6Gi
envFrom:
- configMapRef:
name: env-configmap
env:
- name: "OAUTH_CLIENT_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_id
- name: "OAUTH_CLIENT_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_secret
- name: "OAUTH_COOKIE_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_cookie_secret
- name: "POSTGRES_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: postgres_password
- name: "REDIS_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: "S3_AWS_ACCESS_KEY_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_access_key_id
- name: "S3_AWS_SECRET_ACCESS_KEY"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_secret_access_key
- name: "SMTP_PASS"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: smtp_pass
startupProbe:
exec:
command:
- test
- -f
- /app/onyx/main.py
failureThreshold: 24
periodSeconds: 5
timeoutSeconds: 3
readinessProbe:
failureThreshold: 24
initialDelaySeconds: 15
periodSeconds: 5
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe readiness
--filename /tmp/onyx_k8s_userfilesindexing_readiness.txt
livenessProbe:
failureThreshold: 5
initialDelaySeconds: 60
periodSeconds: 60
timeoutSeconds: 3
exec:
command:
- /bin/bash
- -c
- >
python onyx/background/celery/celery_k8s_probe.py
--probe liveness
--filename /tmp/onyx_k8s_userfilesindexing_liveness.txt
---
# Source: onyx-stack/templates/indexing-model-deployment.yaml
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-indexing-model
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
spec:
replicas: 0
selector:
matchLabels:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: indexing-model-server
template:
metadata:
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
app: indexing-model-server
spec:
containers:
- name: indexing-model-server
image: "onyxdotapp/onyx-model-server:latest"
imagePullPolicy: IfNotPresent
command: [ "uvicorn", "model_server.main:app", "--host", "0.0.0.0", "--port", "9000", "--limit-concurrency", "10" ]
ports:
- name: model-server
containerPort: 9000
protocol: TCP
envFrom:
- configMapRef:
name: env-configmap
env:
- name: INDEXING_ONLY
value: "True"
- name: "OAUTH_CLIENT_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_id
- name: "OAUTH_CLIENT_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_secret
- name: "OAUTH_COOKIE_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_cookie_secret
- name: "POSTGRES_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: postgres_password
- name: "REDIS_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: "S3_AWS_ACCESS_KEY_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_access_key_id
- name: "S3_AWS_SECRET_ACCESS_KEY"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_secret_access_key
- name: "SMTP_PASS"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: smtp_pass
resources:
limits:
cpu: 4000m
memory: 10Gi
requests:
cpu: 2000m
memory: 6Gi
---
# Source: onyx-stack/templates/inference-model-deployment.yaml
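# Inference counterpart of the model server above: same image and port 9000, but
# without INDEXING_ONLY and without a concurrency limit; also rendered with
# replicas: 0 in this output.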
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-inference-model
labels:
app: inference-model-server
spec:
replicas: 0
selector:
matchLabels:
app: inference-model-server
template:
metadata:
labels:
app: inference-model-server
spec:
containers:
- name: model-server-inference
image: "onyxdotapp/onyx-model-server:latest"
imagePullPolicy: IfNotPresent
command: [ "uvicorn", "model_server.main:app", "--host", "0.0.0.0", "--port", "9000" ]
ports:
- name: model-server
containerPort: 9000
protocol: TCP
envFrom:
- configMapRef:
name: env-configmap
env:
- name: "OAUTH_CLIENT_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_id
- name: "OAUTH_CLIENT_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_secret
- name: "OAUTH_COOKIE_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_cookie_secret
- name: "POSTGRES_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: postgres_password
- name: "REDIS_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: "S3_AWS_ACCESS_KEY_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_access_key_id
- name: "S3_AWS_SECRET_ACCESS_KEY"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_secret_access_key
- name: "SMTP_PASS"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: smtp_pass
resources:
limits:
cpu: 4000m
memory: 10Gi
requests:
cpu: 2000m
memory: 6Gi
---
# Source: onyx-stack/templates/slackbot.yaml
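# The Slack bot runs listener.py directly and exposes no container ports, so it
# presumably holds an outbound connection to Slack rather than serving traffic.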
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-slackbot
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
spec:
selector:
matchLabels:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: slack-bot
template:
metadata:
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
app: slack-bot
scope: onyx-backend
spec:
serviceAccountName: default
securityContext:
{}
containers:
- name: slackbot
securityContext:
{}
image: "onyxdotapp/onyx-backend:latest"
imagePullPolicy: IfNotPresent
command: ["python", "onyx/onyxbot/slack/listener.py"]
resources:
limits:
cpu: 1000m
memory: 2000Mi
requests:
cpu: 500m
memory: 500Mi
envFrom:
- configMapRef:
name: env-configmap
env:
- name: "OAUTH_CLIENT_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_id
- name: "OAUTH_CLIENT_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_secret
- name: "OAUTH_COOKIE_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_cookie_secret
- name: "POSTGRES_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: postgres_password
- name: "REDIS_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: "S3_AWS_ACCESS_KEY_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_access_key_id
- name: "S3_AWS_SECRET_ACCESS_KEY"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_secret_access_key
- name: "SMTP_PASS"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: smtp_pass
---
# Source: onyx-stack/templates/webserver-deployment.yaml
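# Web frontend serving on containerPort 3000; the chart's Helm test Pod at the end
# of this file polls this port via the onyx-stack-sb-6ck-webserver Service name.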
apiVersion: apps/v1
kind: Deployment
metadata:
name: onyx-stack-sb-6ck-web-server
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
spec:
replicas: 1
selector:
matchLabels:
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app: web-server
template:
metadata:
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
app: web-server
spec:
serviceAccountName: default
securityContext:
{}
containers:
- name: web-server
securityContext:
{}
image: "onyxdotapp/onyx-web-server:latest"
imagePullPolicy: IfNotPresent
ports:
- name: http
containerPort: 3000
protocol: TCP
resources:
limits:
cpu: 500m
memory: 1Gi
requests:
cpu: 250m
memory: 512Mi
envFrom:
- configMapRef:
name: env-configmap
env:
- name: "OAUTH_CLIENT_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_id
- name: "OAUTH_CLIENT_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_client_secret
- name: "OAUTH_COOKIE_SECRET"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: oauth_cookie_secret
- name: "POSTGRES_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: postgres_password
- name: "REDIS_PASSWORD"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: redis_password
- name: "S3_AWS_ACCESS_KEY_ID"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_access_key_id
- name: "S3_AWS_SECRET_ACCESS_KEY"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: s3_aws_secret_access_key
- name: "SMTP_PASS"
valueFrom:
secretKeyRef:
name: onyx-secrets
key: smtp_pass
---
# Source: onyx-stack/charts/postgresql/templates/primary/statefulset.yaml
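# Bitnami PostgreSQL primary. POSTGRES_PASSWORD comes from the shared onyx-secrets
# Secret rather than a chart-managed secret, and data persists on a 5Gi "standard"
# PVC declared in the volumeClaimTemplates below.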
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: onyx-stack-sb-6ck-postgresql
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: postgresql
app.kubernetes.io/version: 16.2.0
helm.sh/chart: postgresql-14.3.1
app.kubernetes.io/component: primary
spec:
replicas: 1
serviceName: onyx-stack-sb-6ck-postgresql-hl
updateStrategy:
rollingUpdate: {}
type: RollingUpdate
selector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: postgresql
app.kubernetes.io/component: primary
template:
metadata:
name: onyx-stack-sb-6ck-postgresql
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: postgresql
app.kubernetes.io/version: 16.2.0
helm.sh/chart: postgresql-14.3.1
app.kubernetes.io/component: primary
spec:
serviceAccountName: onyx-stack-sb-6ck-postgresql
automountServiceAccountToken: false
affinity:
podAffinity:
podAntiAffinity:
preferredDuringSchedulingIgnoredDuringExecution:
- podAffinityTerm:
labelSelector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: postgresql
app.kubernetes.io/component: primary
topologyKey: kubernetes.io/hostname
weight: 1
nodeAffinity:
securityContext:
fsGroup: 1001
fsGroupChangePolicy: Always
supplementalGroups: []
sysctls: []
hostNetwork: false
hostIPC: false
containers:
- name: postgresql
image: docker.io/bitnami/postgresql:16.2.0-debian-12-r6
imagePullPolicy: "IfNotPresent"
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
privileged: false
readOnlyRootFilesystem: false
runAsGroup: 0
runAsNonRoot: true
runAsUser: 1001
seccompProfile:
type: RuntimeDefault
env:
- name: BITNAMI_DEBUG
value: "false"
- name: POSTGRESQL_PORT_NUMBER
value: "5432"
- name: POSTGRESQL_VOLUME_DIR
value: "/bitnami/postgresql"
- name: PGDATA
value: "/bitnami/postgresql/data"
# Authentication
- name: POSTGRES_PASSWORD
valueFrom:
secretKeyRef:
name: onyx-secrets
key: postgres_password
# Replication
# Initdb
# Standby
# LDAP
- name: POSTGRESQL_ENABLE_LDAP
value: "no"
# TLS
- name: POSTGRESQL_ENABLE_TLS
value: "no"
# Audit
- name: POSTGRESQL_LOG_HOSTNAME
value: "false"
- name: POSTGRESQL_LOG_CONNECTIONS
value: "false"
- name: POSTGRESQL_LOG_DISCONNECTIONS
value: "false"
- name: POSTGRESQL_PGAUDIT_LOG_CATALOG
value: "off"
# Others
- name: POSTGRESQL_CLIENT_MIN_MESSAGES
value: "error"
- name: POSTGRESQL_SHARED_PRELOAD_LIBRARIES
value: "pgaudit"
ports:
- name: tcp-postgresql
containerPort: 5432
livenessProbe:
failureThreshold: 6
initialDelaySeconds: 30
periodSeconds: 10
successThreshold: 1
timeoutSeconds: 5
exec:
command:
- /bin/sh
- -c
- exec pg_isready -U "postgres" -h 127.0.0.1 -p 5432
readinessProbe:
failureThreshold: 6
initialDelaySeconds: 5
periodSeconds: 10
successThreshold: 1
timeoutSeconds: 5
exec:
command:
- /bin/sh
- -c
- -e
- |
exec pg_isready -U "postgres" -h 127.0.0.1 -p 5432
[ -f /opt/bitnami/postgresql/tmp/.initialized ] || [ -f /bitnami/postgresql/.initialized ]
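# Readiness requires both a successful pg_isready and one of the Bitnami
# ".initialized" marker files above, so the pod is not reported Ready while the
# database is still being initialized.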
resources:
limits:
cpu: "1"
memory: 2Gi
requests:
cpu: 500m
memory: 1Gi
volumeMounts:
- name: empty-dir
mountPath: /tmp
subPath: tmp-dir
- name: empty-dir
mountPath: /opt/bitnami/postgresql/conf
subPath: app-conf-dir
- name: empty-dir
mountPath: /opt/bitnami/postgresql/tmp
subPath: app-tmp-dir
- name: empty-dir
mountPath: /opt/bitnami/postgresql/logs
subPath: app-logs-dir
- name: dshm
mountPath: /dev/shm
- name: data
mountPath: /bitnami/postgresql
volumes:
- name: empty-dir
emptyDir: {}
- name: dshm
emptyDir:
medium: Memory
volumeClaimTemplates:
- apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: data
spec:
accessModes:
- "ReadWriteOnce"
resources:
requests:
storage: "5Gi"
storageClassName: standard
---
# Source: onyx-stack/charts/redis/templates/master/application.yaml
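# Bitnami Redis master. ALLOW_EMPTY_PASSWORD is "yes" and redis-data is an
# emptyDir, so this instance appears to accept unauthenticated connections and its
# data does not survive pod rescheduling.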
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: onyx-stack-sb-6ck-redis-master
namespace: "sandbox-sb-6ckwah035qr3yrfv"
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: redis
app.kubernetes.io/version: 7.4.0
helm.sh/chart: redis-20.1.0
app.kubernetes.io/component: master
spec:
replicas: 1
revisionHistoryLimit: 10
selector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: redis
app.kubernetes.io/component: master
serviceName: onyx-stack-sb-6ck-redis-headless
updateStrategy:
type: RollingUpdate
template:
metadata:
labels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/managed-by: Helm
app.kubernetes.io/name: redis
app.kubernetes.io/version: 7.4.0
helm.sh/chart: redis-20.1.0
app.kubernetes.io/component: master
annotations:
checksum/configmap: 92ae5dd7619fd32fbf0410572e9eb10cfd752899665020f650bfe1f2cb960e7f
checksum/health: aff24913d801436ea469d8d374b2ddb3ec4c43ee7ab24663d5f8ff1a1b6991a9
checksum/scripts: 43cdf68c28f3abe25ce017a82f74dbf2437d1900fd69df51a55a3edf6193d141
checksum/secret: 44136fa355b3678a1146ad16f7e8649e94fb4fc21fe77e8310c060f61caaff8a
spec:
securityContext:
fsGroup: 1001
fsGroupChangePolicy: Always
supplementalGroups: []
sysctls: []
serviceAccountName: onyx-stack-sb-6ck-redis-master
automountServiceAccountToken: false
affinity:
podAffinity:
podAntiAffinity:
preferredDuringSchedulingIgnoredDuringExecution:
- podAffinityTerm:
labelSelector:
matchLabels:
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/name: redis
app.kubernetes.io/component: master
topologyKey: kubernetes.io/hostname
weight: 1
nodeAffinity:
enableServiceLinks: true
terminationGracePeriodSeconds: 30
containers:
- name: redis
image: docker.io/bitnami/redis:7.4.0-debian-12-r2
imagePullPolicy: "IfNotPresent"
securityContext:
allowPrivilegeEscalation: false
capabilities:
drop:
- ALL
readOnlyRootFilesystem: true
runAsGroup: 1001
runAsNonRoot: true
runAsUser: 1001
seLinuxOptions: {}
seccompProfile:
type: RuntimeDefault
command:
- /bin/bash
args:
- -c
- /opt/bitnami/scripts/start-scripts/start-master.sh
env:
- name: BITNAMI_DEBUG
value: "false"
- name: REDIS_REPLICATION_MODE
value: master
- name: ALLOW_EMPTY_PASSWORD
value: "yes"
- name: REDIS_TLS_ENABLED
value: "no"
- name: REDIS_PORT
value: "6379"
ports:
- name: redis
containerPort: 6379
livenessProbe:
initialDelaySeconds: 20
periodSeconds: 5
# One second longer than command timeout should prevent generation of zombie processes.
timeoutSeconds: 6
successThreshold: 1
failureThreshold: 5
exec:
command:
- sh
- -c
- /health/ping_liveness_local.sh 5
readinessProbe:
initialDelaySeconds: 20
periodSeconds: 5
timeoutSeconds: 2
successThreshold: 1
failureThreshold: 5
exec:
command:
- sh
- -c
- /health/ping_readiness_local.sh 1
resources:
limits:
cpu: 500m
memory: 1Gi
requests:
cpu: 250m
memory: 512Mi
volumeMounts:
- name: start-scripts
mountPath: /opt/bitnami/scripts/start-scripts
- name: health
mountPath: /health
- name: redis-data
mountPath: /data
- name: config
mountPath: /opt/bitnami/redis/mounted-etc
- name: empty-dir
mountPath: /opt/bitnami/redis/etc/
subPath: app-conf-dir
- name: empty-dir
mountPath: /tmp
subPath: tmp-dir
volumes:
- name: start-scripts
configMap:
name: onyx-stack-sb-6ck-redis-scripts
defaultMode: 0755
- name: health
configMap:
name: onyx-stack-sb-6ck-redis-health
defaultMode: 0755
- name: config
configMap:
name: onyx-stack-sb-6ck-redis-configuration
- name: empty-dir
emptyDir: {}
- name: redis-data
emptyDir: {}
---
# Source: onyx-stack/charts/vespa/templates/statefulset.yaml
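# Single-node Vespa running privileged as root. VESPA_CONFIGSERVERS points the pod
# at its own DNS name under vespa-service, and index data lives on the 10Gi
# vespa-storage PVC mounted at /opt/vespa/var/.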
apiVersion: apps/v1
kind: StatefulSet
metadata:
name: da-vespa
labels:
app: vespa
spec:
serviceName: vespa-service # This should match service.yaml/metadata/name
replicas: 1
selector:
matchLabels:
app: vespa
app.kubernetes.io/instance: onyx
app.kubernetes.io/name: vespa
template:
metadata:
labels:
app: vespa
app.kubernetes.io/instance: onyx
app.kubernetes.io/name: vespa
spec:
serviceAccountName: default
securityContext:
null
containers:
- name: vespa
securityContext:
privileged: true
runAsUser: 0
image: "vespaengine/vespaengine/vespa:8.526.15"
imagePullPolicy: IfNotPresent
ports:
- containerPort: 19071
- containerPort: 8081
# readinessProbe:
# httpGet:
# path: /state/v1/health
# port: 19071
# scheme: HTTP
resources:
limits:
cpu: 2000m
memory: 4Gi
requests:
cpu: 1000m
memory: 2Gi
volumeMounts:
- mountPath: /opt/vespa/var/
name: vespa-storage
env:
- name: VESPA_CONFIGSERVERS
value: da-vespa-0.vespa-service.sandbox-sb-6ckwah035qr3yrfv.svc.cluster.local
- name: VESPA_SKIP_UPGRADE_CHECK
value: "true"
volumeClaimTemplates:
- metadata:
name: vespa-storage
spec:
accessModes: [ReadWriteOnce]
storageClassName: standard
resources:
requests:
storage: 10Gi
---
# Source: onyx-stack/charts/vespa/templates/tests/test-connection.yaml
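# Helm test hook: a one-shot wget against the Vespa config-server health endpoint
# on port 19071.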
apiVersion: v1
kind: Pod
metadata:
name: onyx-stack-sb-6ck-vespa-test-connection
labels:
helm.sh/chart: vespa-0.2.23
app.kubernetes.io/name: vespa
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "8.526.15"
app.kubernetes.io/managed-by: Helm
annotations:
"helm.sh/hook": test
spec:
containers:
- name: wget
image: busybox
command: ['wget']
args: [da-vespa-0.vespa-service:19071/state/v1/health]
restartPolicy: Never
---
# Source: onyx-stack/templates/tests/test-connection.yaml
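# Helm test hook: retries wget against the web server up to 40 times at 15-second
# intervals (roughly 10 minutes) before failing.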
apiVersion: v1
kind: Pod
metadata:
name: "onyx-stack-sb-6ck-test-connection"
labels:
helm.sh/chart: onyx-stack-0.2.2
app.kubernetes.io/name: onyx-stack
app.kubernetes.io/instance: onyx-stack-sb-6ck
app.kubernetes.io/version: "latest"
app.kubernetes.io/managed-by: Helm
annotations:
"helm.sh/hook": test
spec:
containers:
- name: wget
image: busybox
command:
- /bin/sh
- -c
args:
- |
for i in $(seq 1 40); do
echo "Attempt $i: wget onyx-stack-sb-6ck-webserver:3000"
wget onyx-stack-sb-6ck-webserver:3000 && exit 0
sleep 15
done
echo "Service unavailable after 40 attempts"
exit 1
restartPolicy: Never