From 4575ec9d3f28d442542f61fb95a3ed48b09f61a8 Mon Sep 17 00:00:00 2001
From: titver968
Date: Fri, 24 Oct 2025 12:53:32 +0200
Subject: [PATCH] plane ohne cloude changes

---
 argocd/apps/plane/values-plane.yaml | 45 ++---------------------------
 1 file changed, 2 insertions(+), 43 deletions(-)

diff --git a/argocd/apps/plane/values-plane.yaml b/argocd/apps/plane/values-plane.yaml
index a286f89..b41e945 100644
--- a/argocd/apps/plane/values-plane.yaml
+++ b/argocd/apps/plane/values-plane.yaml
@@ -57,59 +57,18 @@ spec:
         aws_region: "eu-central-1"
         aws_s3_endpoint_url: "https://sws3.innovation-hub-niedersachsen.de"
 
-      # Celery Worker - Aggressive Memory-Begrenzung
-      CELERY_WORKER_CONCURRENCY: "1"
-      CELERY_WORKER_MAX_TASKS_PER_CHILD: "5"
-      CELERY_WORKER_MAX_MEMORY_PER_CHILD: "400000"
-      CELERY_WORKER_PREFETCH_MULTIPLIER: "1"
-
-      # Task Limits - Sehr restriktiv
-      CELERY_TASK_SOFT_TIME_LIMIT: "120"
-      CELERY_TASK_TIME_LIMIT: "180"
-      CELERY_TASK_ACKS_LATE: "true"
-      CELERY_TASK_REJECT_ON_WORKER_LOST: "true"
-
-      # Task-Routing um problematische Tasks zu isolieren
-      CELERY_TASK_DEFAULT_QUEUE: "default"
-      CELERY_TASK_CREATE_MISSING_QUEUES: "true"
-
-      # Python Memory Management
-      PYTHONMALLOC: "malloc"
-      MALLOC_TRIM_THRESHOLD_: "65536"
-      MALLOC_MMAP_THRESHOLD_: "65536"
-
-      # Logging erhöhen um Problem-Tasks zu identifizieren
-      CELERY_WORKER_LOG_LEVEL: "INFO"
-      CELERY_TASK_LOG_FORMAT: "[%(asctime)s: %(levelname)s/%(processName)s] %(task_name)s[%(task_id)s]: %(message)s - Memory: %(process)s"
-
   worker:
     replicas: 2
     concurrency: 1
     resources:
       requests:
-        memory: "2Gi"
+        memory: "4Gi"
         cpu: "500m"
       limits:
-        memory: "6Gi"
+        memory: "8Gi"
         cpu: "1500m"
 
-    # Health Checks komplett deaktiviert für Debugging
-    readinessProbe:
-      exec:
-        command: ["/bin/true"]
-      initialDelaySeconds: 10
-      periodSeconds: 300
-
-    livenessProbe:
-      exec:
-        command: ["/bin/true"]
-      initialDelaySeconds: 30
-      periodSeconds: 300
-
-    # Graceful Shutdown
-    terminationGracePeriodSeconds: 60
-
   destination:
     server:
       'https://kubernetes.default.svc'
     namespace: plane