infra/stacks/webhook_handler/main.tf
Viktor Barzin 9e4fb23b10 [ci skip] right-size all pod resources based on VPA + live metrics audit
Full cluster resource audit: cross-referenced Goldilocks VPA recommendations,
live kubectl top metrics, and Terraform definitions for 100+ containers.

Critical fixes:
- dashy: CPU throttled at 98% (490m/500m) → 2 CPU limit
- stirling-pdf: CPU throttled at 99.7% (299m/300m) → 2 CPU limit
- traefik auth-proxy/bot-block-proxy: mem limit 32Mi → 128Mi

Added explicit resources to ~40 containers that had none:
- audiobookshelf, changedetection, cyberchef, dawarich, diun, echo,
  excalidraw, freshrss, hackmd, isponsorblocktv, linkwarden, n8n,
  navidrome, ntfy, owntracks, privatebin, send, shadowsocks, tandoor,
  tor-proxy, wealthfolio, networking-toolbox, rybbit, mailserver,
  cloudflared, pgadmin, phpmyadmin, crowdsec-web, xray, wireguard,
  k8s-portal, tuya-bridge, ollama-ui, whisper, piper, immich-server,
  immich-postgresql, osrm-foot

GPU containers: added CPU/mem alongside GPU limits:
- ollama: removed CPU/mem limits (models vary in size), keep GPU only
- frigate: req 500m/2Gi, lim 4/8Gi + GPU
- immich-ml: req 100m/1Gi, lim 2/4Gi + GPU

Right-sized ~25 over-provisioned containers:
- kms-web-page: 500m/512Mi → 50m/64Mi (was using 0m/10Mi)
- onlyoffice: CPU 8 → 2 (VPA upper 45m)
- realestate-crawler-api: CPU 2000m → 250m
- blog/travel-blog/webhook-handler: 500m → 100m
- coturn/health/plotting-book: reduced to match actual usage

Conservative methodology: limits = max(VPA upper * 2, live usage * 2)
2026-03-01 19:18:50 +00:00

200 lines
4.9 KiB
HCL

# Name of the TLS secret replicated into this namespace for ingress.
variable "tls_secret_name" { type = string }

# Shared secret used to authenticate incoming webhook calls.
# Marked sensitive so the value is redacted from plan/apply output.
variable "webhook_handler_secret" {
  type      = string
  sensitive = true
}

# Facebook Messenger webhook verification token.
variable "webhook_handler_fb_verify_token" {
  type      = string
  sensitive = true
}

# Facebook page access token used by the chatbot.
variable "webhook_handler_fb_page_token" {
  type      = string
  sensitive = true
}

# Facebook app secret used to validate webhook signatures.
variable "webhook_handler_fb_app_secret" {
  type      = string
  sensitive = true
}

# Git username for repository operations performed by the handler.
variable "webhook_handler_git_user" { type = string }

# Git access token paired with the user above.
variable "webhook_handler_git_token" {
  type      = string
  sensitive = true
}

# Private SSH key mounted into the pod (see the ssh-key secret below).
variable "webhook_handler_ssh_key" {
  type      = string
  sensitive = true
}
# Dedicated namespace for the webhook-handler workload; the "tier" label
# feeds the cluster-wide tiering scheme defined in locals.
resource "kubernetes_namespace" "webhook-handler" {
  metadata {
    name = "webhook-handler"

    labels = {
      tier = local.tiers.aux
    }
  }
}
# Replicate the shared TLS certificate into this namespace so the ingress
# below can terminate HTTPS.
module "tls_secret" {
  source          = "../../modules/kubernetes/setup_tls_secret"
  namespace       = kubernetes_namespace.webhook-handler.metadata[0].name
  tls_secret_name = var.tls_secret_name
}
# Cluster-wide role letting the webhook handler create/update deployments,
# pods, services and namespaces (used to roll out new images on webhook).
# NOTE(review): the "extensions" API group stopped serving Deployments in
# Kubernetes 1.16 — confirm whether it can be dropped from api_groups.
# NOTE(review): this is cluster-scoped and is bound to the namespace's
# "default" service account below; consider narrowing if least-privilege
# is desired.
resource "kubernetes_cluster_role" "deployment_updater" {
metadata {
name = "deployment-updater"
}
rule {
verbs = ["create", "update", "get", "patch", "list"]
api_groups = ["extensions", "apps", ""]
resources = ["deployments", "namespaces", "pods", "services"]
}
}
# Grant the deployment-updater role to the default service account of the
# webhook-handler namespace, so the pod's in-cluster credentials can
# redeploy workloads.
resource "kubernetes_cluster_role_binding" "update_deployment_binding" {
  metadata {
    name = "update-deployment-binding"
  }

  subject {
    kind      = "ServiceAccount"
    name      = "default"
    namespace = kubernetes_namespace.webhook-handler.metadata[0].name
  }

  role_ref {
    api_group = "rbac.authorization.k8s.io"
    kind      = "ClusterRole"
    # Reference the resource instead of repeating the literal name so
    # Terraform creates the role before the binding and keeps the two in
    # sync if the role is ever renamed.
    name = kubernetes_cluster_role.deployment_updater.metadata[0].name
  }
}
# Private SSH key mounted into the webhook-handler pod (see the volume in
# the deployment below). The reloader annotation lets stakater/Reloader
# match this secret and restart consumers when it changes.
resource "kubernetes_secret" "ssh-key" {
  metadata {
    name      = "ssh-key"
    namespace = kubernetes_namespace.webhook-handler.metadata[0].name
    annotations = {
      "reloader.stakater.com/match" = "true"
    }
  }

  data = {
    "id_rsa" = var.webhook_handler_ssh_key
  }

  # "Opaque" is the built-in Kubernetes type for arbitrary user data.
  # The previous value "generic" is the `kubectl create secret` subcommand,
  # not a Secret type — kubectl's `generic` produces type Opaque.
  type = "Opaque"
}
# Single-replica deployment running the webhook handler. Secrets are passed
# as env vars and the SSH key is mounted from the ssh-key secret.
resource "kubernetes_deployment" "webhook_handler" {
  metadata {
    name      = "webhook-handler"
    namespace = kubernetes_namespace.webhook-handler.metadata[0].name
    labels = {
      app  = "webhook-handler"
      tier = local.tiers.aux
    }
    annotations = {
      # Let stakater/Reloader restart the pod when referenced secrets change.
      "reloader.stakater.com/search" = "true"
    }
  }

  spec {
    replicas = 1

    selector {
      match_labels = {
        app = "webhook-handler"
      }
    }

    template {
      metadata {
        labels = {
          app = "webhook-handler"
        }
      }

      spec {
        container {
          # security_context {
          #   run_as_user = 1000
          # }
          # lifecycle {
          #   post_start {
          #     exec {
          #       # Must be kept in sync with webhook_handler dockerfile
          #       command = ["echo", "\"$SSH_KEY\"", ">", "/opt/id_rsa", "&&", "chown", "appuser", "/opt/id_rsa", "&&", "chmod", "600", "/opt/id_rsa"]
          #     }
          #   }
          # }
          image = "viktorbarzin/webhook-handler:latest"
          name  = "webhook-handler"

          resources {
            limits = {
              cpu    = "100m"
              memory = "256Mi"
            }
            requests = {
              cpu    = "10m"
              memory = "32Mi"
            }
          }

          # NOTE(review): the service below targets port 3000 while this
          # declares 80; container_port is informational to Kubernetes, but
          # confirm which port the app actually listens on.
          port {
            container_port = 80
          }

          volume_mount {
            name       = "id-rsa"
            mount_path = "/opt/id_rsa"
            sub_path   = "id_rsa"
          }

          env {
            name  = "WEBHOOKSECRET"
            value = var.webhook_handler_secret
          }
          env {
            name  = "FB_APP_SECRET"
            value = var.webhook_handler_fb_app_secret
          }
          env {
            name  = "FB_VERIFY_TOKEN"
            value = var.webhook_handler_fb_verify_token
          }
          env {
            name  = "FB_PAGE_TOKEN"
            value = var.webhook_handler_fb_page_token
          }
          env {
            name  = "CONFIG"
            value = "./chatbot/config/viktorwebservices.yaml"
          }
          env {
            name  = "GIT_USER"
            value = var.webhook_handler_git_user
          }
          env {
            name  = "GIT_TOKEN"
            value = var.webhook_handler_git_token
          }
          env {
            # Path where the key is mounted, not the key material itself.
            name  = "SSH_KEY"
            value = "/opt/id_rsa"
          }
        }

        volume {
          name = "id-rsa"

          secret {
            # Reference the secret resource instead of repeating the literal
            # name so Terraform orders the secret before the deployment.
            secret_name = kubernetes_secret.ssh-key.metadata[0].name
          }
        }
      }
    }
  }
}
# ClusterIP service fronting the webhook-handler pods: exposes port 80 and
# forwards traffic to container port 3000.
resource "kubernetes_service" "webhook_handler" {
  metadata {
    name      = "webhook-handler"
    namespace = kubernetes_namespace.webhook-handler.metadata[0].name

    labels = {
      "app" = "webhook-handler"
    }
  }

  spec {
    selector = {
      app = "webhook-handler"
    }

    port {
      port        = "80"
      target_port = "3000"
    }
  }
}
# Publish the service at https://webhook.<domain> via the shared ingress
# factory, terminating TLS with the replicated certificate secret.
module "ingress" {
  source          = "../../modules/kubernetes/ingress_factory"
  namespace       = kubernetes_namespace.webhook-handler.metadata[0].name
  name            = "webhook-handler"
  host            = "webhook"
  tls_secret_name = var.tls_secret_name
}