diff --git a/roles/immich/README.md b/roles/immich/README.md index 2bced73..cc40f55 100644 --- a/roles/immich/README.md +++ b/roles/immich/README.md @@ -222,7 +222,7 @@ immich_nginx_enabled: true immich_nginx_hostname: "blog.hello.com" # In nginx role configuration (host_vars or group_vars) -acme_email: "admin@carabosse.cloud" +acme_email: "admin@blog.com" ``` **What it does:** diff --git a/roles/immich/tasks/main.yml b/roles/immich/tasks/main.yml index 0f28c61..b2f9839 100644 --- a/roles/immich/tasks/main.yml +++ b/roles/immich/tasks/main.yml @@ -108,7 +108,7 @@ - name: Deploy nginx vhost configuration for Immich ansible.builtin.template: src: nginx-vhost.conf.j2 - dest: /etc/nginx/conf.d/immich.conf + dest: "{{ nginx_conf_dir }}/immich.conf" owner: root group: root mode: "0644" @@ -117,7 +117,7 @@ - name: Remove nginx vhost configuration for Immich ansible.builtin.file: - path: /etc/nginx/conf.d/immich.conf + path: "{{ nginx_conf_dir }}/immich.conf" state: absent when: not immich_nginx_enabled notify: Reload nginx diff --git a/roles/immich/templates/immich.service.j2 b/roles/immich/templates/immich.service.j2 index b284b33..77e74e4 100644 --- a/roles/immich/templates/immich.service.j2 +++ b/roles/immich/templates/immich.service.j2 @@ -7,8 +7,8 @@ After=network-online.target Type=oneshot RemainAfterExit=true WorkingDirectory={{ podman_projects_dir }}/immich -ExecStart=/usr/bin/podman compose up -d -ExecStop=/usr/bin/podman compose down +ExecStart=/usr/bin/podman-compose up -d +ExecStop=/usr/bin/podman-compose down Restart=on-failure RestartSec=10 diff --git a/roles/nginx/tasks/main.yml b/roles/nginx/tasks/main.yml index e64010d..be19bdf 100644 --- a/roles/nginx/tasks/main.yml +++ b/roles/nginx/tasks/main.yml @@ -51,11 +51,10 @@ - name: Enable Certbot renewal timer ansible.builtin.systemd: - name: certbot-renew.timer + name: "{{ certbot_timer }}" enabled: true state: started when: acme_email is defined - ignore_errors: true - name: Ensure nginx conf.d directory 
exists ansible.builtin.file: diff --git a/roles/nginx/templates/forwarder.conf.j2 b/roles/nginx/templates/forwarder.conf.j2 index b0ebd23..6b20efb 100644 --- a/roles/nginx/templates/forwarder.conf.j2 +++ b/roles/nginx/templates/forwarder.conf.j2 @@ -3,23 +3,19 @@ # Transparent TCP proxy (no protocol inspection) {% if config.http | default(true) %} -upstream {{ domain | replace('.', '_') | replace('-', '_') }}_http { - server {{ config.forward_to }}:80; -} - server { listen 80; - proxy_pass {{ domain | replace('.', '_') | replace('-', '_') }}_http; + # Variable forces runtime DNS resolution (avoids startup failures); stream "set" needs nginx >= 1.19.3 + set $upstream_http {{ config.forward_to }}; + proxy_pass $upstream_http:80; } {% endif %} {% if config.https | default(true) %} -upstream {{ domain | replace('.', '_') | replace('-', '_') }}_https { - server {{ config.forward_to }}:443; -} - server { listen 443; - proxy_pass {{ domain | replace('.', '_') | replace('-', '_') }}_https; + # Variable forces runtime DNS resolution (avoids startup failures); stream "set" needs nginx >= 1.19.3 + set $upstream_https {{ config.forward_to }}; + proxy_pass $upstream_https:443; } {% endif %} diff --git a/roles/nginx/templates/nginx.conf.j2 b/roles/nginx/templates/nginx.conf.j2 index c982165..1bc610b 100644 --- a/roles/nginx/templates/nginx.conf.j2 +++ b/roles/nginx/templates/nginx.conf.j2 @@ -57,6 +57,11 @@ http { {% if nginx_forwarder and nginx_forwarder | length > 0 %} # Stream block for TCP/UDP proxying stream { + # DNS resolver for runtime hostname resolution + # Using 127.0.0.53 (systemd-resolved stub listener) with 30s cache and 5s timeout + resolver 127.0.0.53 valid=30s ipv6=off; + resolver_timeout 5s; + # Load stream configurations include {{ nginx_streams_dir }}/*.conf; } diff --git a/roles/nginx/vars/archlinux.yml b/roles/nginx/vars/archlinux.yml index eac5938..bf42eab 100644 --- a/roles/nginx/vars/archlinux.yml +++ b/roles/nginx/vars/archlinux.yml @@ -1,2 +1,3 @@ --- nginx_user: http +certbot_timer: certbot-renew.timer diff --git
a/roles/nginx/vars/debian.yml b/roles/nginx/vars/debian.yml index 77369aa..d7c0f01 100644 --- a/roles/nginx/vars/debian.yml +++ b/roles/nginx/vars/debian.yml @@ -1,2 +1,3 @@ --- nginx_user: www-data +certbot_timer: certbot.timer diff --git a/roles/ntfy/meta/main.yml b/roles/ntfy/meta/main.yml index 89d056b..d80fa53 100644 --- a/roles/ntfy/meta/main.yml +++ b/roles/ntfy/meta/main.yml @@ -1,4 +1,3 @@ --- dependencies: - role: podman - - role: nginx diff --git a/roles/ntfy/tasks/main.yml b/roles/ntfy/tasks/main.yml index 9675cd5..8e5e0ee 100644 --- a/roles/ntfy/tasks/main.yml +++ b/roles/ntfy/tasks/main.yml @@ -91,7 +91,7 @@ - name: Deploy nginx vhost configuration for ntfy ansible.builtin.template: src: nginx-vhost.conf.j2 - dest: /etc/nginx/conf.d/ntfy.conf + dest: "{{ nginx_conf_dir }}/ntfy.conf" owner: root group: root mode: "0644" @@ -100,7 +100,7 @@ - name: Remove nginx vhost configuration for ntfy ansible.builtin.file: - path: /etc/nginx/conf.d/ntfy.conf + path: "{{ nginx_conf_dir }}/ntfy.conf" state: absent when: not ntfy_nginx_enabled notify: Reload nginx diff --git a/roles/ntfy/templates/ntfy.service.j2 b/roles/ntfy/templates/ntfy.service.j2 index 13477c5..5372ca8 100644 --- a/roles/ntfy/templates/ntfy.service.j2 +++ b/roles/ntfy/templates/ntfy.service.j2 @@ -7,8 +7,8 @@ After=network-online.target Type=oneshot RemainAfterExit=true WorkingDirectory={{ podman_projects_dir }}/ntfy -ExecStart=/usr/bin/podman compose up -d -ExecStop=/usr/bin/podman compose down +ExecStart=/usr/bin/podman-compose up -d +ExecStop=/usr/bin/podman-compose down Restart=on-failure RestartSec=10 diff --git a/roles/uptime-kuma/README.md b/roles/uptime-kuma/README.md new file mode 100644 index 0000000..0890709 --- /dev/null +++ b/roles/uptime-kuma/README.md @@ -0,0 +1,99 @@ +# uptime-kuma - Self-Hosted Monitoring Tool + +Deploys [Uptime Kuma](https://uptime.kuma.pet/) - a self-hosted monitoring and status page application. 
+ +## Features + +- Website monitoring (HTTP/HTTPS) +- TCP port monitoring +- Ping monitoring +- DNS monitoring +- Status pages +- Notifications (Email, Discord, Slack, ntfy, etc.) +- Multi-language support +- Dark mode + +## Configuration + +### Optional Variables + +See [defaults/main.yml](defaults/main.yml) for all configuration options. + +Key settings: + +```yaml +uptime_kuma_version: "2" +uptime_kuma_port: 3001 +uptime_kuma_data_dir: "{{ podman_projects_dir }}/uptime-kuma/data" + +# Nginx reverse proxy +uptime_kuma_nginx_enabled: false +uptime_kuma_nginx_hostname: uptime.nas.local +``` + +## Storage Requirements + +**CRITICAL:** Uptime Kuma uses SQLite and requires local storage with POSIX file lock support. + +- ✅ **Supported:** Local filesystem, Docker volumes +- ❌ **NOT Supported:** NFS, network filesystems (will cause database corruption) + +## First-Time Setup + +1. Access the web UI: `https://uptime.nas.local` (if nginx enabled) or `http://localhost:3001` +2. Create admin account on first visit +3. No default credentials - account is created during initial setup + +## Usage + +### Adding Monitors + +Web UI → Add New Monitor: +- **Monitor Type:** HTTP(s), TCP Port, Ping, DNS, etc. +- **Friendly Name:** Display name +- **URL/Host:** Target to monitor +- **Heartbeat Interval:** Check frequency (seconds) +- **Retries:** Before marking as down +- **Notifications:** Select notification endpoints + +### Notification Endpoints + +Web UI → Settings → Notifications: +- Email (SMTP) +- Discord, Slack, Telegram +- ntfy (recommended for local notifications) +- Webhooks +- 50+ integrations available + +### Status Pages + +Create public or password-protected status pages showing monitor health. + +Web UI → Status Pages → New Status Page + +## Integration with ntfy + +If you deployed the `ntfy` role: + +1. Settings → Notifications → Add +2. Type: ntfy +3. ntfy Server URL: `https://ntfy.jokester.fr` +4. Topic: `uptime-alerts` +5. Username: `admin` +6. 
Password: Your ntfy admin password +7. Test notification + +## File Locations + +- Data directory: `{{ uptime_kuma_data_dir }}` +- SQLite database: `{{ uptime_kuma_data_dir }}/kuma.db` + +## Dependencies + +- podman +- nginx (if `uptime_kuma_nginx_enabled: true`) + +## Sources + +- [Install Uptime Kuma using Docker](https://uptimekuma.org/install-uptime-kuma-docker/) +- [Uptime Kuma GitHub Wiki](https://github.com/louislam/uptime-kuma/wiki) diff --git a/roles/uptime-kuma/defaults/main.yml b/roles/uptime-kuma/defaults/main.yml new file mode 100644 index 0000000..b15e673 --- /dev/null +++ b/roles/uptime-kuma/defaults/main.yml @@ -0,0 +1,20 @@ +--- +# Uptime Kuma version to deploy +uptime_kuma_version: "2" + +# Storage location (CRITICAL: Must be local storage, NOT NFS) +# NFS is NOT supported - will cause SQLite corruption +uptime_kuma_data_dir: "{{ podman_projects_dir }}/uptime-kuma/data" + +# Network configuration +uptime_kuma_port: 3001 + +# Container image +uptime_kuma_image: louislam/uptime-kuma + +# Timezone +uptime_kuma_timezone: UTC + +# Nginx reverse proxy configuration +uptime_kuma_nginx_enabled: false +uptime_kuma_nginx_hostname: uptime.nas.local diff --git a/roles/uptime-kuma/handlers/main.yml b/roles/uptime-kuma/handlers/main.yml new file mode 100644 index 0000000..78ce0e3 --- /dev/null +++ b/roles/uptime-kuma/handlers/main.yml @@ -0,0 +1,15 @@ +--- +- name: Reload systemd + ansible.builtin.systemd: + daemon_reload: true + +- name: Restart uptime-kuma + ansible.builtin.systemd: + name: uptime-kuma + state: restarted + daemon_reload: true + +- name: Reload nginx + ansible.builtin.systemd: + name: nginx + state: reloaded diff --git a/roles/uptime-kuma/meta/main.yml b/roles/uptime-kuma/meta/main.yml new file mode 100644 index 0000000..d80fa53 --- /dev/null +++ b/roles/uptime-kuma/meta/main.yml @@ -0,0 +1,3 @@ +--- +dependencies: + - role: podman diff --git a/roles/uptime-kuma/tasks/main.yml b/roles/uptime-kuma/tasks/main.yml new file mode 100644 index 
0000000..c78089c --- /dev/null +++ b/roles/uptime-kuma/tasks/main.yml @@ -0,0 +1,58 @@ +--- +- name: Create uptime-kuma project directory + ansible.builtin.file: + path: "{{ podman_projects_dir }}/uptime-kuma" + state: directory + owner: "{{ ansible_user }}" + group: "{{ ansible_user }}" + mode: "0755" + +- name: Create uptime-kuma data directory + ansible.builtin.file: + path: "{{ uptime_kuma_data_dir }}" + state: directory + owner: "{{ ansible_user }}" + group: "{{ ansible_user }}" + mode: "0755" + +- name: Deploy docker-compose.yml for uptime-kuma + ansible.builtin.template: + src: docker-compose.yml.j2 + dest: "{{ podman_projects_dir }}/uptime-kuma/docker-compose.yml" + owner: "{{ ansible_user }}" + group: "{{ ansible_user }}" + mode: "0644" + notify: Restart uptime-kuma + +- name: Create systemd service for uptime-kuma + ansible.builtin.template: + src: uptime-kuma.service.j2 + dest: /etc/systemd/system/uptime-kuma.service + owner: root + group: root + mode: "0644" + notify: Reload systemd + +- name: Enable and start uptime-kuma service + ansible.builtin.systemd: + name: uptime-kuma + enabled: true + state: started + daemon_reload: true + +- name: Deploy nginx vhost configuration for uptime-kuma + ansible.builtin.template: + src: nginx-vhost.conf.j2 + dest: "{{ nginx_conf_dir }}/uptime-kuma.conf" + owner: root + group: root + mode: "0644" + when: uptime_kuma_nginx_enabled + notify: Reload nginx + +- name: Remove nginx vhost configuration for uptime-kuma + ansible.builtin.file: + path: "{{ nginx_conf_dir }}/uptime-kuma.conf" + state: absent + when: not uptime_kuma_nginx_enabled + notify: Reload nginx diff --git a/roles/uptime-kuma/templates/docker-compose.yml.j2 b/roles/uptime-kuma/templates/docker-compose.yml.j2 new file mode 100644 index 0000000..b9ac172 --- /dev/null +++ b/roles/uptime-kuma/templates/docker-compose.yml.j2 @@ -0,0 +1,13 @@ +--- +services: + uptime-kuma: + container_name: uptime-kuma + image: {{ uptime_kuma_image }}:{{ uptime_kuma_version }} + 
volumes: + - /etc/localtime:/etc/localtime:ro + - {{ uptime_kuma_data_dir }}:/app/data:rw,Z + ports: + - "{{ uptime_kuma_port }}:3001" + restart: unless-stopped + environment: + TZ: {{ uptime_kuma_timezone }} diff --git a/roles/uptime-kuma/templates/nginx-vhost.conf.j2 b/roles/uptime-kuma/templates/nginx-vhost.conf.j2 new file mode 100644 index 0000000..95ac72b --- /dev/null +++ b/roles/uptime-kuma/templates/nginx-vhost.conf.j2 @@ -0,0 +1,54 @@ +# Uptime Kuma vhost with Let's Encrypt (Certbot) +# Managed by Ansible - DO NOT EDIT MANUALLY + +server { + listen 80; + server_name {{ uptime_kuma_nginx_hostname }}; + + # Certbot webroot for ACME challenges + location /.well-known/acme-challenge/ { + root /var/www/certbot; + } + + # Redirect to HTTPS + location / { + return 301 https://$server_name$request_uri; + } +} + +server { + listen 443 ssl; + server_name {{ uptime_kuma_nginx_hostname }}; + + # Let's Encrypt certificates (managed by Certbot) + ssl_certificate /etc/letsencrypt/live/{{ uptime_kuma_nginx_hostname }}/fullchain.pem; + ssl_certificate_key /etc/letsencrypt/live/{{ uptime_kuma_nginx_hostname }}/privkey.pem; + + # SSL configuration + ssl_protocols {{ nginx_ssl_protocols }}; + ssl_prefer_server_ciphers {{ 'on' if nginx_ssl_prefer_server_ciphers else 'off' }}; + +{% if nginx_log_backend == 'journald' %} + access_log syslog:server=unix:/dev/log,nohostname,tag=nginx_uptime_kuma; + error_log syslog:server=unix:/dev/log,nohostname,tag=nginx_uptime_kuma; +{% else %} + access_log /var/log/nginx/{{ uptime_kuma_nginx_hostname }}_access.log main; + error_log /var/log/nginx/{{ uptime_kuma_nginx_hostname }}_error.log; +{% endif %} + + location / { + proxy_pass http://127.0.0.1:{{ uptime_kuma_port }}; + proxy_set_header Host $http_host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + + # WebSocket support for real-time updates + proxy_http_version 1.1; + 
proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection "upgrade"; + + # Disable buffering for real-time updates + proxy_buffering off; + } +} diff --git a/roles/uptime-kuma/templates/uptime-kuma.service.j2 b/roles/uptime-kuma/templates/uptime-kuma.service.j2 new file mode 100644 index 0000000..cd7f41c --- /dev/null +++ b/roles/uptime-kuma/templates/uptime-kuma.service.j2 @@ -0,0 +1,14 @@ +[Unit] +Description=Uptime Kuma Monitoring Service +Requires=network-online.target +After=network-online.target + +[Service] +Type=oneshot +RemainAfterExit=true +WorkingDirectory={{ podman_projects_dir }}/uptime-kuma +ExecStart=/usr/bin/podman-compose up -d +ExecStop=/usr/bin/podman-compose down + +[Install] +WantedBy=multi-user.target