From 039a69b38a7f148e2d834d8afae56fd4fd01a021 Mon Sep 17 00:00:00 2001
From: chatton
Date: Sat, 15 Jul 2023 21:39:32 +0100
Subject: [PATCH] add nginx proxy manager

---
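Notes:

With the role defaults below, the stack definition passed to
chatton.portainer.portainer_stack renders to roughly the following
compose file (a sketch with the labels and restart policy omitted,
since backup_labels and restart_policy are variables defined elsewhere
in the repo):

  version: "3"
  services:
    nginx-proxy-manager:
      image: jlesage/nginx-proxy-manager:v23.04.1
      container_name: nginx-proxy-manager
      ports:
        - "8181:8181"
        - "80:8080"
        - "443:4443"
      volumes:
        - "data:/config:rw"
  volumes:
    data:

Compose namespaces volumes with the stack name, so the S3 restore task
in tasks/main.yml expects the Docker volume to be named
nginx_proxy_manager_data.
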
 host_vars/qnap.yml                   |  2 +-
 .../defaults/main.yml                | 11 +++++++
 .../tasks/main.yml                   | 32 +++++++++++++++++++
 .../templates/nginx-proxy-manager.j2 | 18 -----------
 .../templates/uptime-kuma.j2         | 17 ----------
 5 files changed, 44 insertions(+), 36 deletions(-)
 create mode 100644 roles/portainer_nginx_proxy_manager/defaults/main.yml
 create mode 100644 roles/portainer_nginx_proxy_manager/tasks/main.yml
 delete mode 100644 roles/setup_hosted_services/templates/nginx-proxy-manager.j2
 delete mode 100644 roles/setup_hosted_services/templates/uptime-kuma.j2

diff --git a/host_vars/qnap.yml b/host_vars/qnap.yml
index 9845692..aa70c94 100644
--- a/host_vars/qnap.yml
+++ b/host_vars/qnap.yml
@@ -64,7 +64,6 @@ services:
       default_network: mariadb_net
       image: nextcloud
       tag: 27.0
-  - name: nginx-proxy-manager
   - name: plex
     template_vars:
       plex_image: lscr.io/linuxserver/plex
@@ -97,3 +96,4 @@ portainer_roles:
   - role: portainer_pihole
   - role: portainer_overseerr
   - role: portainer_uptime_kuma
+  - role: portainer_nginx_proxy_manager
diff --git a/roles/portainer_nginx_proxy_manager/defaults/main.yml b/roles/portainer_nginx_proxy_manager/defaults/main.yml
new file mode 100644
index 0000000..0ccc529
--- /dev/null
+++ b/roles/portainer_nginx_proxy_manager/defaults/main.yml
@@ -0,0 +1,11 @@
+---
+nginx_proxy_manager_state: present
+nginx_proxy_manager_image: jlesage/nginx-proxy-manager
+nginx_proxy_manager_tag: v23.04.1
+nginx_proxy_manager_http_expose_port: 80
+nginx_proxy_manager_https_expose_port: 443
+nginx_proxy_manager_expose_port: 8181
+nginx_proxy_manager_container_name: nginx-proxy-manager
+nginx_proxy_manager_portainer_stack_name: nginx_proxy_manager
+nginx_proxy_manager_puid: 1000
+nginx_proxy_manager_pgid: 1000
diff --git a/roles/portainer_nginx_proxy_manager/tasks/main.yml b/roles/portainer_nginx_proxy_manager/tasks/main.yml
new file mode 100644
index 0000000..100497b
--- /dev/null
+++ b/roles/portainer_nginx_proxy_manager/tasks/main.yml
@@ -0,0 +1,32 @@
+---
+- name: "Nginx Proxy Manager | Restore any missing volumes from S3"
+  ansible.builtin.include_role:
+    name: chatton.docker_backup.docker_s3_volume_restore
+  vars:
+    docker_backup_s3_volume:
+      name: "{{ nginx_proxy_manager_portainer_stack_name }}_data"
+
+- name: "Nginx Proxy Manager | Update Portainer."
+  chatton.portainer.portainer_stack:
+    username: '{{ portainer_user }}'
+    password: '{{ portainer.password }}'
+    base_url: '{{ portainer_base_url }}'
+    stack_name: '{{ nginx_proxy_manager_portainer_stack_name }}'
+    endpoint_id: '{{ portainer_endpoint }}'
+    state: "{{ nginx_proxy_manager_state }}"
+    definition:
+      version: "3"
+      services:
+        nginx-proxy-manager:
+          labels: "{{ backup_labels }}"
+          image: "{{ nginx_proxy_manager_image }}:{{ nginx_proxy_manager_tag }}"
+          container_name: "{{ nginx_proxy_manager_container_name }}"
+          restart: "{{ restart_policy }}"
+          ports:
+            - "{{ nginx_proxy_manager_expose_port }}:8181"
+            - "{{ nginx_proxy_manager_http_expose_port }}:8080"
+            - "{{ nginx_proxy_manager_https_expose_port }}:4443"
+          volumes:
+            - "data:/config:rw"
+      volumes:
+        data:
diff --git a/roles/setup_hosted_services/templates/nginx-proxy-manager.j2 b/roles/setup_hosted_services/templates/nginx-proxy-manager.j2
deleted file mode 100644
index 681f23c..0000000
--- a/roles/setup_hosted_services/templates/nginx-proxy-manager.j2
+++ /dev/null
@@ -1,18 +0,0 @@
----
-version: "3"
-services:
-  nginx-proxy-manager:
-    labels:
-      ie.cianhatton.backup.enabled: "true"
-      ie.cianhatton.backup.schedule: "{{backups.schedule_keys.nightly}}"
-    image: jlesage/nginx-proxy-manager
-    container_name: nginx-proxy-manager
-    restart: unless-stopped
-    ports:
-      - 8181:8181
-      - 80:8080
-      - 443:4443
-    volumes:
-      - "data:/config:rw"
-volumes:
-  data:
diff --git a/roles/setup_hosted_services/templates/uptime-kuma.j2 b/roles/setup_hosted_services/templates/uptime-kuma.j2
deleted file mode 100644
index a8cff7c..0000000
--- a/roles/setup_hosted_services/templates/uptime-kuma.j2
+++ /dev/null
@@ -1,17 +0,0 @@
----
-version: '3.3'
-services:
-  uptime-kuma:
-    labels:
-      ie.cianhatton.backup.enabled: "true"
-      ie.cianhatton.backup.schedule: "{{ backups.schedule_keys.nightly }}"
-    image: "{{ template_vars.image }}:{{ template_vars.tag }}"
-    container_name: uptime-kuma
-    volumes:
-      - data:/app/data
-    ports:
-      - 3001:3001
-    restart: unless-stopped
-
-volumes:
-  data: