fix nginx proxy manager and olivetin command

authelia
chatton 2 years ago
parent 556414c00b
commit 37ce7b4568

@ -19,6 +19,7 @@ portainer_roles:
- role: portainer_glances - role: portainer_glances
- role: portainer_dashdot - role: portainer_dashdot
- role: portainer_dashy - role: portainer_dashy
- role: portainer_diun
- role: portainer_vaultwarden - role: portainer_vaultwarden
- role: portainer_bookstack - role: portainer_bookstack
- role: portainer_vikunja - role: portainer_vikunja

@ -80,7 +80,6 @@ services:
tag: 230615 tag: 230615
- name: olivetin - name: olivetin
- name: paperless - name: paperless
- name: diun
pihole_volumes: pihole_volumes:
qnap_app: {} qnap_app: {}
@ -89,6 +88,7 @@ pihole_volumes:
portainer_roles: portainer_roles:
- role: portainer_dashy - role: portainer_dashy
- role: portainer_diun
- role: portainer_dashdot - role: portainer_dashdot
- role: portainer_glances - role: portainer_glances
- role: portainer_arr - role: portainer_arr

@ -10,7 +10,6 @@ portainer_required_templates:
portainer_endpoint: 35 portainer_endpoint: 35
services: services:
- name: hasteypaste - name: hasteypaste
- name: diun
ansible_pull_path: /usr/local/bin/ansible-pull ansible_pull_path: /usr/local/bin/ansible-pull
@ -26,6 +25,7 @@ pihole_volumes:
portainer_roles: portainer_roles:
- role: portainer_dashy - role: portainer_dashy
- role: portainer_diun
- role: portainer_dashdot - role: portainer_dashdot
- role: portainer_glances - role: portainer_glances
- role: portainer_pihole - role: portainer_pihole

@ -0,0 +1,9 @@
---
# Role defaults for the Diun (Docker Image Update Notifier) role.
# Whether the Portainer stack should exist ("present") or be removed.
diun_state: present
# Image repository and tag for the diun container.
diun_image: crazymax/diun
diun_tag: latest
# Name of the stack as registered in Portainer; also prefixes the S3 backup volume.
diun_portainer_stack_name: diun
# Container name — referenced by the restart-diun handler.
diun_container_name: diun
# Host-side location of the templated diun configuration file.
diun_config_dir: /etc/config/diun
diun_config_file: diun-config.yml

@ -0,0 +1,3 @@
---
# Handler: restart the Diun container whenever its templated config changes
# (notified by the "Diun | Template config." task).
- name: restart-diun
  # Use the role variable rather than a hard-coded container name so the
  # handler stays correct if diun_container_name is overridden.
  ansible.builtin.command: "docker restart {{ diun_container_name }}"

@ -0,0 +1,61 @@
---
# Tasks: deploy Diun (Docker image update notifier) as a Portainer stack.
# Flow: restore backed-up volume -> lay down config -> push stack definition.
- name: "Diun | Restore any missing volumes from S3"
  ansible.builtin.include_role:
    name: chatton.docker_backup.docker_s3_volume_restore
  vars:
    docker_backup_s3_volume:
      name: "{{ diun_portainer_stack_name }}_data"

- name: Diun | Create config directory.
  ansible.builtin.file:
    path: "{{ diun_config_dir }}"
    state: directory
    # Quote octal modes so YAML does not parse them as base-8 integers.
    mode: "0755"

- name: Diun | Template config.
  ansible.builtin.template:
    src: diun-config.j2
    dest: "{{ diun_config_dir }}/{{ diun_config_file }}"
    owner: root
    group: root
    mode: "0440"
  notify: restart-diun

- name: Diun | Set fact diun_labels.
  # FQCN for consistency with the other ansible.builtin.* tasks in this file.
  ansible.builtin.set_fact:
    diun_labels:
      # Compose label values must be strings; an unquoted `true` is a YAML bool.
      diun.enable: "true"

- name: "Diun | Update Portainer."
  chatton.portainer.portainer_stack:
    username: "{{ portainer_user }}"
    password: "{{ portainer.password }}"
    base_url: "{{ portainer_base_url }}"
    stack_name: "{{ diun_portainer_stack_name }}"
    endpoint_id: "{{ portainer_endpoint }}"
    state: "{{ diun_state }}"
    definition:
      version: "3.5"
      services:
        diun:
          image: "{{ diun_image }}:{{ diun_tag }}"
          # Role variable instead of a hard-coded name, so the restart handler
          # and any overrides of diun_container_name stay in sync.
          container_name: "{{ diun_container_name }}"
          command: serve
          volumes:
            - data:/data
            - /var/run/docker.sock:/var/run/docker.sock
            # Derive the bind-mount source from the same vars used by the
            # template task above instead of duplicating the literal path.
            - "{{ diun_config_dir }}/{{ diun_config_file }}:/data/config.yml"
          environment:
            - TZ=Europe/Dublin
            - LOG_LEVEL=debug
            - LOG_JSON=false
            # NOTE(review): these DIUN_WATCH_* env values override the watch
            # section of the mounted config file — confirm that is intended.
            - DIUN_WATCH_WORKERS=20
            # Quoted: the value contains spaces and `*` glob-like characters.
            - "DIUN_WATCH_SCHEDULE=0 */6 * * *"
            - DIUN_WATCH_JITTER=30s
            - DIUN_PROVIDERS_DOCKER=true
            - DIUN_PROVIDERS_DOCKER_WATCHBYDEFAULT=true
            - CONFIG=/data/config.yml
          labels: "{{ backup_labels | combine(diun_labels) }}"
          restart: unless-stopped
      volumes:
        data:

@ -0,0 +1,24 @@
db:
  # Relative path — resolves inside the container's /data working directory.
  path: diun.db
watch:
  workers: 10
  schedule: "0 */6 * * *"
  jitter: 30s
  # always want notification if an update is available
  firstCheckNotif: true
notif:
  gotify:
    endpoint: https://gotify.cianhatton.ie
    # Quote the templated value: an unquoted token could be misparsed by YAML
    # (e.g. tokens that look like numbers or start with a special character).
    token: "{{ diun_gotify_token }}"
    priority: 1
    timeout: 10s
    templateTitle: "{% raw %}{{ .Entry.Image }}{% endraw %} released"
    templateBody: |
      Docker tag {% raw %}{{ .Entry.Image }}{% endraw %} which you subscribed to through {% raw %}{{ .Entry.Provider }}{% endraw %} provider has been released.
providers:
  # check these separate to the docker system
  # diun configuration keys are lowercase; a capitalised "File" key is not
  # recognised by diun's file provider.
  file:
    filename: /data/config.yml

@ -1,11 +1,11 @@
--- ---
ngnix_proxy_manager_state: present nginx_proxy_manager_state: present
ngnix_proxy_manager_image: jlesage/nginx-proxy-manager nginx_proxy_manager_image: jlesage/nginx-proxy-manager
ngnix_proxy_manager_tag: v23.04.1 nginx_proxy_manager_tag: v23.04.1
ngnix_proxy_manager_http_expose_port: 80 nginx_proxy_manager_http_expose_port: 80
ngnix_proxy_manager_https_expose_port: 433 nginx_proxy_manager_https_expose_port: 443
ngnix_proxy_manager_expose_port: 8181 nginx_proxy_manager_expose_port: 8181
ngnix_proxy_manager_container_name: ngnix_proxy_manager nginx_proxy_manager_container_name: nginx-proxy-manager
ngnix_proxy_manager_portainer_stack_name: ngnix_proxy_manager nginx_proxy_manager_portainer_stack_name: nginx-proxy-manager
ngnix_proxy_manager_puid: 1000 nginx_proxy_manager_puid: 1000
ngnix_proxy_manager_pgid: 1000 nginx_proxy_manager_pgid: 1000

@ -1,31 +1,31 @@
--- ---
- name: "Overseerr | Restore any missing volumes from S3" - name: "Nginx proxy manager | Restore any missing volumes from S3"
ansible.builtin.include_role: ansible.builtin.include_role:
name: chatton.docker_backup.docker_s3_volume_restore name: chatton.docker_backup.docker_s3_volume_restore
vars: vars:
docker_backup_s3_volume: docker_backup_s3_volume:
name: "{{ ngnix_proxy_manager_portainer_stack_name }}_data" name: "{{ nginx_proxy_manager_portainer_stack_name }}_data"
- name: "Overseerr | Update Portainer." - name: "Nginx proxy manager | Update Portainer."
chatton.portainer.portainer_stack: chatton.portainer.portainer_stack:
username: '{{ portainer_user }}' username: '{{ portainer_user }}'
password: '{{ portainer.password }}' password: '{{ portainer.password }}'
base_url: '{{ portainer_base_url }}' base_url: '{{ portainer_base_url }}'
stack_name: '{{ ngnix_proxy_manager_portainer_stack_name }}' stack_name: '{{ nginx_proxy_manager_portainer_stack_name }}'
endpoint_id: '{{ portainer_endpoint }}' endpoint_id: '{{ portainer_endpoint }}'
state: "{{ ngnix_proxy_manager_state }}" state: "{{ nginx_proxy_manager_state }}"
definition: definition:
version: "3" version: "3"
services: services:
nginx-proxy-manager: nginx-proxy-manager:
labels: "{{ backup_labels }}" labels: "{{ backup_labels }}"
image: "{{ nginx_proxy_manager_image }}:{{ nginx_proxy_manager_tag }}" image: "{{ nginx_proxy_manager_image }}:{{ nginx_proxy_manager_tag }}"
container_name: "{{ ngnix_proxy_manager_container_name }}" container_name: "{{ nginx_proxy_manager_container_name }}"
restart: "{{ restart_policy }}" restart: "{{ restart_policy }}"
ports: ports:
- "{{ ngnix_proxy_manager_expose_port }}:8181" - "{{ nginx_proxy_manager_expose_port }}:8181"
- "{{ ngnix_proxy_manager_http_expose_port }}:8080" - "{{ nginx_proxy_manager_http_expose_port }}:8080"
- "{{ ngnix_proxy_manager_https_expose_port }}:4443" - "{{ nginx_proxy_manager_https_expose_port }}:4443"
volumes: volumes:
- "data:/config:rw" - "data:/config:rw"
volumes: volumes:

@ -14,5 +14,5 @@ actions:
shell: docker restart overseerr shell: docker restart overseerr
timeout: 30 timeout: 30
- title: Restart VPN Stack - title: Restart VPN Stack
shell: docker restart protonvpn sonarr radarr jackett qbittorrent shell: docker restart protonwire sonarr radarr jackett qbittorrent
timeout: 90 timeout: 90

Loading…
Cancel
Save