Compare commits

...

5 Commits

@@ -10,12 +10,21 @@ ansible_pull_path: /usr/local/bin/ansible-pull
backup_directories: []
cron_hour: "4"
pihole_volumes:
  dell_app: {}
  dell_dns: {}
portainer_roles:
  - role: portainer_minio
  - role: portainer_glances
  - role: portainer_dashdot
  - role: portainer_dashy
  - role: portainer_diun
  - role: portainer_vaultwarden
  - role: portainer_bookstack
  - role: portainer_vikunja
  - role: portainer_linkding
  - role: portainer_gotify
  - role: portainer_overseerr
  - role: portainer_pihole
  - role: portainer_uptime_kuma

@@ -64,15 +64,10 @@ services:
      default_network: mariadb_net
      image: nextcloud
      tag: 27.0
  - name: nginx-proxy-manager
  - name: plex
    template_vars:
      plex_image: lscr.io/linuxserver/plex
      plex_tag: 1.32.4
  - name: uptime-kuma
    template_vars:
      image: louislam/uptime-kuma
      tag: 1.22.0
  - name: mariadb
    template_vars:
      image: mariadb
@@ -85,7 +80,6 @@ services:
      tag: 230615
  - name: olivetin
  - name: paperless
  - name: diun
pihole_volumes:
  qnap_app: {}
@@ -94,9 +88,9 @@ pihole_volumes:
portainer_roles:
  - role: portainer_dashy
  - role: portainer_diun
  - role: portainer_dashdot
  - role: portainer_glances
  - role: portainer_arr
  - role: portainer_gotify
  - role: portainer_pihole
  - role: portainer_overseerr
  - role: portainer_nginx_proxy_manager

@@ -10,7 +10,6 @@ portainer_required_templates:
portainer_endpoint: 35
services:
  - name: hasteypaste
  - name: diun
ansible_pull_path: /usr/local/bin/ansible-pull
@@ -26,6 +25,7 @@ pihole_volumes:
portainer_roles:
  - role: portainer_dashy
  - role: portainer_diun
  - role: portainer_dashdot
  - role: portainer_glances
  - role: portainer_pihole

@@ -0,0 +1,9 @@
---
diun_state: present
diun_image: crazymax/diun
diun_tag: latest
diun_portainer_stack_name: diun
diun_container_name: diun
diun_config_dir: /etc/config/diun
diun_config_file: diun-config.yml

@@ -0,0 +1,3 @@
---
- name: restart-diun
  ansible.builtin.command: docker restart diun

@@ -0,0 +1,61 @@
---
- name: "Diun | Restore any missing volumes from S3"
  ansible.builtin.include_role:
    name: chatton.docker_backup.docker_s3_volume_restore
  vars:
    docker_backup_s3_volume:
      name: "{{ diun_portainer_stack_name }}_data"

- name: Diun | Create config directory.
  ansible.builtin.file:
    path: '{{ diun_config_dir }}'
    state: directory
    mode: '0755'

- name: Diun | Template config.
  ansible.builtin.template:
    src: diun-config.j2
    dest: "{{ diun_config_dir }}/{{ diun_config_file }}"
    owner: root
    group: root
    mode: '0440'
  notify: restart-diun

- name: Diun | Set fact diun_labels.
  ansible.builtin.set_fact:
    diun_labels:
      diun.enable: true

- name: "Diun | Update Portainer."
  chatton.portainer.portainer_stack:
    username: '{{ portainer_user }}'
    password: '{{ portainer.password }}'
    base_url: '{{ portainer_base_url }}'
    stack_name: '{{ diun_portainer_stack_name }}'
    endpoint_id: '{{ portainer_endpoint }}'
    state: "{{ diun_state }}"
    definition:
      version: "3.5"
      services:
        diun:
          image: "{{ diun_image }}:{{ diun_tag }}"
          container_name: diun
          command: serve
          volumes:
            - data:/data
            - /var/run/docker.sock:/var/run/docker.sock
            - /etc/config/diun/diun-config.yml:/data/config.yml
          environment:
            - TZ=Europe/Dublin
            - LOG_LEVEL=debug
            - LOG_JSON=false
            - DIUN_WATCH_WORKERS=20
            - DIUN_WATCH_SCHEDULE=0 */6 * * *
            - DIUN_WATCH_JITTER=30s
            - DIUN_PROVIDERS_DOCKER=true
            - DIUN_PROVIDERS_DOCKER_WATCHBYDEFAULT=true
            - CONFIG=/data/config.yml
          labels: "{{ backup_labels | combine(diun_labels) }}"
          restart: unless-stopped
      volumes:
        data:

@@ -0,0 +1,24 @@
db:
  path: diun.db

watch:
  workers: 10
  schedule: "0 */6 * * *"
  jitter: 30s
  # always send a notification on the first check if an update is available
  firstCheckNotif: true

notif:
  gotify:
    endpoint: https://gotify.cianhatton.ie
    token: {{ diun_gotify_token }}
    priority: 1
    timeout: 10s
    templateTitle: "{% raw %}{{ .Entry.Image }}{% endraw %} released"
    templateBody: |
      Docker tag {% raw %}{{ .Entry.Image }}{% endraw %} which you subscribed to through {% raw %}{{ .Entry.Provider }}{% endraw %} provider has been released.

providers:
  # checked separately from the Docker provider
  file:
    filename: /data/config.yml

@@ -0,0 +1,11 @@
---
nginx_proxy_manager_state: present
nginx_proxy_manager_image: jlesage/nginx-proxy-manager
nginx_proxy_manager_tag: v23.04.1
nginx_proxy_manager_http_expose_port: 80
nginx_proxy_manager_https_expose_port: 443
nginx_proxy_manager_expose_port: 8181
nginx_proxy_manager_container_name: nginx-proxy-manager
nginx_proxy_manager_portainer_stack_name: nginx-proxy-manager
nginx_proxy_manager_puid: 1000
nginx_proxy_manager_pgid: 1000

@@ -0,0 +1,32 @@
---
- name: "Nginx proxy manager | Restore any missing volumes from S3"
  ansible.builtin.include_role:
    name: chatton.docker_backup.docker_s3_volume_restore
  vars:
    docker_backup_s3_volume:
      name: "{{ nginx_proxy_manager_portainer_stack_name }}_data"

- name: "Nginx proxy manager | Update Portainer."
  chatton.portainer.portainer_stack:
    username: '{{ portainer_user }}'
    password: '{{ portainer.password }}'
    base_url: '{{ portainer_base_url }}'
    stack_name: '{{ nginx_proxy_manager_portainer_stack_name }}'
    endpoint_id: '{{ portainer_endpoint }}'
    state: "{{ nginx_proxy_manager_state }}"
    definition:
      version: "3"
      services:
        nginx-proxy-manager:
          labels: "{{ backup_labels }}"
          image: "{{ nginx_proxy_manager_image }}:{{ nginx_proxy_manager_tag }}"
          container_name: "{{ nginx_proxy_manager_container_name }}"
          restart: "{{ restart_policy }}"
          ports:
            - "{{ nginx_proxy_manager_expose_port }}:8181"
            - "{{ nginx_proxy_manager_http_expose_port }}:8080"
            - "{{ nginx_proxy_manager_https_expose_port }}:4443"
          volumes:
            - "data:/config:rw"
      volumes:
        data:

@@ -0,0 +1,7 @@
---
uptime_kuma_state: present
uptime_kuma_image: louislam/uptime-kuma
uptime_kuma_tag: 1.22.0
uptime_kuma_expose_port: 3001
uptime_kuma_container_name: uptime-kuma
uptime_kuma_portainer_stack_name: uptime-kuma

@@ -0,0 +1,31 @@
---
- name: "Uptime Kuma | Restore any missing volumes from S3"
  ansible.builtin.include_role:
    name: chatton.docker_backup.docker_s3_volume_restore
  vars:
    docker_backup_s3_volume:
      name: "{{ uptime_kuma_portainer_stack_name }}_data"

- name: "Uptime Kuma | Update Portainer."
  chatton.portainer.portainer_stack:
    username: '{{ portainer_user }}'
    password: '{{ portainer.password }}'
    base_url: '{{ portainer_base_url }}'
    stack_name: '{{ uptime_kuma_portainer_stack_name }}'
    endpoint_id: '{{ portainer_endpoint }}'
    state: "{{ uptime_kuma_state }}"
    definition:
      version: '3.3'
      services:
        uptime-kuma:
          labels: "{{ backup_labels }}"
          image: "{{ uptime_kuma_image }}:{{ uptime_kuma_tag }}"
          container_name: "{{ uptime_kuma_container_name }}"
          volumes:
            - data:/app/data
          ports:
            - "{{ uptime_kuma_expose_port }}:3001"
          restart: "{{ restart_policy }}"
      volumes:
        data:

@@ -14,5 +14,5 @@ actions:
    shell: docker restart overseerr
    timeout: 30
  - title: Restart VPN Stack
    shell: docker restart protonvpn sonarr radarr jackett qbittorrent
    shell: docker restart protonwire sonarr radarr jackett qbittorrent
    timeout: 90

@@ -1,18 +0,0 @@
---
version: "3"
services:
  nginx-proxy-manager:
    labels:
      ie.cianhatton.backup.enabled: "true"
      ie.cianhatton.backup.schedule: "{{backups.schedule_keys.nightly}}"
    image: jlesage/nginx-proxy-manager
    container_name: nginx-proxy-manager
    restart: unless-stopped
    ports:
      - 8181:8181
      - 80:8080
      - 443:4443
    volumes:
      - "data:/config:rw"
volumes:
  data:

@@ -1,17 +0,0 @@
---
version: '3.3'
services:
  uptime-kuma:
    labels:
      ie.cianhatton.backup.enabled: "true"
      ie.cianhatton.backup.schedule: "{{ backups.schedule_keys.nightly }}"
    image: "{{ template_vars.image }}:{{ template_vars.tag }}"
    container_name: uptime-kuma
    volumes:
      - data:/app/data
    ports:
      - 3001:3001
    restart: unless-stopped
volumes:
  data: