diff --git a/roles/setup_hosted_services/tasks/main.yml b/roles/setup_hosted_services/tasks/main.yml index e763069..2e5755c 100644 --- a/roles/setup_hosted_services/tasks/main.yml +++ b/roles/setup_hosted_services/tasks/main.yml @@ -10,7 +10,6 @@ source: pull with_items: - ubuntu - - busybox - name: Create required directories. ansible.builtin.file: diff --git a/roles/setup_hosted_services/templates/pihole.j2 b/roles/setup_hosted_services/templates/pihole.j2 index 48d25c6..663bdce 100644 --- a/roles/setup_hosted_services/templates/pihole.j2 +++ b/roles/setup_hosted_services/templates/pihole.j2 @@ -2,6 +2,9 @@ version: "3" services: pihole: + labels: + ie.cianhatton.backup.enabled: "true" + ie.cianhatton.backup.schedule: "{{ backups.schedule_keys.nightly }}" container_name: pihole image: "pihole/pihole:2022.09.2" ports: @@ -17,5 +20,5 @@ services: restart: unless-stopped volumes: - "{{ inventory_hostname_short }}_app": - "{{ inventory_hostname_short }}_dns": + {{ inventory_hostname_short }}_app: + {{ inventory_hostname_short }}_dns: diff --git a/roles/setup_hosted_services/templates/plex.j2 b/roles/setup_hosted_services/templates/plex.j2 index da12e96..a406c66 100644 --- a/roles/setup_hosted_services/templates/plex.j2 +++ b/roles/setup_hosted_services/templates/plex.j2 @@ -26,6 +26,7 @@ services: tautulli: labels: ie.cianhatton.backup.enabled: "true" + ie.cianhatton.backup.schedule: "{{ backups.schedule_keys.nightly }}" image: lscr.io/linuxserver/tautulli:latest container_name: tautulli environment: diff --git a/roles/setup_portainer/tasks/main.yml b/roles/setup_portainer/tasks/main.yml index fe3560e..ca70a82 100644 --- a/roles/setup_portainer/tasks/main.yml +++ b/roles/setup_portainer/tasks/main.yml @@ -10,42 +10,25 @@ - name: Portainer | Copy docker compose file. 
ansible.builtin.copy: src: docker-compose.yml - dest: '{{ directories.docker_compose_directory }}/docker-compose.yml' + dest: '{{ directories.docker_compose_directory }}/portainer/docker-compose.yml' owner: root group: root mode: 0440 -- name: Portainer | Check if volume exists - ansible.builtin.shell: docker volume ls -f name=portainer_portainer_data --format '{{ '{{' }} .Name {{ '}}' }}' - register: portainer_volume - changed_when: false - - name: Portainer | Pull images docker_image: - name: '{{ item }}' + name: ubuntu source: pull - with_items: - - ubuntu - - busybox -- name: Docker Volume Backup | Restore Portainer volume from S3 - when: (portainer_volume.stdout_lines | length) == 0 - docker_container: - command: restore-volume --s3 --volume portainer_portainer_data - image: ghcr.io/chatton/docker-volume-backup:v0.3.0 - name: s3-restore-portainer - cleanup: true # delete container after it's done. - state: started # container should execute. - detach: false # task fails if container exits. - volumes: - - /var/run/docker.sock:/var/run/docker.sock - - /tmp:/tmp # temp s3 archive goes here - env: - AWS_ACCESS_KEY_ID: '{{ aws_s3.aws_access_key }}' - AWS_SECRET_ACCESS_KEY: '{{ aws_s3.aws_secret_key }}' - AWS_DEFAULT_REGION: '{{ aws_s3.region }}' - AWS_BUCKET: '{{ aws_s3.bucket }}' - AWS_ENDPOINT: '{{ aws_s3.s3_url }}' +- name: Restore any missing volumes from S3 for Portainer. + ansible.builtin.include_role: + name: chatton.docker_backup.docker_s3_volume_restore + vars: + docker_backup_restore_force: false + docker_backup_restore_latest_s3_key: true + docker_backup_fail_on_no_s3_backups: false + docker_backup_s3_volume: + name: portainer_portainer_data - name: Portainer | Docker compose up community.docker.docker_compose: