---
# https://docs.ansible.com/ansible/latest/collections/community/docker/docker_container_module.html#ansible-collections-community-docker-docker-container-module
# https://docs.docker.com/storage/volumes/#backup-restore-or-migrate-data-volumes

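# These tasks restore a Docker volume from the most recent backup archive in
# S3. Illustrative variable values (examples only, not role defaults):
#   docker_backup_s3_volume: { name: "app-data" }
#   docker_backup_aws_s3_bucket: "my-backup-bucket"
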
- name: Ensure volume exists.
  community.docker.docker_volume:
    name: "{{ docker_backup_s3_volume.name }}"
    state: present
  register: volume_out

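# Only restore when the volume was just created (volume_out.changed) or when
# a restore is explicitly forced via docker_backup_restore_force.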
- name: Determine if a restore is needed.
  ansible.builtin.set_fact:
    should_perform_backup: "{{ docker_backup_restore_force or volume_out.changed }}"

# Try to find the latest backup in S3 based on the volume name.
- name: Find latest S3 version.
  when:
    - should_perform_backup
    - docker_backup_restore_latest_s3_key
  amazon.aws.aws_s3:
    bucket: "{{ docker_backup_aws_s3_bucket }}"
    mode: list
    prefix: "{{ docker_backup_s3_volume.name }}/{{ docker_backup_s3_volume.name }}"
    aws_access_key: "{{ docker_backup_aws_s3_aws_access_key }}"
    aws_secret_key: "{{ docker_backup_aws_s3_aws_secret_key }}"
    region: "{{ docker_backup_aws_s3_region }}"
    s3_url: "{{ docker_backup_aws_s3_url }}"
  register: s3_list_output

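# Optionally abort when the bucket holds no backups for this volume.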
- name: Fail when no backups were found.
  when:
    - should_perform_backup
    - docker_backup_fail_on_no_s3_backups
    - s3_list_output.s3_keys | length == 0
  ansible.builtin.fail:
    msg: "There were no S3 backups found for {{ docker_backup_s3_volume.name }}"

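# S3 lists keys in lexicographic (UTF-8 binary) order, so `last` picks the
# greatest key. That is the newest backup only if key names embed a sortable
# timestamp, which is assumed here.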
- name: Extract S3 key for container.
  when:
    - should_perform_backup
    - docker_backup_restore_latest_s3_key
    - s3_list_output.s3_keys | length > 0
  ansible.builtin.set_fact:
    container_s3_key: "{{ s3_list_output.s3_keys | last }}"

- name: Create directories for /tmp file.
  when:
    - should_perform_backup
    - s3_list_output.s3_keys | length > 0
  ansible.builtin.file:
    path: '/tmp/{{ container_s3_key | dirname }}'
    state: directory
    mode: '0755'

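# Download the archive into /tmp, mirroring the S3 key's directory layout so
# it lands in the directory created above.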
- name: Download archive from S3.
  when:
    - should_perform_backup
    - s3_list_output.s3_keys | length > 0
  amazon.aws.aws_s3:
    bucket: "{{ docker_backup_aws_s3_bucket }}"
    object: "{{ container_s3_key }}"
    aws_access_key: "{{ docker_backup_aws_s3_aws_access_key }}"
    aws_secret_key: "{{ docker_backup_aws_s3_aws_secret_key }}"
    region: "{{ docker_backup_aws_s3_region }}"
    s3_url: "{{ docker_backup_aws_s3_url }}"
    mode: get
    dest: "/tmp/{{ container_s3_key }}"
  register: get_out

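# A named volume's contents are only reachable through a container, so wipe
# the old data by mounting the volume into a short-lived Ubuntu container.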
- name: Remove contents of volume.
  when:
    - should_perform_backup
    - s3_list_output.s3_keys | length > 0
  community.docker.docker_container:
    name: "restore-container-{{ docker_backup_s3_volume.name }}-{{ 100 | random }}"
    image: ubuntu
    # wrap in a shell so the glob expands; a plain string command is not run
    # through a shell.
    command: ["/bin/sh", "-c", "rm -rf ./*"]
    cleanup: true
    detach: false # block until this container exits.
    state: started
    # start inside the directory we want to wipe
    working_dir: "/data"
    volumes:
      - "{{ docker_backup_s3_volume.name }}:/data"

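# Mount the volume and the host's /tmp so the container can read the
# downloaded archive; --strip-components 1 drops the archive's top-level
# directory so files land directly in /data.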
- name: Restore contents of volume.
  when:
    - should_perform_backup
    - s3_list_output.s3_keys | length > 0
  community.docker.docker_container:
    name: "restore-container-{{ docker_backup_s3_volume.name }}-{{ 100 | random }}"
    image: ubuntu
    # extract the tar archive into the volume.
    command: "tar xvf /tmp/{{ container_s3_key }} -C /data --strip-components 1"
    cleanup: true
    detach: false # block until this container exits.
    state: started
    volumes:
      - "{{ docker_backup_s3_volume.name }}:/data"
      - /tmp:/tmp

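# Clean up the downloaded archive so old backups do not accumulate in /tmp.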
- name: Remove downloaded files from /tmp.
  when:
    - should_perform_backup
    - s3_list_output.s3_keys | length > 0
  ansible.builtin.file:
    path: '/tmp/{{ container_s3_key }}'
    state: absent