---
# https://docs.ansible.com/ansible/latest/collections/community/docker/docker_container_module.html#ansible-collections-community-docker-docker-container-module
# https://docs.docker.com/storage/volumes/#backup-restore-or-migrate-data-volumes
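#
# Back up the named volumes of a Docker container to compressed tar archives
# and push them to S3, optionally pruning old backups. Variables used below
# (values here are illustrative only, not defaults):
#
#   container_backup: my-app                          # container whose volumes are backed up
#   docker_backup_host_backup_directory: /var/backups # host directory for the tar archives
#   docker_backup_aws_s3_url: https://s3.amazonaws.com
#   docker_backup_aws_s3_bucket: my-backup-bucket
#   docker_backup_aws_s3_region: us-east-1
#   docker_backup_aws_s3_aws_access_key: "..."
#   docker_backup_aws_s3_aws_secret_key: "..."
#   docker_backup_aws_s3_encrypt: true
#   docker_backup_aws_s3_permissions: private
#   docker_backup_retain_count: 5                     # optional; enables pruning at the end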
- name: Determine backup timestamp.
  ansible.builtin.set_fact:
    backup_time: "{{ ansible_date_time.iso8601 }}"
- name: Install Python dependencies.
  ansible.builtin.pip:
    name:
      - docker
      - boto3
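# The docker SDK backs the community.docker modules and boto3 backs the
# amazon.aws S3 tasks below; both must be importable by the Python
# interpreter Ansible uses on the target host.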
- name: Stop the container being backed up.
  community.docker.docker_container:
    name: "{{ container_backup }}"
    state: stopped
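# The container stays stopped while its volumes are archived so the tarballs
# capture a consistent, quiesced state (per the Docker volume backup guide
# linked above).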
- name: Get container details.
  community.docker.docker_container_info:
    name: "{{ container_backup }}"
  register: result
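# result.container.Mounts lists every mount with a Type of "volume" or "bind";
# only named volumes (which carry a Name we can re-mount by) are backed up here.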
- name: Extract only the volume mounts (not bind mounts).
  ansible.builtin.set_fact:
    volume_mounts: "{{ result.container.Mounts | selectattr('Type', 'equalto', 'volume') | list }}"
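# Backup pattern from the Docker docs linked above: for each named volume, run
# a throwaway ubuntu container that mounts the volume at /data and the host
# backup directory at /backups, then tars /data into a timestamped archive.
# The random suffix keeps the helper container names unique across runs.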
- name: Create backups of container volumes.
  community.docker.docker_container:
    name: "backup-container-{{ item.Name }}-{{ 10 | random }}"
    image: ubuntu
    command: "tar -czvf /backups/{{ item.Name }}-{{ backup_time }}.tar.gz /data"
    cleanup: true
    detach: false # Block until this container exits.
    state: started
    volumes:
      - "{{ item.Name }}:/data"
      - "{{ docker_backup_host_backup_directory }}:/backups"
  with_items: "{{ volume_mounts }}"
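# With the archives written to the host, bring the original container back up.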
- name: Start the container.
  community.docker.docker_container:
    name: "{{ container_backup }}"
    state: started
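# Each archive lands under a per-volume prefix, <volume>/<volume>-<timestamp>.tar.gz;
# the retention tasks below list and prune keys by that same prefix.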
- name: Upload backups to S3.
  amazon.aws.aws_s3:
    s3_url: "{{ docker_backup_aws_s3_url }}"
    bucket: "{{ docker_backup_aws_s3_bucket }}"
    object: "{{ item.Name }}/{{ item.Name }}-{{ backup_time }}.tar.gz"
    src: "{{ docker_backup_host_backup_directory }}/{{ item.Name }}-{{ backup_time }}.tar.gz"
    aws_access_key: "{{ docker_backup_aws_s3_aws_access_key }}"
    aws_secret_key: "{{ docker_backup_aws_s3_aws_secret_key }}"
    region: "{{ docker_backup_aws_s3_region }}"
    mode: put
    encrypt: "{{ docker_backup_aws_s3_encrypt }}"
    permission: "{{ docker_backup_aws_s3_permissions }}"
  register: upload_result
  with_items: "{{ volume_mounts }}"
# Retention: list the existing backups for each volume, work out which keys
# fall outside the retain window, and delete them. All three tasks are skipped
# when docker_backup_retain_count is not defined.
- name: List existing volume backups in S3.
  when: docker_backup_retain_count is defined
  amazon.aws.aws_s3:
    bucket: "{{ docker_backup_aws_s3_bucket }}"
    mode: list
    prefix: "{{ item.Name }}/{{ item.Name }}"
    aws_access_key: "{{ docker_backup_aws_s3_aws_access_key }}"
    aws_secret_key: "{{ docker_backup_aws_s3_aws_secret_key }}"
    region: "{{ docker_backup_aws_s3_region }}"
    s3_url: "{{ docker_backup_aws_s3_url }}"
  register: s3_list_outputs
  with_items: "{{ volume_mounts }}"
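# Because backup_time is ISO 8601, S3's lexicographic key ordering is also
# chronological, so the newest backups sort last in each s3_keys list.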
- name: Find keys to delete.
  when: docker_backup_retain_count is defined
  ansible.builtin.set_fact:
    # keep_last is not an Ansible built-in; it is a custom filter expected to
    # be provided alongside this role (e.g. in filter_plugins/), presumably
    # returning the keys left over once the newest docker_backup_retain_count
    # entries of each per-volume list are kept.
    s3_keys_to_delete: "{{ s3_list_outputs.results | map(attribute='s3_keys') | keep_last(docker_backup_retain_count) | flatten }}"
- name: Delete old backups.
  when: docker_backup_retain_count is defined
  amazon.aws.aws_s3:
    bucket: "{{ docker_backup_aws_s3_bucket }}"
    aws_access_key: "{{ docker_backup_aws_s3_aws_access_key }}"
    aws_secret_key: "{{ docker_backup_aws_s3_aws_secret_key }}"
    region: "{{ docker_backup_aws_s3_region }}"
    s3_url: "{{ docker_backup_aws_s3_url }}"
    object: "{{ item }}"
    mode: delobj
  with_items: "{{ s3_keys_to_delete }}"