---
# https://docs.ansible.com/ansible/latest/collections/community/docker/docker_container_module.html#ansible-collections-community-docker-docker-container-module
# https://docs.docker.com/storage/volumes/#backup-restore-or-migrate-data-volumes
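#
# Flow: stop the target container, archive each of its named volumes with tar,
# restart it, upload the archives to S3, then prune all but the newest backups.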
- name: Determine backup timestamp.
  ansible.builtin.set_fact:
    backup_time: "{{ ansible_date_time.iso8601 }}"
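# backup_time resolves to a UTC stamp such as "2024-05-01T12:00:00Z"
# (illustrative value) and is embedded in every archive name and S3 key below.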
- name: Install Python dependencies
  ansible.builtin.pip:
    name:
      - docker
      - boto3
- name: Stop the container being backed up
  community.docker.docker_container:
    name: "{{ container_backup }}"
    state: stopped
- name: Get container details
  community.docker.docker_container_info:
    name: "{{ container_backup }}"
  register: result
- name: Extract only the volume mounts (not bind mounts)
  ansible.builtin.set_fact:
    volume_mounts: "{{ result.container.Mounts | selectattr('Type', 'equalto', 'volume') | list }}"
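# Each item of result.container.Mounts is a dict shaped roughly like this
# (illustrative values; field names follow `docker inspect` output):
#   {"Type": "volume", "Name": "app_data", "Driver": "local",
#    "Source": "/var/lib/docker/volumes/app_data/_data",
#    "Destination": "/data", "RW": true}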
- name: Create Backup of Container Volumes
  community.docker.docker_container:
    name: "backup-container-{{ item.Name }}-{{ 10 | random }}"
    image: ubuntu
    command: "tar -czvf /backups/{{ item.Name }}-{{ backup_time }}.tar.gz /data"
    auto_remove: true
    detach: false # block until this container exits.
    state: started
    volumes:
      - "{{ item.Name }}:/data"
      - "{{ docker_backup_host_backup_directory }}:/backups"
  with_items: "{{ volume_mounts }}"
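# GNU tar strips the leading "/" on create, so members are stored as "data/...".
# A restore sketch (an assumption, not part of this role): mount a fresh volume
# at /data in a helper container and run
#   tar -xzvf /backups/<volume>-<timestamp>.tar.gz -C /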
- name: Start the container
  community.docker.docker_container:
    name: "{{ container_backup }}"
    state: started
- name: Upload backups to S3
  amazon.aws.aws_s3:
    s3_url: "{{ docker_backup_aws_s3_url }}"
    bucket: "{{ docker_backup_aws_s3_bucket }}"
    object: "{{ item.Name }}/{{ item.Name }}-{{ backup_time }}.tar.gz"
    src: "{{ docker_backup_host_backup_directory }}/{{ item.Name }}-{{ backup_time }}.tar.gz"
    aws_access_key: "{{ docker_backup_aws_s3_aws_access_key }}"
    aws_secret_key: "{{ docker_backup_aws_s3_aws_secret_key }}"
    region: "{{ docker_backup_aws_s3_region }}"
    mode: put
    permission: "{{ docker_backup_aws_s3_permissions }}"
  register: upload_result
  with_items: "{{ volume_mounts }}"
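# Objects land under "<volume name>/<volume name>-<timestamp>.tar.gz", which is
# the prefix the listing task below depends on.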
# List the existing backups for each volume, keyed on its name prefix.
- name: List volume backups in S3.
  amazon.aws.aws_s3:
    bucket: "{{ docker_backup_aws_s3_bucket }}"
    mode: list
    prefix: "{{ item.Name }}/{{ item.Name }}"
    aws_access_key: "{{ docker_backup_aws_s3_aws_access_key }}"
    aws_secret_key: "{{ docker_backup_aws_s3_aws_secret_key }}"
    region: "{{ docker_backup_aws_s3_region }}"
    s3_url: "{{ docker_backup_aws_s3_url }}"
  register: s3_list_outputs
  with_items: "{{ volume_mounts }}"
# TODO: do this in a more native way rather than a Python script reading env vars.
- name: Determine which backups should be deleted.
  ansible.builtin.script: scripts/determine-s3-keys-to-delete.py
  environment:
    S3_RESULTS: "{{ s3_list_outputs.results }}"
    NUM_BACKUPS_TO_KEEP: "{{ docker_backup_retain_count }}"
  register: python_output
  changed_when: false
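# The script itself is not part of this file. A minimal sketch of what it is
# assumed to do (parsing and names are assumptions): read the registered list
# results and the retention count from the environment, sort each volume's
# keys (the embedded ISO8601 stamp makes a lexical sort chronological), and
# print every key except the newest N, one per line, so stdout_lines can feed
# the delete task below:
#
#   import ast, os
#   results = ast.literal_eval(os.environ["S3_RESULTS"])  # Python-repr list from Ansible
#   keep = int(os.environ["NUM_BACKUPS_TO_KEEP"])
#   for result in results:
#       keys = sorted(result.get("s3_keys", []))  # aws_s3 mode=list returns s3_keys
#       for key in (keys[:-keep] if keep else keys):
#           print(key)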
- name: Delete old backups.
  amazon.aws.aws_s3:
    bucket: "{{ docker_backup_aws_s3_bucket }}"
    aws_access_key: "{{ docker_backup_aws_s3_aws_access_key }}"
    aws_secret_key: "{{ docker_backup_aws_s3_aws_secret_key }}"
    region: "{{ docker_backup_aws_s3_region }}"
    s3_url: "{{ docker_backup_aws_s3_url }}"
    object: "{{ item }}"
    mode: delobj
  with_items: "{{ python_output.stdout_lines }}"