initial commit, adding s3 backup and restore

pull/6/head
Cian Hatton 3 years ago
parent 0dba73ef6a
commit f7fea9053d

.gitignore

@@ -0,0 +1 @@
.idea

README.md

@@ -1 +1,3 @@
# ansible-docker-backup
# Ansible Collection - chatton.docker_backup
Documentation for the collection.

galaxy.yml

@@ -0,0 +1,62 @@
### REQUIRED
# The namespace of the collection. This can be a company/brand/organization or product namespace under which all
# content lives. May only contain alphanumeric lowercase characters and underscores. Namespaces cannot start with
# underscores or numbers and cannot contain consecutive underscores
namespace: chatton
# The name of the collection. Has the same character restrictions as 'namespace'
name: docker_backup
# The version of the collection. Must be compatible with semantic versioning
version: 1.0.0
# The path to the Markdown (.md) readme file. This path is relative to the root of the collection
readme: README.md
# A list of the collection's content authors. Can be just the name or in the format 'Full Name <email> (url)
# @nicks:irc/im.site#channel'
authors:
- Cian Hatton <cianhatton@protonmail.com>
### OPTIONAL but strongly recommended
# A short summary description of the collection
description: A collection of roles which allow backup and restore of docker volumes.
# Either a single license or a list of licenses for content inside of a collection. Ansible Galaxy currently only
# accepts L(SPDX,https://spdx.org/licenses/) licenses. This key is mutually exclusive with 'license_file'
license:
- MIT
# The path to the license file for the collection. This path is relative to the root of the collection. This key is
# mutually exclusive with 'license'
# license_file: 'LICENSE'
# A list of tags you want to associate with the collection for indexing/searching. A tag name has the same character
# requirements as 'namespace' and 'name'
tags: []
# Collections that this collection requires to be installed for it to be usable. The key of the dict is the
# collection label 'namespace.name'. The value is a version range
# L(specifiers,https://python-semanticversion.readthedocs.io/en/latest/#requirement-specification). Multiple version
# range specifiers can be set and are separated by ','
dependencies: {}
# The URL of the originating SCM repository
repository: https://github.com/chatton/ansible-docker-backup
# The URL to any online docs
documentation: https://github.com/chatton/ansible-docker-backup#readme
# The URL to the homepage of the collection/project
homepage: https://github.com/chatton/ansible-docker-backup
# The URL to the collection issue tracker
issues: https://github.com/chatton/ansible-docker-backup/issues
# A list of file glob-like patterns used to filter any files or directories that should not be included in the build
# artifact. A pattern is matched from the relative path of the file or directory of the collection directory. This
# uses 'fnmatch' to match the files or directories. Some directories and files like 'galaxy.yml', '*.pyc', '*.retry',
# and '.git' are always filtered
build_ignore: []
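
For orientation, the metadata above is what `ansible-galaxy collection build` packages and what consumers pin against. A minimal, illustrative requirements.yml for installing the result (the version specifier is an assumption, not something this commit defines):

# requirements.yml -- hypothetical install manifest for this collection
collections:
  - name: chatton.docker_backup   # namespace.name from the metadata above
    version: ">=1.0.0"            # illustrative pin; any valid specifier works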

roles/docker_s3_backup/defaults/main.yml

@@ -0,0 +1,12 @@
---
# defaults file for chatton.docker_s3_backup
docker_s3_backup_aws_s3_region: "us-east-1"
docker_s3_backup_aws_s3_bucket: "backups"
# the backup directory where backups are stored on the host machine.
# these will be uploaded to S3.
docker_s3_backup_host_backup_directory: ""
docker_s3_backup_aws_s3_url: ""
docker_s3_backup_aws_s3_aws_access_key: ""
docker_s3_backup_aws_s3_aws_secret_key: ""
docker_s3_backup_aws_s3_permissions: []
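
Note that `docker_s3_backup_aws_s3_url` is empty by default and only needs a value when targeting a non-AWS, S3-compatible endpoint. A sketch of group_vars overriding these defaults for a self-hosted endpoint (the MinIO URL, bucket name, and host path are placeholders):

# group_vars/docker_hosts.yml -- illustrative overrides of the defaults above
docker_s3_backup_aws_s3_url: "http://minio.internal:9000"   # placeholder S3-compatible endpoint
docker_s3_backup_aws_s3_bucket: "docker-backups"            # placeholder bucket
docker_s3_backup_host_backup_directory: "/opt/backups"      # placeholder; must exist on the host
docker_s3_backup_aws_s3_permissions:
  - private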

roles/docker_s3_backup/meta/main.yml

@@ -0,0 +1,52 @@
galaxy_info:
  author: your name
  description: your role description
  company: your company (optional)

  # If the issue tracker for your role is not on github, uncomment the
  # next line and provide a value
  # issue_tracker_url: http://example.com/issue/tracker

  # Choose a valid license ID from https://spdx.org - some suggested licenses:
  # - BSD-3-Clause (default)
  # - MIT
  # - GPL-2.0-or-later
  # - GPL-3.0-only
  # - Apache-2.0
  # - CC-BY-4.0
  license: license (GPL-2.0-or-later, MIT, etc)

  min_ansible_version: 2.1

  # If this is a Container Enabled role, provide the minimum Ansible Container version.
  # min_ansible_container_version:

  # Provide a list of supported platforms, and for each platform a list of versions.
  # If you don't wish to enumerate all versions for a particular platform, use 'all'.
  # To view available platforms and versions (or releases), visit:
  # https://galaxy.ansible.com/api/v1/platforms/
  #
  # platforms:
  # - name: Fedora
  #   versions:
  #   - all
  #   - 25
  # - name: SomePlatform
  #   versions:
  #   - all
  #   - 1.0
  #   - 7
  #   - 99.99

  galaxy_tags: []
  # List tags for your role here, one per line. A tag is a keyword that describes
  # and categorizes the role. Users find roles by searching for tags. Be sure to
  # remove the '[]' above, if you add tags to this list.
  #
  # NOTE: A tag is limited to a single word comprised of alphanumeric characters.
  # Maximum 20 tags per role.

dependencies: []
# List your role dependencies here, one per line. Be sure to remove the '[]' above,
# if you add dependencies to this list.

roles/docker_s3_backup/tasks/main.yml

@@ -0,0 +1,51 @@
---
# https://docs.ansible.com/ansible/latest/collections/community/docker/docker_container_module.html#ansible-collections-community-docker-docker-container-module
# https://docs.docker.com/storage/volumes/#backup-restore-or-migrate-data-volumes
- name: Determine backup timestamp.
  set_fact:
    backup_time: "{{ ansible_date_time.iso8601 }}"

- name: Stop the container being backed up
  community.docker.docker_container:
    name: "{{ container_backup }}"
    state: stopped

- name: Get container details
  community.docker.docker_container_info:
    name: "{{ container_backup }}"
  register: result

- name: Extract only the volume mounts (not bind mounts)
  set_fact:
    volume_mounts: "{{ result.container.Mounts | selectattr('Type', 'equalto', 'volume') | list }}"

- name: Create backup of container volumes
  community.docker.docker_container:
    name: "backup-container-{{ item.Name }}-{{ 10 | random }}"
    image: ubuntu
    # archive the volume contents into the host backup directory.
    command: "tar -czvf /backups/{{ item.Name }}-{{ backup_time }}.tar.gz /data"
    auto_remove: true
    detach: false # block until this container exits.
    state: started
    volumes:
      - "{{ item.Name }}:/data"
      - "{{ docker_s3_backup_host_backup_directory }}:/backups"
  with_items: "{{ volume_mounts }}"

- name: Start the container again
  community.docker.docker_container:
    name: "{{ container_backup }}"
    state: started

- name: Upload backups to S3
  amazon.aws.aws_s3:
    s3_url: "{{ docker_s3_backup_aws_s3_url }}"
    bucket: "{{ docker_s3_backup_aws_s3_bucket }}"
    object: "{{ item.Name }}/{{ item.Name }}-{{ backup_time }}.tar.gz"
    src: "{{ docker_s3_backup_host_backup_directory }}/{{ item.Name }}-{{ backup_time }}.tar.gz"
    aws_access_key: "{{ docker_s3_backup_aws_s3_aws_access_key }}"
    aws_secret_key: "{{ docker_s3_backup_aws_s3_aws_secret_key }}"
    region: "{{ docker_s3_backup_aws_s3_region }}"
    mode: put
    permission: "{{ docker_s3_backup_aws_s3_permissions }}"
  register: upload_result
  with_items: "{{ volume_mounts }}"
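
These tasks also rely on a `container_backup` variable that is not declared in the role defaults, so callers must supply it. A minimal sketch of invoking the role, assuming the fully qualified role name follows the collection layout (the container name, host path, and credential lookups are placeholders):

- hosts: docker_hosts
  tasks:
    - name: Back up the volumes of one container
      ansible.builtin.include_role:
        name: chatton.docker_backup.docker_s3_backup          # assumed fully qualified role name
      vars:
        container_backup: "linkding"                          # placeholder container name
        docker_s3_backup_host_backup_directory: "/opt/backups"
        docker_s3_backup_aws_s3_aws_access_key: "{{ lookup('env', 'AWS_ACCESS_KEY_ID') }}"
        docker_s3_backup_aws_s3_aws_secret_key: "{{ lookup('env', 'AWS_SECRET_ACCESS_KEY') }}"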

roles/docker_s3_backup/vars/main.yml

@@ -0,0 +1,2 @@
---
# vars file for chatton.docker_backup

roles/docker_s3_volume_restore/defaults/main.yml

@@ -0,0 +1,17 @@
---
# defaults file for docker_s3_volume_restore
# when true, forces a restore of the volume even if it already exists.
docker_s3_volume_restore_force: false
docker_s3_volume_restores: []
#docker_s3_volume_restores:
# - volume_name: "linkding_data"
# s3_key: "linkding_data/linkding_data-2022-09-01T21:32:54Z.tar.gz"
docker_s3_volume_restore_aws_s3_region: "us-east-1"
docker_s3_volume_restore_aws_s3_bucket: "backups"
docker_s3_volume_restore_host_backup_directory: ""
docker_s3_volume_restore_aws_s3_url: ""
docker_s3_volume_restore_aws_s3_aws_access_key: ""
docker_s3_volume_restore_aws_s3_aws_secret_key: ""
docker_s3_volume_restore_aws_s3_permissions: []
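
The commented entry above shows the expected shape of a restore item: a volume name plus the S3 key of an archive produced by the backup role. A sketch of a play wiring it up (the volume name and key mirror the commented example; the fully qualified role name is an assumption):

- hosts: docker_hosts
  roles:
    - role: chatton.docker_backup.docker_s3_volume_restore    # assumed fully qualified role name
      vars:
        docker_s3_volume_restore_force: false                 # true re-restores an existing volume
        docker_s3_volume_restores:
          - volume_name: "linkding_data"
            s3_key: "linkding_data/linkding_data-2022-09-01T21:32:54Z.tar.gz"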

roles/docker_s3_volume_restore/handlers/main.yml

@@ -0,0 +1,2 @@
---
# handlers file for docker_s3_volume_restore

roles/docker_s3_volume_restore/meta/main.yml

@@ -0,0 +1,52 @@
galaxy_info:
  author: your name
  description: your role description
  company: your company (optional)

  # If the issue tracker for your role is not on github, uncomment the
  # next line and provide a value
  # issue_tracker_url: http://example.com/issue/tracker

  # Choose a valid license ID from https://spdx.org - some suggested licenses:
  # - BSD-3-Clause (default)
  # - MIT
  # - GPL-2.0-or-later
  # - GPL-3.0-only
  # - Apache-2.0
  # - CC-BY-4.0
  license: license (GPL-2.0-or-later, MIT, etc)

  min_ansible_version: 2.1

  # If this is a Container Enabled role, provide the minimum Ansible Container version.
  # min_ansible_container_version:

  # Provide a list of supported platforms, and for each platform a list of versions.
  # If you don't wish to enumerate all versions for a particular platform, use 'all'.
  # To view available platforms and versions (or releases), visit:
  # https://galaxy.ansible.com/api/v1/platforms/
  #
  # platforms:
  # - name: Fedora
  #   versions:
  #   - all
  #   - 25
  # - name: SomePlatform
  #   versions:
  #   - all
  #   - 1.0
  #   - 7
  #   - 99.99

  galaxy_tags: []
  # List tags for your role here, one per line. A tag is a keyword that describes
  # and categorizes the role. Users find roles by searching for tags. Be sure to
  # remove the '[]' above, if you add tags to this list.
  #
  # NOTE: A tag is limited to a single word comprised of alphanumeric characters.
  # Maximum 20 tags per role.

dependencies: []
# List your role dependencies here, one per line. Be sure to remove the '[]' above,
# if you add dependencies to this list.

roles/docker_s3_volume_restore/tasks/main.yml

@@ -0,0 +1,58 @@
---
# https://docs.ansible.com/ansible/latest/collections/community/docker/docker_container_module.html#ansible-collections-community-docker-docker-container-module
# https://docs.docker.com/storage/volumes/#backup-restore-or-migrate-data-volumes
- name: Ensure volume exists.
  community.docker.docker_volume:
    name: "{{ item.volume_name }}"
    state: present
  register: volume_out
  with_items: "{{ docker_s3_volume_restores }}"

- name: Determine if a restore is needed.
  set_fact:
    should_perform_restore: "{{ docker_s3_volume_restore_force | bool or volume_out.changed }}"

- name: End play as no restore is needed.
  meta: end_play
  when: not (should_perform_restore | bool)

- name: Download archive from S3
  amazon.aws.aws_s3:
    bucket: "{{ docker_s3_volume_restore_aws_s3_bucket }}"
    object: "{{ item.s3_key }}"
    aws_access_key: "{{ docker_s3_volume_restore_aws_s3_aws_access_key }}"
    aws_secret_key: "{{ docker_s3_volume_restore_aws_s3_aws_secret_key }}"
    region: "{{ docker_s3_volume_restore_aws_s3_region }}"
    s3_url: "{{ docker_s3_volume_restore_aws_s3_url }}"
    mode: get
    dest: "/tmp/{{ item.s3_key }}"
  register: get_out
  with_items: "{{ docker_s3_volume_restores }}"

- name: Remove contents of volumes
  community.docker.docker_container:
    name: "restore-container-{{ item.volume_name }}-{{ 10 | random }}"
    image: ubuntu
    # run through a shell so the glob expands.
    command: sh -c "rm -rf ./*"
    auto_remove: true
    detach: false # block until this container exits.
    state: started
    # start inside the directory we want to wipe
    working_dir: "/data"
    volumes:
      - "{{ item.volume_name }}:/data"
  with_items: "{{ docker_s3_volume_restores }}"

- name: Restore contents of volumes
  community.docker.docker_container:
    name: "restore-container-{{ item.volume_name }}-{{ 10 | random }}"
    image: ubuntu
    # extract the tar into the volume.
    command: "tar xvf /tmp/{{ item.s3_key }} -C /data --strip-components 1"
    auto_remove: true
    detach: false # block until this container exits.
    state: started
    volumes:
      - "{{ item.volume_name }}:/data"
      - /tmp:/tmp
  with_items: "{{ docker_s3_volume_restores }}"
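
A quick way to sanity-check a restore, sketched with the same throwaway-container pattern the role itself uses (the container and volume names are placeholders):

- name: List restored volume contents (illustrative)
  community.docker.docker_container:
    name: verify-linkding-data            # placeholder container name
    image: ubuntu
    command: ls -la /data                 # show what the restore produced
    auto_remove: true
    detach: false                         # block until the listing finishes
    state: started
    volumes:
      - "linkding_data:/data"             # placeholder volume name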

roles/docker_s3_volume_restore/vars/main.yml

@@ -0,0 +1,2 @@
---
# vars file for docker_s3_volume_restore