Compare commits


3 Commits

Author SHA1 Message Date
chatton cbbb65a9b7 volumes get backed up to minio successfully 3 years ago
chatton 655b9499c0 adding role and playbooks 3 years ago
chatton f3ddff350d allow empty 3 years ago

.gitignore

@@ -0,0 +1,2 @@
playbooks/backups
.idea

@@ -0,0 +1,3 @@
[defaults]
# look in the roles directory to find our defined roles.
roles_path = roles
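
The roles_path setting above means Ansible resolves roles from a local roles directory next to this config. The later hunks in this compare don't show their file paths, but a conventional layout for the docker_s3_backup role referenced by the playbook would look roughly like this (paths are assumed from standard Ansible role structure, not taken from the diff):

roles/
  docker_s3_backup/
    defaults/main.yml    # presumably the 12-line defaults hunk below
    tasks/main.yml       # presumably the 61-line tasks hunk below
playbooks/
  backup.yml             # hypothetical name for the backup playbook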

@@ -0,0 +1,31 @@
---
version: '3'
services:
  linkding:
    labels:
      ie.cianhatton.backup.enabled: "true"
    container_name: linkding
    image: sissbruecker/linkding:latest
    ports:
      - "9090:9090"
    volumes:
      - "data:/etc/linkding/data"
    restart: unless-stopped
  minio:
    image: minio/minio:latest
    container_name: minio
    ports:
      - "9000:9000"
      - "9001:9001"
    volumes:
      - minio_storage:/data
    environment:
      MINIO_ROOT_USER: minio
      MINIO_ROOT_PASSWORD: "*!3^wYe&dJ2H9D9aDC68Gh6!v7ydB^eK5G^"
      MINIO_API_ROOT_ACCESS: "on"
      MINIO_BROWSER_REDIRECT_URL: http://127.0.0.1:9000
    command: server --console-address ":9001" /data

volumes:
  data:
  minio_storage:
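
The linkding service opts into backups purely through the ie.cianhatton.backup.enabled label, so other services can be included the same way. A minimal sketch of an additional service that the playbook's label filter would pick up (the postgres service and pgdata volume here are hypothetical, not part of this diff):

  postgres:
    image: postgres:15
    labels:
      ie.cianhatton.backup.enabled: "true"
    volumes:
      - "pgdata:/var/lib/postgresql/data"

The pgdata volume would also need to be declared under the top-level volumes key.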

@@ -0,0 +1,24 @@
---
- name: Backup Docker Volumes.
  hosts: localhost
  tasks:
    - name: Find Containers With Backup Label
      community.docker.docker_host_info:
        containers: true
        containers_filters:
          label:
            - "ie.cianhatton.backup.enabled=true"
      register: filter_output

    - name: Get Container Names
      ansible.builtin.set_fact: container_names="{{ filter_output.containers | map(attribute='Names') | flatten }}"

    - name: Backup Containers with backup label
      ansible.builtin.include_role:
        name: docker_s3_backup
      vars:
        container_backup: "{{ container_item | regex_replace('^\\/', '') }}"
      with_items: "{{ container_names }}"
      loop_control:
        loop_var: container_item
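
The regex_replace('^\/', '') is there because the Docker API reports container names with a leading slash, so the filtered fact looks roughly like the sketch below (illustrative value, based on linkding being the only service in this compose file that carries the backup label):

container_names:
  - "/linkding"

The role then receives container_backup as plain "linkding" once the slash is stripped.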

@@ -0,0 +1,12 @@
# the aws region. For minio this will always be us-east-1.
docker_backup_aws_s3_region: "us-east-1"
# the name of the bucket in minio or s3
docker_backup_aws_s3_bucket: "backups"
# put backups locally in this directory.
docker_backup_host_backup_directory: "/tmp"
# the url of the minio server.
docker_backup_aws_s3_url: "http://127.0.0.1:9000"
docker_backup_aws_s3_aws_access_key: "83meItmzcEgb1NdasSgl"
docker_backup_aws_s3_aws_secret_key: "lwdAJ60gMkcZxRZCHsC6CsdPw63Xuds6h6mksnSz"
docker_backup_aws_s3_permissions: []
docker_backup_aws_s3_encrypt: off
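
Since these live in the role's defaults, they can be overridden per environment without editing the role, for example from the play's vars or a group_vars file. A sketch with placeholder values (the endpoint and the vault_* variables below are hypothetical, e.g. supplied via ansible-vault):

docker_backup_aws_s3_url: "http://minio.internal:9000"
docker_backup_aws_s3_bucket: "backups"
docker_backup_aws_s3_aws_access_key: "{{ vault_minio_access_key }}"
docker_backup_aws_s3_aws_secret_key: "{{ vault_minio_secret_key }}"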

@@ -0,0 +1,61 @@
---
- name: Determine backup timestamp.
  ansible.builtin.set_fact: backup_time="{{ ansible_date_time.iso8601 }}"

- name: Install Python dependencies
  ansible.builtin.pip:
    name:
      - docker
      - boto3

- name: Stop a container
  community.docker.docker_container:
    name: "{{ container_backup }}"
    state: stopped

- name: Get container details
  community.docker.docker_container_info:
    name: "{{ container_backup }}"
  register: result

- name: Extract only the volume mounts (not bind mounts)
  ansible.builtin.set_fact: volume_mounts="{{ result.container.Mounts | selectattr('Type', 'equalto', 'volume') }}"

- name: Create Backup of Container Volumes
  community.docker.docker_container:
    name: "backup-container-{{ item.Name }}-{{ 10 | random }}"
    image: ubuntu
    command: "tar -czvf /backups/{{ item.Name }}-{{ backup_time }}.tar.gz /data"
    cleanup: true
    detach: false # block until this container exits.
    state: started
    volumes:
      - "{{ item.Name }}:/data"
      - "{{ docker_backup_host_backup_directory }}:/backups"
  with_items: "{{ volume_mounts }}"

- name: Start the container
  community.docker.docker_container:
    name: "{{ container_backup }}"
    state: started

- name: Upload backups to S3
  register: upload_result
  amazon.aws.aws_s3:
    s3_url: "{{ docker_backup_aws_s3_url }}"
    bucket: "{{ docker_backup_aws_s3_bucket }}"
    object: "{{ item.Name }}/{{ item.Name }}-{{ backup_time }}.tar.gz"
    src: "{{ docker_backup_host_backup_directory }}/{{ item.Name }}-{{ backup_time }}.tar.gz"
    aws_access_key: "{{ docker_backup_aws_s3_aws_access_key }}"
    aws_secret_key: "{{ docker_backup_aws_s3_aws_secret_key }}"
    region: "{{ docker_backup_aws_s3_region }}"
    mode: put
    encrypt: "{{ docker_backup_aws_s3_encrypt }}"
    permission: "{{ docker_backup_aws_s3_permissions }}"
  with_items: "{{ volume_mounts }}"

- name: Remove local files.
  ansible.builtin.file:
    path: "{{ docker_backup_host_backup_directory }}/{{ item.Name }}-{{ backup_time }}.tar.gz"
    state: absent
  with_items: "{{ volume_mounts }}"
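
For completeness, restoring one of these archives is the reverse operation: run a throwaway container that mounts the target volume and the downloaded archive, then extract over /data. A minimal sketch under the same conventions as the backup task (restore_volume and restore_file are hypothetical variables, not defined in this diff):

- name: Restore a volume from a backup archive
  community.docker.docker_container:
    name: "restore-container-{{ 10 | random }}"
    image: ubuntu
    # the backup was created with "tar -czvf ... /data", so the archive stores
    # paths under data/; extracting with -C / unpacks them back into the volume
    command: "tar -xzvf /backups/{{ restore_file }} -C /"
    cleanup: true
    detach: false # block until extraction finishes
    state: started
    volumes:
      - "{{ restore_volume }}:/data"
      - "{{ docker_backup_host_backup_directory }}:/backups"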