add support for deleting older backups

pull/12/head
Cian Hatton 3 years ago
parent e22e07c82b
commit 7e07fd3926

@@ -1,6 +1,9 @@
---
# defaults file for chatton.docker_s3_backup
# the number of backups of the same volume which should be kept.
# Any excess will be deleted.
docker_backup_retain_count: 3
# the backup directory where backups are stored on the host machine.
# these will be uploaded to S3.
docker_backup_aws_s3_region: "us-east-1"
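For a concrete sense of the new default: with docker_backup_retain_count: 3, only the three most recent backups of a volume survive a run. A minimal sketch of that pruning rule (the key names below are made up, not role output):

# Hypothetical, chronologically sorted backup keys for one volume.
keys = [
    "portainer_data/portainer_data-2021-01-01.tar",
    "portainer_data/portainer_data-2021-01-02.tar",
    "portainer_data/portainer_data-2021-01-03.tar",
    "portainer_data/portainer_data-2021-01-04.tar",
    "portainer_data/portainer_data-2021-01-05.tar",
]
retain_count = 3

# Everything except the last `retain_count` keys counts as excess.
excess = keys[:-retain_count]
print(excess)
# ['portainer_data/portainer_data-2021-01-01.tar',
#  'portainer_data/portainer_data-2021-01-02.tar']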

@@ -0,0 +1,21 @@
#!/usr/bin/python
import os


def main():
    # The calling task passes the registered S3 listings and the retain count
    # through environment variables.
    s3_result = eval(os.getenv("S3_RESULTS"))
    num_backups_to_keep = int(os.getenv("NUM_BACKUPS_TO_KEEP"))

    items_to_delete = []
    for res in s3_result:
        s3_keys = res["s3_keys"]
        # Select all of the backups before the most recent num_backups_to_keep.
        # These are the ones we want to delete.
        items_to_delete.extend(s3_keys[0:-num_backups_to_keep])

    # One key per line on stdout; the playbook consumes these via stdout_lines.
    for item in items_to_delete:
        print(item)


if __name__ == "__main__":
    main()
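To exercise the script outside the role, the two environment variables can be set by hand. The S3_RESULTS value below only mirrors the shape of the registered list output (one entry per volume, each carrying an s3_keys list); the key names are hypothetical:

import os
import subprocess

os.environ["S3_RESULTS"] = str([
    {"s3_keys": [
        "portainer_data/portainer_data-1.tar",
        "portainer_data/portainer_data-2.tar",
        "portainer_data/portainer_data-3.tar",
        "portainer_data/portainer_data-4.tar",
    ]},
])
os.environ["NUM_BACKUPS_TO_KEEP"] = "3"

# Prints the single excess key: portainer_data/portainer_data-1.tar
subprocess.run(["python", "scripts/determine-s3-keys-to-delete.py"], check=True)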

@@ -55,3 +55,36 @@
    mode: put
    permission: "{{ docker_backup_aws_s3_permissions }}"
  with_items: "{{ volume_mounts }}"

# try and find the existing backups for each volume based on its name.
- name: Fetch Volumes From S3.
  amazon.aws.aws_s3:
    bucket: "{{ docker_backup_aws_s3_bucket }}"
    mode: list
    prefix: "{{ item.Name }}/{{ item.Name }}"
    aws_access_key: "{{ docker_backup_aws_s3_aws_access_key }}"
    aws_secret_key: "{{ docker_backup_aws_s3_aws_secret_key }}"
    region: "{{ docker_backup_aws_s3_region }}"
    s3_url: "{{ docker_backup_aws_s3_url }}"
  register: s3_list_outputs
  with_items: "{{ volume_mounts }}"

# TODO: do this in a more native way rather than a python script reading env vars.
- name: Determine which backups should be deleted.
  script: scripts/determine-s3-keys-to-delete.py
  environment:
    S3_RESULTS: "{{ s3_list_outputs.results }}"
    NUM_BACKUPS_TO_KEEP: "{{ docker_backup_retain_count }}"
  register: python_output
  changed_when: false

- name: Delete old backups.
  amazon.aws.aws_s3:
    bucket: "{{ docker_backup_aws_s3_bucket }}"
    aws_access_key: "{{ docker_backup_aws_s3_aws_access_key }}"
    aws_secret_key: "{{ docker_backup_aws_s3_aws_secret_key }}"
    region: "{{ docker_backup_aws_s3_region }}"
    s3_url: "{{ docker_backup_aws_s3_url }}"
    object: "{{ item }}"
    mode: delobj
  with_items: "{{ python_output.stdout_lines }}"
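For readers less familiar with the aws_s3 module, the three tasks above amount to a list, prune, delete sequence per volume. The following is only an illustrative boto3 sketch of that flow (bucket, prefix, and retain count are made up), not part of the role:

import boto3

bucket = "my-backup-bucket"
prefix = "portainer_data/portainer_data"
retain_count = 3

s3 = boto3.client("s3", region_name="us-east-1")
listing = s3.list_objects_v2(Bucket=bucket, Prefix=prefix)
keys = sorted(obj["Key"] for obj in listing.get("Contents", []))

# Keys sort lexicographically, so the oldest backups come first.
for key in keys[:-retain_count]:
    s3.delete_object(Bucket=bucket, Key=key)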

@@ -8,9 +8,11 @@ services:
      - 9000:9000
    volumes:
      - portainer_data:/data
      - some_volume:/some_dir
      - /var/run/docker.sock:/var/run/docker.sock

volumes:
  some_volume:
  portainer_data:
    external: true
    name: portainer_data
