pull/1/head
Cian Hatton 3 years ago
parent 5780d07e5c
commit 617c9645dd

@@ -40,6 +40,7 @@
 - debug: msg="{{ container_s3_keys }}"
 - set_fact: volume_names="{{ docker_volume_s3_restores | map(attribute='volume_name') }}"
+# remove existing values so we can determine the s3 key automatically
 - set_fact: docker_volume_s3_restores="{{ [] }}"
 - set_fact:
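Note on the hunk above: volume_names is captured with map(attribute='volume_name') before docker_volume_s3_restores is emptied and rebuilt, which matches the new comment about determining each S3 key automatically. A minimal sketch of what that filter yields, using made-up volume names purely for illustration (not values from this repository):

# Hypothetical input, for illustration only:
# docker_volume_s3_restores:
#   - { volume_name: "app_data", s3_key: "" }
#   - { volume_name: "db_data",  s3_key: "" }
- set_fact:
    volume_names: "{{ docker_volume_s3_restores | map(attribute='volume_name') | list }}"
# volume_names is now ["app_data", "db_data"]; "| list" just forces a plain list
# instead of a lazy generator when templating.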
@@ -50,63 +51,58 @@
 - debug: msg="{{ docker_volume_s3_restores }}"
-#- set_fact:
-#    volume_details: "{{ volume_details |default([]) + [ {'mount': item.0, 's3_key': item.1} ] }}"
-#  with_together:
-#    - "{{ volume_mounts }}"
-#    - "{{ container_s3_keys }}"
-
-#- name: Create directories for /tmp file
-#  file:
-#    path: '/tmp/{{ item.s3_key | dirname }}'
-#    state: directory
-#    mode: '0755'
-#  with_items: '{{ docker_volume_s3_restores }}'
-
-#- name: Download archive from S3
-#  amazon.aws.aws_s3:
-#    bucket: "{{ docker_s3_volume_restore_aws_s3_bucket }}"
-#    object: "{{ item.s3_key }}"
-#    aws_access_key: "{{ docker_s3_volume_restore_aws_s3_aws_access_key }}"
-#    aws_secret_key: "{{ docker_s3_volume_restore_aws_s3_aws_secret_key }}"
-#    region: "{{ docker_s3_volume_restore_aws_s3_region }}"
-#    s3_url: "{{ docker_s3_volume_restore_aws_s3_url }}"
-#    mode: get
-#    dest: "/tmp/{{ item.s3_key }}"
-#  register: get_out
-#  with_items: "{{ docker_volume_s3_restores }}"
-
-#- name: Remove contents of volumes
-#  community.docker.docker_container:
-#    name: "restore-container-{{ item.volume_name }}-{{ 10 | random }}"
-#    image: ubuntu
-#    command: "rm -rf ./*"
-#    auto_remove: true
-#    detach: false # block until this container exists.
-#    state: started
-#    # start inside the directory we want to wipe
-#    working_dir: "/data"
-#    volumes:
-#      - "{{ item.volume_name }}:/data"
-#  with_items: "{{ docker_volume_s3_restores }}"
-
-#- name: Restore contents of volumes
-#  community.docker.docker_container:
-#    name: "restore-container-{{ item.volume_name }}-{{ 10 | random }}"
-#    image: ubuntu
-#    # extract the tar into the volume.
-#    command: "tar xvf /tmp/{{ item.s3_key }} -C /data --strip-components 1"
-#    auto_remove: true
-#    detach: false # block until this container exists.
-#    state: started
-#    volumes:
-#      - "{{ item.volume_name }}:/data"
-#      - /tmp:/tmp
-#  with_items: "{{ docker_volume_s3_restores }}"
-
-#- name: Remove uploaded files from /tmp
-#  file:
-#    path: '/tmp/{{ item.s3_key }}'
-#    state: absent
-#  with_items: '{{ docker_volume_s3_restores }}'
+- name: Create directories for /tmp file
+  file:
+    path: '/tmp/{{ item.s3_key | dirname }}'
+    state: directory
+    mode: '0755'
+  with_items: '{{ docker_volume_s3_restores }}'
+
+- name: Download archive from S3
+  amazon.aws.aws_s3:
+    bucket: "{{ docker_s3_volume_restore_aws_s3_bucket }}"
+    object: "{{ item.s3_key }}"
+    aws_access_key: "{{ docker_s3_volume_restore_aws_s3_aws_access_key }}"
+    aws_secret_key: "{{ docker_s3_volume_restore_aws_s3_aws_secret_key }}"
+    region: "{{ docker_s3_volume_restore_aws_s3_region }}"
+    s3_url: "{{ docker_s3_volume_restore_aws_s3_url }}"
+    mode: get
+    dest: "/tmp/{{ item.s3_key }}"
+  register: get_out
+  with_items: "{{ docker_volume_s3_restores }}"
+
+- name: Remove contents of volumes
+  community.docker.docker_container:
+    name: "restore-container-{{ item.volume_name }}-{{ 10 | random }}"
+    image: ubuntu
+    command: "rm -rf ./*"
+    auto_remove: true
+    detach: false # block until this container exists.
+    state: started
+    # start inside the directory we want to wipe
+    working_dir: "/data"
+    volumes:
+      - "{{ item.volume_name }}:/data"
+  with_items: "{{ docker_volume_s3_restores }}"
+
+- name: Restore contents of volumes
+  community.docker.docker_container:
+    name: "restore-container-{{ item.volume_name }}-{{ 10 | random }}"
+    image: ubuntu
+    # extract the tar into the volume.
+    command: "tar xvf /tmp/{{ item.s3_key }} -C /data --strip-components 1"
+    auto_remove: true
+    detach: false # block until this container exists.
+    state: started
+    volumes:
+      - "{{ item.volume_name }}:/data"
+      - /tmp:/tmp
+  with_items: "{{ docker_volume_s3_restores }}"
+
+- name: Remove uploaded files from /tmp
+  file:
+    path: '/tmp/{{ item.s3_key }}'
+    state: absent
+  with_items: '{{ docker_volume_s3_restores }}'
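Taken together, the uncommented tasks expect the S3 connection details and a list of volume/key pairs to be supplied as role variables, and they run the restore inside throwaway ubuntu containers that mount the target volume at /data, so the host never has to touch the volume's mountpoint directly. A hedged sketch of how a consuming playbook might wire this up; the role name and every value below are placeholders, not taken from this repository:

- hosts: docker_hosts
  become: true
  roles:
    - role: docker-s3-volume-restore   # placeholder role name
      vars:
        docker_s3_volume_restore_aws_s3_bucket: my-backup-bucket                  # placeholder
        docker_s3_volume_restore_aws_s3_region: eu-west-1                         # placeholder
        docker_s3_volume_restore_aws_s3_url: https://s3.eu-west-1.amazonaws.com   # placeholder
        docker_s3_volume_restore_aws_s3_aws_access_key: "{{ vault_aws_access_key }}"  # placeholder lookup
        docker_s3_volume_restore_aws_s3_aws_secret_key: "{{ vault_aws_secret_key }}"  # placeholder lookup
        # Each entry pairs a Docker volume with the S3 object (tar archive) to restore into it.
        docker_volume_s3_restores:
          - volume_name: app_data                # placeholder volume
            s3_key: backups/app_data.tar.gz      # placeholder object key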
