Add additional services and update portainer module to only update on changed file (#5)

Cian Hatton authored 3 years ago, committed by GitHub
parent 06c3defe62
commit c996e1061d

@@ -113,8 +113,11 @@ class PortainerClient:
             "Authorization": f"Bearer {self.token}"
         }
 
-    def get(self, endpoint):
+    def get(self, endpoint, query_params=None):
         url = f"{self.base_url}/api/{endpoint}"
+        if query_params:
+            url = url + _query_params_to_string(query_params)
+
         res = requests.get(url, headers=self.headers)
         res.raise_for_status()
         return res.json()
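The `_query_params_to_string` helper called here is referenced but not shown in this diff. A minimal sketch of what it could look like, assuming it simply serialises a dict into a `?key=value&...` suffix (the actual implementation may differ):

```python
from urllib.parse import urlencode

def _query_params_to_string(query_params):
    # Hypothetical helper (not part of this hunk): turn a dict of query
    # parameters into a URL suffix such as "?endpointId=2".
    return "?" + urlencode(query_params)
```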
@@ -143,10 +146,8 @@ class PortainerClient:
         return res.json()
 
 
-def _create_stack(client, module):
+def _create_stack(client, module, file_contents):
     target_stack_name = module.params["stack_name"]
-    with open(module.params["docker_compose_file_path"]) as f:
-        file_contents = f.read()
     body = {
         "env": [],
         "name": target_stack_name,
@@ -182,6 +183,9 @@ def handle_state_present(client, module):
     stacks = client.get("stacks")
     result["stacks"] = stacks
 
+    with open(module.params["docker_compose_file_path"]) as f:
+        file_contents = f.read()
+
     target_stack_name = module.params["stack_name"]
     for stack in stacks:
         if stack["Name"] == target_stack_name:
@@ -190,15 +194,24 @@ def handle_state_present(client, module):
             break
 
     if not already_exists:
-        stack = _create_stack(client, module)
+        stack = _create_stack(client, module, file_contents)
         result["changed"] = True
         result["stack_id"] = stack["Id"]
         module.exit_json(**result)
         return
 
-    # TODO: is it possible to know if we've changed the stack?
-    # the stack exists, we just want to update it.
-    _update_stack(client, module, result["stack_id"])
+    stack_id = result["stack_id"]
+    current_file_contents_resp = client.get(f"stacks/{stack_id}/file", query_params={
+        "endpointId": 2
+    })
+
+    result["are_equal"] = current_file_contents_resp["StackFileContent"] == file_contents
+    if result["are_equal"]:
+        module.exit_json(**result)
+        return
+
+    # the stack exists and we have a new config.
+    _update_stack(client, module, stack_id)
     result["changed"] = True
     module.exit_json(**result)
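The idempotency check this hunk introduces can be read in isolation as a small helper. A minimal sketch, based only on what the diff shows (the `stacks/{id}/file` endpoint, a `StackFileContent` key in the response, and the hard-coded endpoint id 2):

```python
def _stack_file_unchanged(client, stack_id, file_contents, endpoint_id=2):
    # Fetch the compose file Portainer currently holds for this stack and
    # compare it to the local file; True means no update is needed.
    # endpoint_id=2 mirrors the hard-coded value in the change above.
    resp = client.get(f"stacks/{stack_id}/file",
                      query_params={"endpointId": endpoint_id})
    return resp["StackFileContent"] == file_contents
```

Making the endpoint id a parameter (rather than the literal 2) would be a natural follow-up if more than one Portainer endpoint is ever targeted.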

@@ -11,6 +11,14 @@ services:
     volumes: ["overseerr_config"]
   - name: nextcloud
     volumes: ["nextcloud_data"]
+  - name: dashboards
+    volumes: []
+  - name: nginx-proxy-manager
+    volumes: ["nginx-proxy-manager_data"]
+  - name: plex
+    volumes: ["plex_config", "plex_tautulli_config"]
+  - name: uptime-kuma
+    volumes: ["uptime-kuma_data"]
 docker_networks:
   - nextcloud_net
@@ -20,3 +28,13 @@ aws_s3:
   aws_access_key: "nyNMQ3fRMSV0bA1xw5uV"
   region: "us-east-1"
   bucket: "backups"
+
+# Files that must be copied to the host because the docker compose files
+# reference them as volume mounts.
+config_files:
+  - destination_directory: /etc/config/dashy
+    destination_file: dashy-config.yml
+    source_file: dashboards/dashy-config.yml
+  - destination_directory: /etc/config/olivetin
+    destination_file: config.yml
+    source_file: olivetin/config.yml

@@ -0,0 +1,104 @@
# https://github.com/Lissy93/dashy/blob/master/docs/showcase.md
# Details about config:
# https://github.com/Lissy93/dashy/blob/master/docs/configuring.md
# Widgets: https://github.com/Lissy93/dashy/blob/master/docs/widgets.md
---
pageInfo:
  title: Home Lab
sections:
  - name: Dashboards
    widgets:
      - type: gl-disk-space
        options:
          hostname: http://qnap:8083
      - type: crypto-watch-list
        options:
          currency: GBP
          sortBy: marketCap
          assets:
            - bitcoin
            - ethereum
            - monero
            - cosmos
            - polkadot
            - dogecoin
    items:
      - title: Dash Dot
        icon: hl-dashdot
        url: "http://qnap:3010"
      - title: Uptime Kuma
        icon: hl-uptime-kuma
        url: "http://qnap:3001"
      - title: Tautulli
        icon: hl-tautulli
        url: "http://qnap:8182"
      - title: Glances
        icon: hl-glances
        url: "http://qnap:8083"
  - name: Media Stack
    items:
      - title: Plex
        icon: hl-plex
        url: "http://qnap:32400"
        statusCheck: false
      - title: Sonarr
        icon: hl-sonarr
        url: "http://qnap:8989"
      - title: Radarr
        icon: hl-radarr
        url: "http://qnap:7878"
      - title: Overseerr
        icon: hl-overseerr
        url: "http://qnap:5055"
      - title: Jackett
        icon: hl-jackett
        url: "http://qnap:9117"
        statusCheckUrl: "http://qnap:9117/health"
      - title: Qbittorrent
        icon: hl-qbittorrent
        url: "http://qnap:15000"
  - name: Tools
    items:
      - title: Photoprism
        description: Manage photos
        icon: hl-photoprism
        url: "http://qnap:2342"
      - title: Olivetin
        description: Run pre-defined shell commands
        icon: hl-olivetin
        url: "http://qnap:1337"
      - title: Linkding
        description: Manage bookmarks
        icon: hl-linkding
        url: "http://qnap:9090"
      - title: Nextcloud
        icon: hl-nextcloud
        url: "http://qnap:8081"
      - title: Mealie
        icon: hl-mealie
        url: "https://mealie.cianhatton.ie"
      - title: Gitea
        icon: hl-gitea
        url: "https://git.cianhatton.ie"
  - name: System Admin
    items:
      - title: Portainer
        description: Manage docker apps using Portainer
        icon: hl-portainer
        url: "http://qnap:9000"
      - title: Webmin
        icon: hl-webmin
        url: "http://qnap:10000"
      - title: Adminer
        description: Manage MariaDB
        icon: hl-adminer
        url: "http://qnap:3307"
      - title: Nginx Proxy Manager
        description: Manage reverse proxies
        icon: hl-nginx
        url: "http://qnap:8181"
appConfig:
  statusCheck: true
  showSplashScreen: false
  theme: dracula
  language: en

@@ -0,0 +1,50 @@
version: '3.5'
services:
  dash-dot:
    container_name: dashdot
    image: mauricenino/dashdot:latest
    restart: unless-stopped
    privileged: true
    ports:
      - '3010:3001'
    volumes:
      - /:/mnt/host:ro

  dashy:
    container_name: dashy
    image: lissy93/dashy
    volumes:
      - /etc/config/dashy/dashy-config.yml:/app/public/conf.yml
    ports:
      - 4000:80
    environment:
      - NODE_ENV=production
      - UID=1000
      - GID=1000
    # Specify restart policy
    restart: unless-stopped
    # Configure healthchecks
    healthcheck:
      test: ['CMD', 'node', '/app/services/healthcheck']
      interval: 1m30s
      timeout: 10s
      retries: 3
      start_period: 40s
    extra_hosts:
      - "qnap:192.168.178.42"

  glances:
    image: nicolargo/glances:latest-alpine
    restart: always
    pid: host
    ports:
      - "8083:61208"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    environment:
      - "GLANCES_OPT=-w"

@@ -0,0 +1,16 @@
version: "3"
services:
  nginx-proxy-manager:
    labels:
      ie.cianhatton.backup.enabled: "true"
    image: jlesage/nginx-proxy-manager
    container_name: nginx-proxy-manager
    restart: unless-stopped
    ports:
      - 8181:8181
      - 80:8080
      - 443:4443
    volumes:
      - "data:/config:rw"
volumes:
  data:

@@ -0,0 +1,19 @@
# Listen on all addresses available, port 1337
listenAddressSingleHTTPFrontend: 0.0.0.0:1337
# Choose from INFO (default), WARN and DEBUG
logLevel: "INFO"
# Actions (buttons) to show up on the WebUI:
actions:
  # Docs: https://docs.olivetin.app/action-container-control.html
  - title: Restart Plex
    icon: plex
    shell: docker restart plex
    timeout: 30
  - title: Restart Dashy
    icon: restart
    shell: docker restart dashy
    timeout: 30
  - title: Restart Olivetin
    icon: restart
    shell: docker restart olivetin
    timeout: 30

@@ -0,0 +1,12 @@
version: "3.8"
services:
  olivetin:
    container_name: olivetin
    image: jamesread/olivetin
    user: root
    volumes:
      - /etc/config/olivetin:/config
      - /var/run/docker.sock:/var/run/docker.sock
    ports:
      - "1337:1337"
    restart: unless-stopped

@@ -0,0 +1,42 @@
---
version: "3"
services:
  plex:
    labels:
      ie.cianhatton.backup.enabled: "true"
    image: lscr.io/linuxserver/plex:latest
    container_name: plex
    ports:
      - 32400:32400
    network_mode: host
    environment:
      - PUID=1000
      - PGID=1000
      - VERSION=docker
    volumes:
      - config:/config
      - /mnt/hdds/media/tv:/tv
      - /mnt/hdds/media/movies:/movies
      - /mnt/ssd0/transcoding:/transcoding
    restart: unless-stopped
    devices:
      - /dev/dri:/dev/dri

  tautulli:
    labels:
      ie.cianhatton.backup.enabled: "true"
    image: lscr.io/linuxserver/tautulli:latest
    container_name: tautulli
    environment:
      - PUID=1000
      - PGID=1000
      - TZ=Europe/London
    volumes:
      - tautulli_config:/config
    ports:
      - 8182:8181
    restart: unless-stopped

volumes:
  config:
  tautulli_config:

@@ -0,0 +1,16 @@
version: '3.3'
services:
  uptime-kuma:
    labels:
      ie.cianhatton.backup.enabled: "true"
    image: louislam/uptime-kuma:1
    container_name: uptime-kuma
    volumes:
      - data:/app/data
    ports:
      - 3001:3001
    restart: unless-stopped

volumes:
  data:

@@ -1,21 +1,34 @@
-- name: Create a directory if it does not exist
-  ansible.builtin.file:
+- name: Docker Compose | Create a directory if it does not exist
+  file:
     path: "{{docker_compose_directory}}/{{item.name}}"
     state: directory
     mode: '0755'
   with_items: "{{services}}"
 
-- name: Copy Docker Compose Files
+- name: Docker Compose | Copy Docker Compose Files
   copy:
     src: "{{item.name}}/docker-compose.yml"
     dest: "{{docker_compose_directory}}/{{item.name}}/docker-compose.yml"
   with_items: "{{services}}"
 
+- name: Config Files | Create a directory if it does not exist
+  file:
+    path: "{{item.destination_directory}}"
+    state: directory
+    mode: '0755'
+  with_items: "{{config_files}}"
+
+- name: Config Files | Copy config file
+  copy:
+    src: "{{item.source_file}}"
+    dest: "{{item.destination_directory}}/{{item.destination_file}}"
+  with_items: "{{config_files}}"
+
 - name: Install python dependencies (requests)
   ansible.builtin.pip:
     name: requests
 
-- name: Find docker volumes
+- name: Docker | Find docker volumes
   shell: docker volume ls -f name={{item.name}} --format '{{ '{{' }} .Name {{ '}}' }}'
   with_items: "{{services}}"
   register: find_volumes
@@ -23,7 +36,7 @@
 - debug: msg="{{find_volumes.results | map(attribute='stdout_lines') | list | flatten }}"
 
-- name: Find volumes that need to be restored
+- name: Docker | Find volumes that need to be restored
   script: scripts/find-volumes-to-restore.py
   environment:
     EXISTING_VOLUMES: "{{ find_volumes.results | map(attribute='stdout_lines') | list | flatten }}"
@@ -35,7 +48,7 @@
 - debug: msg="{{python_output.stdout_lines | list }}"
 
-- name: Restore any missing backups from S3
+- name: Docker Volume Backup | Restore any missing backups from S3
   docker_container:
     command: "restore-volume --s3 --volume {{item}}"
     image: "ghcr.io/chatton/docker-volume-backup:v0.3.0"
@@ -54,12 +67,12 @@
     AWS_ENDPOINT: "{{aws_s3.s3_url}}"
   with_items: "{{ python_output.stdout_lines }}"
 
-- name: Create required docker networks
+- name: Docker | Create required docker networks
   docker_network:
     name: "{{item}}"
   with_items: "{{ docker_networks }}"
 
-- name: Update Portainer Stack
+- name: Portainer | Update Stack
   portainer:
     username: admin
     password: "{{portainer.password}}"
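The `portainer:` task above is cut off by the hunk, so only the username and password parameters are visible. For orientation, a hypothetical sketch of the argument_spec the custom module likely declares; only stack_name and docker_compose_file_path are confirmed by the module code in this PR, the remaining names, types, and defaults are assumptions:

```python
from ansible.module_utils.basic import AnsibleModule

def main():
    # Illustrative only: parameter names beyond stack_name and
    # docker_compose_file_path are inferred, not taken from the diff.
    module = AnsibleModule(
        argument_spec=dict(
            username=dict(type="str", required=True),
            password=dict(type="str", required=True, no_log=True),
            base_url=dict(type="str", required=True),
            stack_name=dict(type="str", required=True),
            docker_compose_file_path=dict(type="path", required=True),
            state=dict(type="str", default="present", choices=["present", "absent"]),
        ),
    )
```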

@@ -1,14 +1,14 @@
-- name: Create a directory if it does not exist
+- name: Portainer | Create directory if it does not exist
   ansible.builtin.file:
     path: "{{docker_compose_directory}}"
     state: directory
     mode: '0755'
 
-- name: Copy Portainer Docker Compose File
+- name: Portainer | Copy docker compose file
   copy:
     src: docker-compose.yml
     dest: "{{docker_compose_directory}}/docker-compose.yml"
 
-- name: Create and Portainer
+- name: Portainer | Docker compose up
   community.docker.docker_compose:
     project_src: "{{docker_compose_directory}}"
