diff --git a/ansible/library/portainer.py b/ansible/library/portainer.py
index 79e1685..f3d79dd 100755
--- a/ansible/library/portainer.py
+++ b/ansible/library/portainer.py
@@ -113,8 +113,11 @@ class PortainerClient:
             "Authorization": f"Bearer {self.token}"
         }
 
-    def get(self, endpoint):
+    def get(self, endpoint, query_params=None):
         url = f"{self.base_url}/api/{endpoint}"
+        if query_params:
+            url = url + _query_params_to_string(query_params)
+
         res = requests.get(url, headers=self.headers)
         res.raise_for_status()
         return res.json()
@@ -143,10 +146,8 @@ class PortainerClient:
         return res.json()
 
 
-def _create_stack(client, module):
+def _create_stack(client, module, file_contents):
     target_stack_name = module.params["stack_name"]
-    with open(module.params["docker_compose_file_path"]) as f:
-        file_contents = f.read()
     body = {
         "env": [],
         "name": target_stack_name,
@@ -182,6 +183,9 @@ def handle_state_present(client, module):
     stacks = client.get("stacks")
     result["stacks"] = stacks
 
+    with open(module.params["docker_compose_file_path"]) as f:
+        file_contents = f.read()
+
     target_stack_name = module.params["stack_name"]
     for stack in stacks:
         if stack["Name"] == target_stack_name:
@@ -190,15 +194,24 @@ def handle_state_present(client, module):
             break
 
     if not already_exists:
-        stack = _create_stack(client, module)
+        stack = _create_stack(client, module, file_contents)
         result["changed"] = True
         result["stack_id"] = stack["Id"]
         module.exit_json(**result)
         return
 
-    # TODO: is it possible to know if we've changed the stack?
-    # the stack exists, we just want to update it.
-    _update_stack(client, module, result["stack_id"])
+    stack_id = result["stack_id"]
+    current_file_contents_resp = client.get(f"stacks/{stack_id}/file", query_params={
+        "endpointId": 2
+    })
+
+    result["are_equal"] = current_file_contents_resp["StackFileContent"] == file_contents
+    if result["are_equal"]:
+        module.exit_json(**result)
+        return
+
+    # the stack exists and we have a new config.
+    _update_stack(client, module, stack_id)
     result["changed"] = True
     module.exit_json(**result)
 
diff --git a/ansible/roles/setup_hosted_services/defaults/main.yml b/ansible/roles/setup_hosted_services/defaults/main.yml
index 64855c3..9175357 100644
--- a/ansible/roles/setup_hosted_services/defaults/main.yml
+++ b/ansible/roles/setup_hosted_services/defaults/main.yml
@@ -11,6 +11,14 @@ services:
     volumes: ["overseerr_config"]
   - name: nextcloud
     volumes: ["nextcloud_data"]
+  - name: dashboards
+    volumes: []
+  - name: nginx-proxy-manager
+    volumes: ["nginx-proxy-manager_data"]
+  - name: plex
+    volumes: ["plex_config", "plex_tautulli_config"]
+  - name: uptime-kuma
+    volumes: ["uptime-kuma_data"]
 
 docker_networks:
   - nextcloud_net
@@ -20,3 +28,13 @@ aws_s3:
   aws_access_key: "nyNMQ3fRMSV0bA1xw5uV"
   region: "us-east-1"
   bucket: "backups"
+
+# any files to be copied which are required in the docker compose files
+# as volume mounts.
+config_files:
+  - destination_directory: /etc/config/dashy
+    destination_file: dashy-config.yml
+    source_file: dashboards/dashy-config.yml
+  - destination_directory: /etc/config/olivetin
+    destination_file: config.yml
+    source_file: olivetin/config.yml
diff --git a/ansible/roles/setup_hosted_services/files/dashboards/dashy-config.yml b/ansible/roles/setup_hosted_services/files/dashboards/dashy-config.yml
new file mode 100644
index 0000000..bfb50dc
--- /dev/null
+++ b/ansible/roles/setup_hosted_services/files/dashboards/dashy-config.yml
@@ -0,0 +1,104 @@
+# https://github.com/Lissy93/dashy/blob/master/docs/showcase.md
+# Details about config.
+# https://github.com/Lissy93/dashy/blob/master/docs/configuring.md
+# Widgets: https://github.com/Lissy93/dashy/blob/master/docs/widgets.md
+---
+pageInfo:
+  title: Home Lab
+sections:
+  - name: Dashboards
+    widgets:
+      - type: gl-disk-space
+        options:
+          hostname: http://qnap:8083
+      - type: crypto-watch-list
+        options:
+          currency: GBP
+          sortBy: marketCap
+          assets:
+            - bitcoin
+            - ethereum
+            - monero
+            - cosmos
+            - polkadot
+            - dogecoin
+    items:
+      - title: Dash Dot
+        icon: hl-dashdot
+        url: "http://qnap:3010"
+      - title: Uptime Kuma
+        icon: hl-uptime-kuma
+        url: "http://qnap:3001"
+      - title: Tautulli
+        icon: hl-tautulli
+        url: "http://qnap:8182"
+      - title: Glances
+        icon: hl-glances
+        url: "http://qnap:8083"
+  - name: Media Stack
+    items:
+      - title: Plex
+        icon: hl-plex
+        url: "http://qnap:32400"
+        statusCheck: false
+      - title: Sonarr
+        icon: hl-sonarr
+        url: "http://qnap:8989"
+      - title: Radarr
+        icon: hl-radarr
+        url: "http://qnap:7878"
+      - title: Overseerr
+        icon: hl-overseerr
+        url: "http://qnap:5055"
+      - title: Jackett
+        icon: hl-jackett
+        url: "http://qnap:9117"
+        statusCheckUrl: "http://qnap:9117/health"
+      - title: Qbittorrent
+        icon: hl-qbittorrent
+        url: "http://qnap:15000"
+  - name: Tools
+    items:
+      - title: Photoprism
+        description: Manager photos
+        icon: hl-photoprism
+        url: "http://qnap:2342"
+      - title: Olivetin
+        description: Run pre-defined shell commands
+        icon: hl-olivetin
+        url: "http://qnap:1337"
+      - title: Linkding
+        description: Manager photos
+        icon: hl-linkding
+        url: "http://qnap:9090"
+      - title: Nextcloud
+        icon: hl-nextcloud
+        url: "http://qnap:8081"
+      - title: Mealie
+        icon: hl-mealie
+        url: "https://mealie.cianhatton.ie"
+      - title: Gitea
+        icon: hl-gitea
+        url: "https://git.cianhatton.ie"
+  - name: System Admin
+    items:
+      - title: Portainer
+        description: Manage docker apps using Portainer
+        icon: hl-portainer
+        url: "http://qnap:9000"
+      - title: Webmin
+        icon: hl-webmin
+        url: "http://qnap:10000"
+      - title: Adminer
+        description: Manage MariaDB
+        icon: hl-adminer
+        url: "http://qnap:3307"
+      - title: Nginx Proxy Manager
+        description: Manage reverse proxies
+        icon: hl-nginx
+        url: "http://qnap:8181"
+appConfig:
+  statusCheck: true
+  showSplashScreen: false
+  theme: dracula
+  language: en
diff --git a/ansible/roles/setup_hosted_services/files/dashboards/docker-compose.yml b/ansible/roles/setup_hosted_services/files/dashboards/docker-compose.yml
new file mode 100644
index 0000000..f84d74e
--- /dev/null
+++ b/ansible/roles/setup_hosted_services/files/dashboards/docker-compose.yml
@@ -0,0 +1,50 @@
+version: '3.5'
+services:
+  dash-dot:
+    container_name: dashdot
+    image: mauricenino/dashdot:latest
+    restart: unless-stopped
+    privileged: true
+    ports:
+      - '3010:3001'
+    volumes:
+      - /:/mnt/host:ro
+
+  dashy:
+    container_name: dashy
+    image: lissy93/dashy
+    volumes:
+      - /etc/config/dashy/dashy-config.yml:/app/public/conf.yml
+
+    ports:
+      - 4000:80
+
+    environment:
+      - NODE_ENV=production
+      - UID=1000
+      - GID=1000
+
+    # Specify restart policy
+    restart: unless-stopped
+
+    # Configure healthchecks
+    healthcheck:
+      test: ['CMD', 'node', '/app/services/healthcheck']
+      interval: 1m30s
+      timeout: 10s
+      retries: 3
+      start_period: 40s
+
+    extra_hosts:
+      - "qnap:192.168.178.42"
+
+  glances:
+    image: nicolargo/glances:latest-alpine
+    restart: always
+    pid: host
+    ports:
+      - "8083:61208"
+    volumes:
+      - /var/run/docker.sock:/var/run/docker.sock
+    environment:
+      - "GLANCES_OPT=-w"
diff --git a/ansible/roles/setup_hosted_services/files/nginx-proxy-manager/docker-compose.yml b/ansible/roles/setup_hosted_services/files/nginx-proxy-manager/docker-compose.yml
new file mode 100644
index 0000000..c27a373
--- /dev/null
+++ b/ansible/roles/setup_hosted_services/files/nginx-proxy-manager/docker-compose.yml
@@ -0,0 +1,16 @@
+version: "3"
+services:
+  nginx-proxy-manager:
+    labels:
+      ie.cianhatton.backup.enabled: "true"
+    image: jlesage/nginx-proxy-manager
+    container_name: nginx-proxy-manager
+    restart: unless-stopped
+    ports:
+      - 8181:8181
+      - 80:8080
+      - 443:4443
+    volumes:
+      - "data:/config:rw"
+volumes:
+  data:
diff --git a/ansible/roles/setup_hosted_services/files/olivetin/config.yml b/ansible/roles/setup_hosted_services/files/olivetin/config.yml
new file mode 100644
index 0000000..2e29747
--- /dev/null
+++ b/ansible/roles/setup_hosted_services/files/olivetin/config.yml
@@ -0,0 +1,19 @@
+# Listen on all addresses available, port 1337
+listenAddressSingleHTTPFrontend: 0.0.0.0:1337
+# Choose from INFO (default), WARN and DEBUG
+logLevel: "INFO"
+# Actions (buttons) to show up on the WebUI:
+actions:
+  # Docs: https://docs.olivetin.app/action-container-control.html
+  - title: Restart Plex
+    icon: plex
+    shell: docker restart plex
+    timeout: 30
+  - title: Restart Dashy
+    icon: restart
+    shell: docker restart dashy
+    timeout: 30
+  - title: Restart Olivetin
+    icon: restart
+    shell: docker restart olivetin
+    timeout: 30
diff --git a/ansible/roles/setup_hosted_services/files/olivetin/docker-compose.yml b/ansible/roles/setup_hosted_services/files/olivetin/docker-compose.yml
new file mode 100644
index 0000000..ee6cd9f
--- /dev/null
+++ b/ansible/roles/setup_hosted_services/files/olivetin/docker-compose.yml
@@ -0,0 +1,12 @@
+version: "3.8"
+services:
+  olivetin:
+    container_name: olivetin
+    image: jamesread/olivetin
+    user: root
+    volumes:
+      - /etc/config/olivetin:/config
+      - /var/run/docker.sock:/var/run/docker.sock
+    ports:
+      - "1337:1337"
+    restart: unless-stopped
diff --git a/ansible/roles/setup_hosted_services/files/plex/docker-compose.yml b/ansible/roles/setup_hosted_services/files/plex/docker-compose.yml
new file mode 100644
index 0000000..25b69d0
--- /dev/null
+++ b/ansible/roles/setup_hosted_services/files/plex/docker-compose.yml
@@ -0,0 +1,42 @@
+---
+version: "3"
+services:
+  plex:
+    labels:
+      ie.cianhatton.backup.enabled: "true"
+    image: lscr.io/linuxserver/plex:latest
+    container_name: plex
+    ports:
+      - 32400:32400
+    network_mode: host
+    environment:
+      - PUID=1000
+      - PGID=1000
+      - VERSION=docker
+    volumes:
+      - config:/config
+      - /mnt/hdds/media/tv:/tv
+      - /mnt/hdds/media/movies:/movies
+      - /mnt/ssd0/transcoding:/transcoding
+    restart: unless-stopped
+    devices:
+      - /dev/dri:/dev/dri
+
+  tautulli:
+    labels:
+      ie.cianhatton.backup.enabled: "true"
+    image: lscr.io/linuxserver/tautulli:latest
+    container_name: tautulli
+    environment:
+      - PUID=1000
+      - PGID=1000
+      - TZ=Europe/London
+    volumes:
+      - tautulli_config:/config
+    ports:
+      - 8182:8181
+    restart: unless-stopped
+
+volumes:
+  config:
+  tautulli_config:
\ No newline at end of file
diff --git a/ansible/roles/setup_hosted_services/files/uptime-kuma/docker-compose.yml b/ansible/roles/setup_hosted_services/files/uptime-kuma/docker-compose.yml
new file mode 100644
index 0000000..c614c8d
--- /dev/null
+++ b/ansible/roles/setup_hosted_services/files/uptime-kuma/docker-compose.yml
@@ -0,0 +1,16 @@
+version: '3.3'
+
+services:
+  uptime-kuma:
+    labels:
+      ie.cianhatton.backup.enabled: "true"
+    image: louislam/uptime-kuma:1
+    container_name: uptime-kuma
+    volumes:
+      - data:/app/data
+    ports:
+      - 3001:3001
+    restart: unless-stopped
+
+volumes:
+  data:
diff --git a/ansible/roles/setup_hosted_services/tasks/main.yml b/ansible/roles/setup_hosted_services/tasks/main.yml
index 665be9a..0577c61 100644
--- a/ansible/roles/setup_hosted_services/tasks/main.yml
+++ b/ansible/roles/setup_hosted_services/tasks/main.yml
@@ -1,21 +1,34 @@
-- name: Create a directory if it does not exist
-  ansible.builtin.file:
+- name: Docker Compose | Create a directory if it does not exist
+  file:
     path: "{{docker_compose_directory}}/{{item.name}}"
     state: directory
     mode: '0755'
   with_items: "{{services}}"
 
-- name: Copy Docker Compose Files
+- name: Docker Compose | Copy Docker Compose Files
   copy:
    src: "{{item.name}}/docker-compose.yml"
     dest: "{{docker_compose_directory}}/{{item.name}}/docker-compose.yml"
   with_items: "{{services}}"
 
+- name: Config Files | Create a directory if it does not exist
+  file:
+    path: "{{item.destination_directory}}"
+    state: directory
+    mode: '0755'
+  with_items: "{{config_files}}"
+
+- name: Config Files | Copy config file
+  copy:
+    src: "{{item.source_file}}"
+    dest: "{{item.destination_directory}}/{{item.destination_file}}"
+  with_items: "{{config_files}}"
+
 - name: Install python dependencies (requests)
   ansible.builtin.pip:
     name: requests
 
-- name: Find docker volumes
+- name: Docker | Find docker volumes
   shell: docker volume ls -f name={{item.name}} --format '{{ '{{' }} .Name {{ '}}' }}'
   with_items: "{{services}}"
   register: find_volumes
@@ -23,7 +36,7 @@
 
 - debug: msg="{{find_volumes.results | map(attribute='stdout_lines') | list | flatten }}"
 
-- name: Find volumes that need to be restored
+- name: Docker | Find volumes that need to be restored
   script: scripts/find-volumes-to-restore.py
   environment:
     EXISTING_VOLUMES: "{{ find_volumes.results | map(attribute='stdout_lines') | list | flatten }}"
@@ -35,7 +48,7 @@
 
 - debug: msg="{{python_output.stdout_lines | list }}"
 
-- name: Restore any missing backups from S3
+- name: Docker Volume Backup | Restore any missing backups from S3
   docker_container:
     command: "restore-volume --s3 --volume {{item}}"
     image: "ghcr.io/chatton/docker-volume-backup:v0.3.0"
@@ -54,12 +67,12 @@
       AWS_ENDPOINT: "{{aws_s3.s3_url}}"
   with_items: "{{ python_output.stdout_lines }}"
 
-- name: Create required docker networks
+- name: Docker | Create required docker networks
   docker_network:
     name: "{{item}}"
   with_items: "{{ docker_networks }}"
 
-- name: Update Portainer Stack
+- name: Portainer | Update Stack
   portainer:
     username: admin
     password: "{{portainer.password}}"
diff --git a/ansible/roles/setup_portainer/tasks/main.yml b/ansible/roles/setup_portainer/tasks/main.yml
index 89af82b..2cdf049 100644
--- a/ansible/roles/setup_portainer/tasks/main.yml
+++ b/ansible/roles/setup_portainer/tasks/main.yml
@@ -1,14 +1,14 @@
-- name: Create a directory if it does not exist
+- name: Portainer | Create directory if it does not exist
   ansible.builtin.file:
     path: "{{docker_compose_directory}}"
     state: directory
     mode: '0755'
 
-- name: Copy Portainer Docker Compose File
+- name: Portainer | Copy docker compose file
   copy:
     src: docker-compose.yml
     dest: "{{docker_compose_directory}}/docker-compose.yml"
 
-- name: Create and Portainer
+- name: Portainer | Docker compose up
   community.docker.docker_compose:
     project_src: "{{docker_compose_directory}}"