Compare commits

23 Commits

Author SHA1 Message Date
Nathaniel Landau
f7ba237d0d build(deps): update dependencies 2023-08-28 09:14:37 -04:00
Nathaniel Landau
e134616692 fix(recyclarr): migrate to v5 2023-08-28 08:49:03 -04:00
Nathaniel Landau
9194190591 build(deps): bump dependencies 2023-08-08 09:30:49 -04:00
Nathaniel Landau
2bb55f3d51 fix(valentina): update environment variables 2023-08-08 09:26:58 -04:00
Nathaniel Landau
7365e8b3d6 fix(ansible): update transfer method config 2023-08-08 09:25:55 -04:00
Nathaniel Landau
87c2a4e1b4 fix(consul): pull image from hashicorp/consul 2023-08-08 09:25:04 -04:00
Nathaniel Landau
edd9704258 feat: add valentain discord bot 2023-06-18 13:27:38 -04:00
Nathaniel Landau
cb4a0e9f8a build: add exclude_paths to ansible-lint 2023-05-24 14:30:01 -04:00
Nathaniel Landau
57c1a42f66 fix(nomad): bump to v1.5.6 2023-05-22 10:03:11 -04:00
Nathaniel Landau
8499f5029b fix(proxy): update traefik version and bump RAM allocation 2023-05-22 09:34:19 -04:00
Nathaniel Landau
47288456a5 build(deps): bump dependencies 2023-05-22 09:33:10 -04:00
Nathaniel Landau
0f35061a2c fix(recyclarr): move config to Nomad task template 2023-04-25 11:40:17 -04:00
Nathaniel Landau
2842e27282 build(deps): bump dependencies 2023-04-25 11:32:55 -04:00
Nathaniel Landau
d36212b7d7 style: pass ansible-lint 2023-04-25 11:32:29 -04:00
Nathaniel Landau
76f4af703e build: poetry venv in project folder 2023-03-31 10:14:14 -04:00
Nathaniel Landau
9bb7eeb439 fix: bump versions 2023-03-29 16:18:26 -04:00
Nathaniel Landau
5526024244 fix(nomad): run nomad as root user to enable docker plugin on raspberry pis
Nomad is running as root rather than the Nomad user due to the Docker driver not being started when cgroups v2 are enabled. More info: https://github.com/hashicorp/nomad/pull/16063
2023-03-16 22:44:52 -04:00
Nathaniel Landau
ec52175c5c fix(nomad): authelia requires additional capabilities 2023-03-16 21:48:38 -04:00
Nathaniel Landau
be56f2a308 fix: revert to nomad v1.4.6 2023-03-13 21:29:09 -04:00
Nathaniel Landau
a757ff0cf2 build: add ignore file for anslible-lint 2023-03-13 10:25:20 -04:00
Nathaniel Landau
d6c155bef1 fix: bump versions 2023-03-12 16:52:16 -04:00
Nathaniel Landau
440d570c87 fix: bump authelia version 2023-02-12 14:41:05 -05:00
Nathaniel Landau
049267cec7 ci: add poe pb to run playbook 2023-02-12 14:40:47 -05:00
33 changed files with 2556 additions and 1932 deletions

.ansible-lint-ignore (new file)

@@ -0,0 +1,32 @@
# This file contains ignored rule violations for ansible-lint
handlers/main.yml ignore-errors
handlers/main.yml name[casing]
main.yml name[casing]
main.yml name[missing]
tasks/backups.yml name[casing]
tasks/cluster_storage.yml name[casing]
tasks/consul.yml command-instead-of-module
tasks/consul.yml name[template]
tasks/consul.yml no-changed-when
tasks/debug.yml name[casing]
tasks/docker.yml name[casing]
tasks/docker.yml no-changed-when
tasks/interpolated_variables.yml name[casing]
tasks/logrotate.yml ignore-errors
tasks/logrotate.yml name[casing]
tasks/nomad.yml name[casing]
tasks/nomad.yml name[template]
tasks/orchestration_jobs.yml name[casing]
tasks/packages.yml ignore-errors
tasks/packages.yml name[casing]
tasks/pull_repositories.yml name[casing]
tasks/pull_repositories.yml no-changed-when
tasks/sanity.yml name[casing]
tasks/service_prometheus_nodeExporter.yml name[casing]
tasks/service_prometheus_nodeExporter.yml no-changed-when
tasks/tdarr.yml name[casing]
tasks/tdarr.yml no-changed-when
tasks/telegraf.yml name[casing]
tasks/telegraf.yml name[template]
tasks/telegraf.yml package-latest
vault.yml yaml[document-start]

.ansible-lint.yml

@@ -10,9 +10,10 @@ exclude_paths:
- galaxy-roles/
- .cz.yaml
- vault.yml
- .venv/
- ansible_collections/
skip_list:
- command-instead-of-shell
- name[template]
- ignore-errors
- meta-incorrect
@@ -21,10 +22,11 @@ skip_list:
- role-name
- unnamed-task
- var-naming
- name[casing]
- latest[git]
warn_list:
- experimental
- risky-file-permissions
- command-instead-of-module
- no-changed-when
- command-instead-of-shell

.pre-commit-config.yaml

@@ -1,7 +1,7 @@
---
repos:
- repo: "https://github.com/commitizen-tools/commitizen"
rev: v2.40.0
rev: 3.2.2
hooks:
- id: "commitizen"
@@ -31,7 +31,7 @@ repos:
args: [--markdown-linebreak-ext=md]
- repo: "https://github.com/adrienverge/yamllint.git"
rev: v1.29.0
rev: v1.31.0
hooks:
- id: yamllint
files: \.(yaml|yml)$

ansible.cfg

@@ -9,4 +9,4 @@ display_skipped_hosts = False
vault_password_file = ./.password_file
[ssh_connection]
scp_if_ssh = True
transfer_method = smart

default_variables.yml

@@ -1,14 +1,16 @@
---
# ---------------------------------- SOFTWARE VERSIONS
authelia_version: 4.37.3
consul_version: 1.14.2
influxdb_version: 1.8.10
nomad_version: 1.4.3
prometheus_verssion: 1.1.2
authelia_version: 4.37.5
consul_version: 1.16.1
influxdb_version: 1.11.1
nomad_version: 1.6.1
prometheus_verssion: 2.46.0
recyclarr_version: 5.3.1
speedtest_cli_version: 1.2.0
tdarr_installer_version: 2.00.13
telegraf_version: 1.25.0
traefik_version: "v2.9.6"
telegraf_version: 1.27.2
traefik_version: "v2.10.4"
valentina_version: 1.6.0
# ---------------------------------- SERVICE STATIC PORT MAPPINGS
authelia_port: "9091"

handlers/main.yml

@@ -3,80 +3,96 @@
- name: Mount shared storage on Mac
become: true
ansible.builtin.command:
cmd: automount -cv
cmd: automount -cv
register: automount_output
failed_when: automount_output.rc > 0
changed_when: automount_output.rc == 0
when:
- "'macs' in group_names"
- not ansible_check_mode
- "'macs' in group_names"
- not ansible_check_mode
listen: "mac_run_automount"
- name: Mount and unmount shared storage on Mac
become: true
ansible.builtin.command:
cmd: automount -cvu
cmd: automount -cvu
register: automount_output
failed_when: automount_output.rc > 0
changed_when: automount_output.rc == 0
when:
- "'macs' in group_names"
- not ansible_check_mode
- "'macs' in group_names"
- not ansible_check_mode
listen: "mac_run_automount_unmount"
##################################### TELEGRAF
- name: (Re)Start telegraf (Debian)
become: true
ansible.builtin.service:
name: telegraf
state: restarted
name: telegraf
state: restarted
register: telegraf_service
failed_when: telegraf_service.rc > 0
changed_when: telegraf_service.rc == 0
when:
- ansible_os_family == 'Debian'
- ansible_os_family == 'Debian'
listen: restart_telegraf
- name: (Re)Start telegraf
ansible.builtin.shell:
cmd: /usr/local/bin/brew services restart telegraf
executable: /usr/local/bin/bash
cmd: /usr/local/bin/brew services restart telegraf
executable: /usr/local/bin/bash
ignore_errors: true
register: telegraf_service
failed_when: telegraf_service.rc > 0
changed_when: telegraf_service.rc == 0
when:
- ansible_os_family == 'Darwin'
- ansible_os_family == 'Darwin'
listen: restart_telegraf
##################################### NOMAD
- name: restart nomad (Debian)
- name: Restart nomad (Debian)
become: true
ansible.builtin.systemd:
name: nomad
enabled: true
state: restarted
name: nomad
enabled: true
state: restarted
register: nomad_service
failed_when: nomad_service.rc > 0
changed_when: nomad_service.rc == 0
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
listen: "restart nomad"
- name: "unload nomad agent (MacOSX)"
- name: "Unload nomad agent (MacOSX)"
ansible.builtin.command:
cmd: "launchctl unload -w {{ nomad_plist_macos }}"
failed_when: false
cmd: "launchctl unload -w {{ nomad_plist_macos }}"
register: nomad_service
changed_when: nomad_service.rc == 0
failed_when: nomad_service.rc > 0
when:
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
listen: "restart nomad"
- name: "load the nomad agent (MacOSX)"
- name: "Load the nomad agent (MacOSX)"
ansible.builtin.command:
cmd: "launchctl load -w {{ nomad_plist_macos }}"
cmd: "launchctl load -w {{ nomad_plist_macos }}"
register: nomad_service
changed_when: nomad_service.rc == 0
failed_when: nomad_service.rc > 0
when:
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
listen: "restart nomad"
- name: "ensure nomad is really running"
- name: "Ensure nomad is really running"
ansible.builtin.shell:
cmd: "sleep 10 && /usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
cmd: "set -o pipefail && sleep 10 && /usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
register: node_status_response
failed_when: node_status_response.rc > 0
changed_when: false
changed_when: node_status_response.rc == 0
when: "'nostart' not in ansible_run_tags"
listen: "restart nomad"
# - name: "Ensure sure Nomad service is really running"

main.yml

@@ -4,76 +4,76 @@
serial: 1
vars_files:
- default_variables.yml
- vault.yml
- default_variables.yml
- vault.yml
pre_tasks:
- name: Run sanity checks
ansible.builtin.import_tasks: tasks/sanity.yml
tags: ["always", "sanity"]
- name: populate service facts
ansible.builtin.service_facts:
tags: ["nomad", "consul"]
- name: Run debug tasks
ansible.builtin.import_tasks: tasks/debug.yml
tags: [never, debug]
- name: populate device specific variables
ansible.builtin.import_tasks: tasks/interpolated_variables.yml
tags: ["always"]
- name: Ensure we have up-to-date packages
ansible.builtin.import_tasks: tasks/packages.yml
tags: ["packages", "update"]
- name: Set clean nomad_jobs_dir variable
ansible.builtin.set_fact:
clean_nomad_jobs: true
tags: ["never", "clean"]
- name: Run sanity checks
ansible.builtin.import_tasks: tasks/sanity.yml
tags: ["always", "sanity"]
- name: Populate service facts
ansible.builtin.service_facts:
tags: ["nomad", "consul"]
- name: Run debug tasks
ansible.builtin.import_tasks: tasks/debug.yml
tags: [never, debug]
- name: Populate device specific variables
ansible.builtin.import_tasks: tasks/interpolated_variables.yml
tags: ["always"]
- name: Ensure we have up-to-date packages
ansible.builtin.import_tasks: tasks/packages.yml
tags: ["packages", "update"]
- name: Set clean nomad_jobs_dir variable
ansible.builtin.set_fact:
clean_nomad_jobs: true
tags: ["never", "clean"]
tasks:
- name: Configure cluster NFS mounts
ansible.builtin.import_tasks: tasks/cluster_storage.yml
tags: ["storage"]
when:
- is_nomad_client or is_nomad_server or is_shared_storage_client
- name: Install Docker
ansible.builtin.import_tasks: tasks/docker.yml
tags: ["docker"]
when: "'nas' not in group_names"
- name: Install and Upgrade Consul
ansible.builtin.import_tasks: tasks/consul.yml
tags: ["consul"]
when: is_consul_client or is_consul_server
- name: Install and Upgrade Nomad
ansible.builtin.import_tasks: tasks/nomad.yml
tags: ["nomad"]
when: is_nomad_client or is_nomad_server
- name: Orchestration Jobs
ansible.builtin.import_tasks: tasks/orchestration_jobs.yml
tags: ["jobs", "update"]
- name: Prometheus Node Exporter
ansible.builtin.import_tasks: tasks/service_prometheus_nodeExporter.yml
tags: ["prometheus_exporter"]
when:
- is_prometheus_node
- "'pis' in group_names"
- name: Install backup scripts
ansible.builtin.import_tasks: tasks/backups.yml
tags: ["backup", "backups"]
when: is_nomad_client or is_nomad_server
- name: Install and configure Telegraf
ansible.builtin.import_tasks: tasks/telegraf.yml
tags: ["telegraf"]
when: is_telegraf_client
- name: Pull repositories
ansible.builtin.import_tasks: tasks/pull_repositories.yml
tags: ["never", "update", "repos"]
- name: Configure log rotate
ansible.builtin.import_tasks: tasks/logrotate.yml
tags: ["logrotate"]
when: is_cluster_leader
- name: Install and configure tdarr
ansible.builtin.import_tasks: tasks/tdarr.yml
tags: ["tdarr"]
when: is_tdarr_server or is_tdarr_node
- name: Configure cluster NFS mounts
ansible.builtin.import_tasks: tasks/cluster_storage.yml
tags: ["storage"]
when:
- is_nomad_client or is_nomad_server or is_shared_storage_client
- name: Install Docker
ansible.builtin.import_tasks: tasks/docker.yml
tags: ["docker"]
when: "'nas' not in group_names"
- name: Install and Upgrade Consul
ansible.builtin.import_tasks: tasks/consul.yml
tags: ["consul"]
when: is_consul_client or is_consul_server
- name: Install and Upgrade Nomad
ansible.builtin.import_tasks: tasks/nomad.yml
tags: ["nomad"]
when: is_nomad_client or is_nomad_server
- name: Orchestration Jobs
ansible.builtin.import_tasks: tasks/orchestration_jobs.yml
tags: ["jobs", "update"]
- name: Prometheus Node Exporter
ansible.builtin.import_tasks: tasks/service_prometheus_nodeExporter.yml
tags: ["prometheus_exporter"]
when:
- is_prometheus_node
- "'pis' in group_names"
- name: Install backup scripts
ansible.builtin.import_tasks: tasks/backups.yml
tags: ["backup", "backups"]
when: is_nomad_client or is_nomad_server
- name: Install and configure Telegraf
ansible.builtin.import_tasks: tasks/telegraf.yml
tags: ["telegraf"]
when: is_telegraf_client
- name: Pull repositories
ansible.builtin.import_tasks: tasks/pull_repositories.yml
tags: ["never", "update", "repos"]
- name: Configure log rotate
ansible.builtin.import_tasks: tasks/logrotate.yml
tags: ["logrotate"]
when: is_cluster_leader
- name: Install and configure tdarr
ansible.builtin.import_tasks: tasks/tdarr.yml
tags: ["tdarr"]
when: is_tdarr_server or is_tdarr_node
handlers:
- ansible.builtin.import_tasks: handlers/main.yml
- ansible.builtin.import_tasks: handlers/main.yml

poetry.lock (generated; diff suppressed because it is too large)

poetry.toml (new file)

@@ -0,0 +1,2 @@
[virtualenvs]
in-project = true

pyproject.toml

@@ -7,18 +7,25 @@
version = "0.2.0"
[tool.poetry.dependencies]
ansible = "^7.2.0"
ansible-lint = { version = "^6.12.1", markers = "platform_system != 'Windows'" }
ansible = "^8.3.0"
ansible-lint = { version = "^6.18.0", markers = "platform_system != 'Windows'" }
commitizen = "^2.40.0"
poethepoet = "^0.18.1"
pre-commit = "^3.0.4"
pre-commit = "^3.3.3"
python = "^3.9"
yamllint = "^1.29.0"
yamllint = "^1.32.0"
[tool.poetry.group.dev.dependencies]
black = "^23.7.0"
sh = "^2.0.6"
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]
[tool.black]
line-length = 100
[tool.commitizen]
bump_message = "bump(release): v$current_version → v$new_version"
tag_format = "v$version"
@@ -27,11 +34,18 @@
version_files = ["pyproject.toml:version"]
[tool.poe.tasks]
pb = """
ansible-playbook
--vault-password-file .password_file
main.yml
-i inventory.yml
"""
[tool.poe.tasks.lint]
help = "Run linters"
[[tool.poe.tasks.lint.sequence]]
shell = "yamllint --strict --config-file .yamllint.yml tasks/ handlers/ main.yml inventory.yml default_variables.yml"
cmd = "yamllint --strict --config-file .yamllint.yml tasks/ handlers/ main.yml inventory.yml default_variables.yml"
[[tool.poe.tasks.lint.sequence]]
shell = "ansible-lint --force-color --config-file .ansible-lint.yml"
cmd = "ansible-lint --force-color --config-file .ansible-lint.yml"

scripts/update_dependencies.py (new executable file)

@@ -0,0 +1,150 @@
#!/usr/bin/env python
"""Script to update the pyproject.toml file with the latest versions of the dependencies."""
from pathlib import Path
from textwrap import wrap
try:
import tomllib
except ModuleNotFoundError: # pragma: no cover
import tomli as tomllib # type: ignore [no-redef]
import sh
from rich.console import Console
console = Console()
def dryrun(msg: str) -> None:
"""Print a message if the dry run flag is set.
Args:
msg: Message to print
"""
console.print(f"[cyan]DRYRUN | {msg}[/cyan]")
def success(msg: str) -> None:
"""Print a success message without using logging.
Args:
msg: Message to print
"""
console.print(f"[green]SUCCESS | {msg}[/green]")
def warning(msg: str) -> None:
"""Print a warning message without using logging.
Args:
msg: Message to print
"""
console.print(f"[yellow]WARNING | {msg}[/yellow]")
def error(msg: str) -> None:
"""Print an error message without using logging.
Args:
msg: Message to print
"""
console.print(f"[red]ERROR | {msg}[/red]")
def notice(msg: str) -> None:
"""Print a notice message without using logging.
Args:
msg: Message to print
"""
console.print(f"[bold]NOTICE | {msg}[/bold]")
def info(msg: str) -> None:
"""Print a notice message without using logging.
Args:
msg: Message to print
"""
console.print(f"INFO | {msg}")
def usage(msg: str, width: int = 80) -> None:
"""Print a usage message without using logging.
Args:
msg: Message to print
width (optional): Width of the message
"""
for _n, line in enumerate(wrap(msg, width=width)):
if _n == 0:
console.print(f"[dim]USAGE | {line}")
else:
console.print(f"[dim] | {line}")
def debug(msg: str) -> None:
"""Print a debug message without using logging.
Args:
msg: Message to print
"""
console.print(f"[blue]DEBUG | {msg}[/blue]")
def dim(msg: str) -> None:
"""Print a message in dimmed color.
Args:
msg: Message to print
"""
console.print(f"[dim]{msg}[/dim]")
# Load the pyproject.toml file
pyproject = Path(__file__).parents[1] / "pyproject.toml"
if not pyproject.exists():
console.print("pyproject.toml file not found")
raise SystemExit(1)
with pyproject.open("rb") as f:
try:
data = tomllib.load(f)
except tomllib.TOMLDecodeError as e:
raise SystemExit(1) from e
# Get the latest versions of all dependencies
info("Getting latest versions of dependencies...")
packages: dict = {}
for line in sh.poetry("--no-ansi", "show", "--outdated").splitlines():
package, current, latest = line.split()[:3]
packages[package] = {"current_version": current, "new_version": latest}
if not packages:
success("All dependencies are up to date")
raise SystemExit(0)
dependencies = data["tool"]["poetry"]["dependencies"]
groups = data["tool"]["poetry"]["group"]
for p in dependencies:
if p in packages:
notice(
f"Updating {p} from {packages[p]['current_version']} to {packages[p]['new_version']}"
)
sh.poetry("add", f"{p}@latest", _fg=True)
for group in groups:
for p in groups[group]["dependencies"]:
if p in packages:
notice(
f"Updating {p} from {packages[p]['current_version']} to {packages[p]['new_version']}"
)
sh.poetry("add", f"{p}@latest", "--group", group, _fg=True)
sh.poetry("update", _fg=True)
success("All dependencies are up to date")
raise SystemExit(0)
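
Usage sketch for the script above: it reads pyproject.toml, shells out to poetry show --outdated through the sh library, re-adds each outdated package at its latest version with poetry add, and finishes with poetry update. Assuming poetry is on the PATH and the rich and sh packages (plus tomli on Python older than 3.11) are importable, it would be invoked from the repository root roughly as:

$ poetry run python scripts/update_dependencies.py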

tasks/backups.yml

@@ -6,42 +6,42 @@
# 1. Copies a backup and restore shellscript to /usr/local/bin
# 2. Edits the sudoers file to allow the script to be invoked with sudo privileges
- name: copy backup shellscript to server
- name: Copy backup shellscript to server
become: true
ansible.builtin.template:
src: scripts/service_backups.sh.j2
dest: /usr/local/bin/service_backups
mode: 0755
src: scripts/service_backups.sh.j2
dest: /usr/local/bin/service_backups
mode: 0755
when:
- is_nomad_client or is_nomad_server
- is_nomad_client or is_nomad_server
- name: copy restore shellscript to server
- name: Copy restore shellscript to server
become: true
ansible.builtin.template:
src: scripts/service_restore.sh.j2
dest: /usr/local/bin/service_restore
mode: 0755
src: scripts/service_restore.sh.j2
dest: /usr/local/bin/service_restore
mode: 0755
when:
- is_nomad_client or is_nomad_server
- is_nomad_client or is_nomad_server
- name: ensure nomad user can run sudo with the restore script
- name: Ensure nomad user can run sudo with the restore script
become: true
ansible.builtin.lineinfile:
path: /etc/sudoers
state: present
line: "nomad ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
path: /etc/sudoers
state: present
line: "nomad ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
when:
- is_nomad_client or is_nomad_server
- "'pis' in group_names"
- is_nomad_client or is_nomad_server
- "'pis' in group_names"
- name: ensure my user can run sudo with the restore script
- name: Ensure my user can run sudo with the restore script
become: true
ansible.builtin.lineinfile:
path: /etc/sudoers
state: present
line: "{{ ansible_user }} ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
path: /etc/sudoers
state: present
line: "{{ ansible_user }} ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
when:
- is_nomad_client or is_nomad_server
- "'pis' in group_names"
- is_nomad_client or is_nomad_server
- "'pis' in group_names"

tasks/cluster_storage.yml

@@ -6,159 +6,159 @@
- name: "Mount storage on Raspberry Pis"
when: "'pis' in group_names"
block:
- name: ensure local mount points exist
become: true
ansible.builtin.file:
path: "{{ item.local }}"
state: directory
mode: 0777
# owner: "{{ ansible_user_uid }}"
# group: "{{ ansible_user_gid }}"
loop: "{{ rpi_nfs_mounts_list }}"
- name: Ensure local mount points exist
become: true
ansible.builtin.file:
path: "{{ item.local }}"
state: directory
mode: 0777
# owner: "{{ ansible_user_uid }}"
# group: "{{ ansible_user_gid }}"
loop: "{{ rpi_nfs_mounts_list }}"
- name: remove old nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: absent
loop: "{{ rpi_nfs_mounts_remove }}"
- name: Remove old nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: absent
loop: "{{ rpi_nfs_mounts_remove }}"
- name: mount all nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: mounted
boot: true
loop: "{{ rpi_nfs_mounts_list }}"
- name: Mount all nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: mounted
boot: true
loop: "{{ rpi_nfs_mounts_list }}"
# --------------------------------- Mount on Macs
# https://gist.github.com/l422y/8697518
- name: "Mount storage on Macs"
when: "'macs' in group_names"
block:
- name: create mount_point
become: true
ansible.builtin.file:
path: "{{ mac_storage_mount_point }}"
state: directory
mode: 0755
- name: Create mount_point
become: true
ansible.builtin.file:
path: "{{ mac_storage_mount_point }}"
state: directory
mode: 0755
# I ran into problems getting this to run successfully. If errors occur, add the line manually using:
# $ sudo nano /private/etc/auto_master
# I ran into problems getting this to run successfully. If errors occur, add the line manually using:
# $ sudo nano /private/etc/auto_master
- name: add NFS shared drives to macs
when: mac_autofs_type == 'nfs'
block:
- name: add auto_nfs to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_nfs"
line: "/- auto_nfs -nobrowse,nosuid"
unsafe_writes: true
- name: Add NFS shared drives to macs
when: mac_autofs_type == 'nfs'
block:
- name: Add auto_nfs to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_nfs"
line: "/- auto_nfs -nobrowse,nosuid"
unsafe_writes: true
- name: add mounts to /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_nfs_mounts_list if mac_nfs_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: Add mounts to /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_nfs_mounts_list if mac_nfs_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: remove old mounts from /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: absent
unsafe_writes: true
mode: 0644
notify: mac_run_automount_unmount
loop: "{{ mac_nfs_mounts_remove if mac_nfs_mounts_remove is iterable else [] }}"
- name: Remove old mounts from /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: absent
unsafe_writes: true
mode: 0644
notify: mac_run_automount_unmount
loop: "{{ mac_nfs_mounts_remove if mac_nfs_mounts_remove is iterable else [] }}"
- name: add AFP shared drives to macs
when: mac_autofs_type == 'afp'
block:
- name: add auto_afp to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_afp"
line: "/- auto_afp -nobrowse,nosuid"
unsafe_writes: true
- name: Add AFP shared drives to macs
when: mac_autofs_type == 'afp'
block:
- name: Add auto_afp to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_afp"
line: "/- auto_afp -nobrowse,nosuid"
unsafe_writes: true
- name: add mounts to /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: Add mounts to /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: remove mounts from /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount
- name: Remove mounts from /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount
- name: add SMB shared drives to macs
when: mac_autofs_type == 'smb'
block:
- name: add auto_smb to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_smb"
line: "/- auto_smb -noowners,nosuid"
unsafe_writes: true
- name: Add SMB shared drives to macs
when: mac_autofs_type == 'smb'
block:
- name: Add auto_smb to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_smb"
line: "/- auto_smb -noowners,nosuid"
unsafe_writes: true
- name: add mounts to /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=smbfs,soft,noowners,nosuid,rw ://{{ smb_username }}:{{ smb_password }}@{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: Add mounts to /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=smbfs,soft,noowners,nosuid,rw ://{{ smb_username }}:{{ smb_password }}@{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: remove mounts from /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount
- name: Remove mounts from /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount

tasks/consul.yml

@@ -4,356 +4,362 @@
- name: Set variables needed to install consul
block:
- name: "Set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "Set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "Set variable: Use USB drive for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "{{ rpi_usb_drive_mount_point }}/opt/consul"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "Set variable: Use USB drive for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "{{ rpi_usb_drive_mount_point }}/opt/consul"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "Set variable: Use root disk for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "/opt/consul"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "Set variable: Use root disk for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "/opt/consul"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "Set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
consul_opt_dir: "/Users/{{ ansible_user }}/Library/consul"
when:
- mac_intel or mac_arm
- name: "Set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
consul_opt_dir: "/Users/{{ ansible_user }}/Library/consul"
when:
- mac_intel or mac_arm
- name: "Set variable: Use ~/volume1/docker/consul/data for /opt files (synology)"
ansible.builtin.set_fact:
consul_opt_dir: "/volume1/docker/consul/data"
when:
- inventory_hostname == 'synology'
- name: "Set variable: Use ~/volume1/docker/consul/data for /opt files (synology)"
ansible.builtin.set_fact:
consul_opt_dir: "/volume1/docker/consul/data"
when:
- inventory_hostname == 'synology'
- name: "Set variable: Set Consul download Binary (armv7l)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set Consul download Binary (armv7l)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set Consul download Binary (aarch64)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set Consul download Binary (aarch64)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_amd64.zip"
when: mac_intel
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_amd64.zip"
when: mac_intel
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_arm64.zip"
when: mac_arm
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_arm64.zip"
when: mac_arm
- name: Assert that we can install Consul
ansible.builtin.assert:
that:
- consul_download_uri is defined
- consul_opt_dir is defined
fail_msg: "Unable to install consul on this host"
when: inventory_hostname != 'synology'
- name: Assert that we can install Consul
ansible.builtin.assert:
that:
- consul_download_uri is defined
- consul_opt_dir is defined
fail_msg: "Unable to install consul on this host"
when: inventory_hostname != 'synology'
- name: "Stop Consul"
block:
- name: "Stop consul systemd service (Debian)"
become: true
ansible.builtin.systemd:
name: consul
state: stopped
when:
- ansible_os_family == 'Debian'
- ansible_facts.services["consul.service"] is defined
- name: "Stop consul systemd service (Debian)"
become: true
ansible.builtin.systemd:
name: consul
state: stopped
when:
- ansible_os_family == 'Debian'
- ansible_facts.services["consul.service"] is defined
- name: "Check if plist file exists (MacOSX)"
ansible.builtin.stat:
path: "{{ consul_plist_macos }}"
register: consul_file
when:
- ansible_os_family == 'Darwin'
- name: "Check if plist file exists (MacOSX)"
ansible.builtin.stat:
path: "{{ consul_plist_macos }}"
register: consul_file
when:
- ansible_os_family == 'Darwin'
- name: "Unload consul agent (MacOSX)"
become: true
ansible.builtin.command:
cmd: "launchctl unload {{ consul_plist_macos }}"
when:
- ansible_os_family == 'Darwin'
- consul_file.stat.exists
- name: "Unload consul agent (MacOSX)"
become: true
ansible.builtin.command:
cmd: "launchctl unload {{ consul_plist_macos }}"
register: consul_unload
failed_when: consul_unload.rc != 0
changed_when: consul_unload.rc == 0
when:
- ansible_os_family == 'Darwin'
- consul_file.stat.exists
- name: "Create 'consul' user and group"
when:
- ansible_os_family == 'Debian'
- ansible_os_family == 'Debian'
block:
- name: "Ensure group 'consul' exists (Debian)"
become: true
ansible.builtin.group:
name: consul
state: present
- name: "Ensure group 'consul' exists (Debian)"
become: true
ansible.builtin.group:
name: consul
state: present
- name: "Add the user 'consul' with group 'consul' (Debian)"
become: true
ansible.builtin.user:
name: consul
group: consul
- name: "Add the user 'consul' with group 'consul' (Debian)"
become: true
ansible.builtin.user:
name: consul
group: consul
- name: "Create Consul /opt storage and copy certificates"
block:
- name: "Create {{ consul_opt_dir }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ consul_opt_dir }}"
- "{{ consul_opt_dir }}/logs"
- "{{ consul_opt_dir }}/plugins"
- "{{ consul_opt_dir }}/certs"
- name: "Create {{ consul_opt_dir }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ consul_opt_dir }}"
- "{{ consul_opt_dir }}/logs"
- "{{ consul_opt_dir }}/plugins"
- "{{ consul_opt_dir }}/certs"
- name: Copy certs to servers
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: "certs/consul/consul-agent-ca.pem", dest: "{{ consul_opt_dir }}/certs/consul-agent-ca.pem" }
- { src: "certs/consul/{{ datacenter_name }}-server-consul-0.pem", dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0.pem" }
- { src: "certs/consul/{{ datacenter_name }}-server-consul-0-key.pem", dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0-key.pem" }
when:
- is_consul_server
- name: Copy certs to servers
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: "certs/consul/consul-agent-ca.pem", dest: "{{ consul_opt_dir }}/certs/consul-agent-ca.pem" }
- { src: "certs/consul/{{ datacenter_name }}-server-consul-0.pem", dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0.pem" }
- { src: "certs/consul/{{ datacenter_name }}-server-consul-0-key.pem", dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0-key.pem" }
when:
- is_consul_server
- name: Copy certs to clients
become: true
ansible.builtin.copy:
src: certs/consul/consul-agent-ca.pem
dest: "{{ consul_opt_dir }}/certs/consul-agent-ca.pem"
mode: 0755
when:
- is_consul_client
- not is_consul_server
- name: Copy certs to clients
become: true
ansible.builtin.copy:
src: certs/consul/consul-agent-ca.pem
dest: "{{ consul_opt_dir }}/certs/consul-agent-ca.pem"
mode: 0755
when:
- is_consul_client
- not is_consul_server
- name: "Set owner of files to consul:consul (debian)"
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
owner: consul
group: consul
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to consul:consul (debian)"
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
owner: consul
group: consul
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Template out Consul configuration file"
block:
- name: "Create {{ interpolated_consul_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
state: directory
mode: 0755
- name: "Create {{ interpolated_consul_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
state: directory
mode: 0755
- name: Copy consul base config file
become: true
ansible.builtin.template:
src: consul.hcl.j2
dest: "{{ interpolated_consul_configuration_dir }}/consul.hcl"
mode: 0644
- name: Copy consul base config file
become: true
ansible.builtin.template:
src: consul.hcl.j2
dest: "{{ interpolated_consul_configuration_dir }}/consul.hcl"
mode: 0644
- name: "Set owner of files to consul:consul (Debian)"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
owner: consul
group: consul
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to consul:consul (Debian)"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
owner: consul
group: consul
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Set owner of root consul dir to {{ ansible_user_uid }}:{{ ansible_user_gid }} (synology)"
become: true
ansible.builtin.file:
path: /volume1/docker/consul/
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- inventory_hostname == 'synology'
- name: "Set owner of root consul dir to {{ ansible_user_uid }}:{{ ansible_user_gid }} (synology)"
become: true
ansible.builtin.file:
path: /volume1/docker/consul/
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- inventory_hostname == 'synology'
- name: "Install Consul binary"
block:
- name: "Set fact: need install consul?"
ansible.builtin.set_fact:
need_consul_install: false
when:
- consul_download_uri is defined
- name: "Set fact: need install consul?"
ansible.builtin.set_fact:
need_consul_install: false
when:
- consul_download_uri is defined
- name: Check if Consul is installed
ansible.builtin.stat:
path: /usr/local/bin/consul
register: consul_binary_file_location
when:
- consul_download_uri is defined
- name: Check if Consul is installed
ansible.builtin.stat:
path: /usr/local/bin/consul
register: consul_binary_file_location
when:
- consul_download_uri is defined
- name: "Set fact: need consul install?"
ansible.builtin.set_fact:
need_consul_install: true
when:
- consul_download_uri is defined
- not consul_binary_file_location.stat.exists
- name: "Set fact: need consul install?"
ansible.builtin.set_fact:
need_consul_install: true
when:
- consul_download_uri is defined
- not consul_binary_file_location.stat.exists
- name: Check current version of Consul
ansible.builtin.shell:
cmd: /usr/local/bin/consul --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
changed_when: false
register: installed_consul_version
check_mode: false
when:
- consul_download_uri is defined
- not need_consul_install
- name: Check current version of Consul
ansible.builtin.shell:
cmd: /usr/local/bin/consul --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
changed_when: false
register: installed_consul_version
check_mode: false
when:
- consul_download_uri is defined
- not need_consul_install
- name: "Set fact: need consul install?"
ansible.builtin.set_fact:
need_consul_install: true
when:
- consul_download_uri is defined
- not need_consul_install
- installed_consul_version.stdout is version(consul_version, '<')
- name: "Set fact: need consul install?"
ansible.builtin.set_fact:
need_consul_install: true
when:
- consul_download_uri is defined
- not need_consul_install
- installed_consul_version.stdout is version(consul_version, '<')
- name: Install Consul
become: true
ansible.builtin.unarchive:
src: "{{ consul_download_uri }}"
dest: /usr/local/bin
remote_src: true
when:
- consul_download_uri is defined
- need_consul_install
- name: Install Consul
become: true
ansible.builtin.unarchive:
src: "{{ consul_download_uri }}"
dest: /usr/local/bin
remote_src: true
when:
- consul_download_uri is defined
- need_consul_install
- name: "Validate consul config"
ansible.builtin.command:
cmd: "/usr/local/bin/consul validate {{ interpolated_consul_configuration_dir }}"
cmd: "/usr/local/bin/consul validate {{ interpolated_consul_configuration_dir }}"
register: consul_config_valid
changed_when: false
failed_when: consul_config_valid.rc != 0
when:
- inventory_hostname != 'synology'
- inventory_hostname != 'synology'
- name: "Copy system.d or launchctl service files"
block:
- name: Ensure /Library/LaunchAgents exists (MacOSX)
ansible.builtin.file:
path: "{{ consul_plist_macos | dirname }}"
state: directory
mode: 0755
when:
- ansible_os_family == 'Darwin'
- name: Ensure /Library/LaunchAgents exists (MacOSX)
ansible.builtin.file:
path: "{{ consul_plist_macos | dirname }}"
state: directory
mode: 0755
when:
- ansible_os_family == 'Darwin'
- name: Create Consul launchd service (MacOSX)
ansible.builtin.template:
src: consul.launchd.j2
dest: "{{ consul_plist_macos }}"
mode: 0644
when:
- ansible_os_family == 'Darwin'
- name: Create Consul launchd service (MacOSX)
ansible.builtin.template:
src: consul.launchd.j2
dest: "{{ consul_plist_macos }}"
mode: 0644
when:
- ansible_os_family == 'Darwin'
- name: Create Consul service (Debian)
become: true
ansible.builtin.template:
src: consul.service.j2
dest: /etc/systemd/system/consul.service
mode: 0644
when:
- ansible_os_family == 'Debian'
- name: Create Consul service (Debian)
become: true
ansible.builtin.template:
src: consul.service.j2
dest: /etc/systemd/system/consul.service
mode: 0644
when:
- ansible_os_family == 'Debian'
- name: "Start Consul"
block:
- name: Load the Consul agent (MacOSX)
ansible.builtin.command:
cmd: "launchctl load -w {{ consul_plist_macos }}"
when:
- mac_intel or mac_arm
- "'nostart' not in ansible_run_tags"
- name: Load the Consul agent (MacOSX)
ansible.builtin.command:
cmd: "launchctl load -w {{ consul_plist_macos }}"
register: consul_loaded
changed_when: consul_loaded.rc == 0
failed_when: consul_loaded.rc > 0
when:
- mac_intel or mac_arm
- "'nostart' not in ansible_run_tags"
- name: Start Consul (Debian)
become: true
ansible.builtin.systemd:
name: consul
enabled: true
state: started
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- name: Start Consul (Debian)
become: true
ansible.builtin.systemd:
name: consul
enabled: true
state: started
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- name: Make sure Consul service is really running
ansible.builtin.command:
cmd: systemctl is-active consul
register: is_consul_really_running
changed_when: false
failed_when: is_consul_really_running.rc != 0
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- name: Make sure Consul service is really running
ansible.builtin.command:
cmd: systemctl is-active consul
register: is_consul_really_running
changed_when: false
failed_when: is_consul_really_running.rc != 0
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- name: "Copy Consul service checks to synology"
when:
- inventory_hostname == 'synology'
- inventory_hostname == 'synology'
block:
- name: Copy config file
ansible.builtin.template:
src: consul_services/consul_synology_checks.json.j2
dest: "{{ interpolated_consul_configuration_dir }}/service_checks.json"
mode: 0644
- name: Copy config file
ansible.builtin.template:
src: consul_services/consul_synology_checks.json.j2
dest: "{{ interpolated_consul_configuration_dir }}/service_checks.json"
mode: 0644
- name: Reload configuration file
ansible.builtin.uri:
url: "http://{{ synology_second_ip }}:8500/v1/agent/reload"
method: PUT
status_code: 200
ignore_errors: true
check_mode: false
register: consul_agent_reload_http_response
failed_when: consul_agent_reload_http_response.status != 200
- name: Reload configuration file
ansible.builtin.uri:
url: "http://{{ synology_second_ip }}:8500/v1/agent/reload"
method: PUT
status_code: 200
ignore_errors: true
check_mode: false
register: consul_agent_reload_http_response
failed_when: consul_agent_reload_http_response.status != 200
- name: Debug when consul agent reload fails
ansible.builtin.debug:
var: consul_agent_reload_http_response.msg
check_mode: false
when: consul_agent_reload_http_response.status != 200
- name: Debug when consul agent reload fails
ansible.builtin.debug:
var: consul_agent_reload_http_response.msg
check_mode: false
when: consul_agent_reload_http_response.status != 200

tasks/debug.yml

@@ -33,5 +33,5 @@
# when:
# - ansible_facts['system_vendor'] is search("Synology")
- name: "end play"
- name: "End play"
ansible.builtin.meta: end_play

tasks/docker.yml

@@ -4,85 +4,91 @@
- name: Check if Docker is already present
ansible.builtin.command:
cmd: docker --version
cmd: docker --version
register: docker_command_result
changed_when: docker_command_result.rc == 1
failed_when: false
- name: install docker on Debian
- name: Install docker on Debian
when: ansible_os_family == 'Debian'
block:
- name: "Add docker local filesystem storage directory"
ansible.builtin.file:
path: "{{ rpi_localfs_service_storage }}"
mode: 0755
state: directory
- name: "Add docker local filesystem storage directory"
ansible.builtin.file:
path: "{{ rpi_localfs_service_storage }}"
mode: 0755
state: directory
- name: Download Docker install convenience script
ansible.builtin.get_url:
url: "https://get.docker.com/"
dest: /tmp/get-docker.sh
mode: 0775
when: docker_command_result.rc == 1
- name: Download Docker install convenience script
ansible.builtin.get_url:
url: "https://get.docker.com/"
dest: /tmp/get-docker.sh
mode: 0775
when: docker_command_result.rc == 1
- name: Run Docker install convenience script
ansible.builtin.command: /tmp/get-docker.sh
environment:
CHANNEL: stable
when: docker_command_result.rc == 1
- name: Run Docker install convenience script
ansible.builtin.command: /tmp/get-docker.sh
environment:
CHANNEL: stable
register: docker_install
failed_when: docker_install.rc > 0
changed_when: docker_install.rc == 0
when: docker_command_result.rc == 1
- name: Make sure Docker CE is the version specified
ansible.builtin.apt:
name: "docker-ce"
state: present
when: docker_command_result.rc == 1
- name: Make sure Docker CE is the version specified
ansible.builtin.apt:
name: "docker-ce"
state: present
when: docker_command_result.rc == 1
- name: Ensure Docker is started
ansible.builtin.service:
name: docker
state: started
enabled: true
- name: Ensure Docker is started
ansible.builtin.service:
name: docker
state: started
enabled: true
- name: Ensure docker users are added to the docker group
become: true
ansible.builtin.user:
name: "{{ ansible_user }}"
groups: docker
append: true
when: docker_command_result.rc == 1
- name: Ensure docker users are added to the docker group
become: true
ansible.builtin.user:
name: "{{ ansible_user }}"
groups: docker
append: true
when: docker_command_result.rc == 1
- name: install docker on macOS
- name: Install docker on macOS
when: "'macs' in group_names"
block:
- name: "Add docker directory to ~/Library"
ansible.builtin.file:
path: "{{ mac_localfs_service_storage }}"
mode: 0755
state: directory
- name: "Add docker directory to ~/Library"
ansible.builtin.file:
path: "{{ mac_localfs_service_storage }}"
mode: 0755
state: directory
- name: install base homebrew packages
community.general.homebrew:
name: docker
state: present
update_homebrew: false
upgrade_all: false
when: docker_command_result.rc == 1
- name: Install base homebrew packages
community.general.homebrew:
name: docker
state: present
update_homebrew: false
upgrade_all: false
when: docker_command_result.rc == 1
- name: open docker application
ansible.builtin.command:
cmd: open /Applications/Docker.app
when: docker_command_result.rc == 1
- name: Open docker application
ansible.builtin.command:
cmd: open /Applications/Docker.app
register: docker_open_app
failed_when: docker_open_app.rc > 0
changed_when: docker_open_app.rc == 0
when: docker_command_result.rc == 1
- name: Must install Docker manually
ansible.builtin.debug:
msg: |
Docker must be installed manually on MacOS. Log in to mac to install then rerun playbook
- name: Must install Docker manually
ansible.builtin.debug:
msg: |
Docker must be installed manually on MacOS. Log in to mac to install then rerun playbook
Be certain to configure the following:
- run on login
- add '{{ mac_storage_mount_point }}' to mountable file system directories
when: docker_command_result.rc == 1
Be certain to configure the following:
- run on login
- add '{{ mac_storage_mount_point }}' to mountable file system directories
when: docker_command_result.rc == 1
- name: end play
ansible.builtin.meta: end_play
when: docker_command_result.rc == 1
- name: End play
ansible.builtin.meta: end_play
when: docker_command_result.rc == 1

tasks/interpolated_variables.yml

@@ -8,46 +8,46 @@
- name: "Set local filesystem location (pis)"
ansible.builtin.set_fact:
interpolated_localfs_service_storage: "{{ rpi_localfs_service_storage }}"
interpolated_localfs_service_storage: "{{ rpi_localfs_service_storage }}"
changed_when: false
when:
- "'pis' in group_names"
- "'pis' in group_names"
- name: "Set local filesystem location (macs)"
ansible.builtin.set_fact:
interpolated_localfs_service_storage: "{{ mac_localfs_service_storage }}"
interpolated_localfs_service_storage: "{{ mac_localfs_service_storage }}"
changed_when: false
when:
- "'macs' in group_names"
- "'macs' in group_names"
- name: "Set NFS mount location (pis)"
ansible.builtin.set_fact:
interpolated_nfs_service_storage: "{{ rpi_nfs_mount_point }}"
interpolated_nfs_service_storage: "{{ rpi_nfs_mount_point }}"
changed_when: false
when:
- "'pis' in group_names"
- "'pis' in group_names"
- name: "Set NFS mount location location (macs)"
ansible.builtin.set_fact:
interpolated_nfs_service_storage: "{{ mac_storage_mount_point }}"
interpolated_nfs_service_storage: "{{ mac_storage_mount_point }}"
changed_when: false
when:
- "'macs' in group_names"
- "'macs' in group_names"
- name: "set consul configuration directory (synology)"
- name: "Set consul configuration directory (synology)"
ansible.builtin.set_fact:
interpolated_consul_configuration_dir: "{{ synology_consul_configuration_dir }}"
interpolated_consul_configuration_dir: "{{ synology_consul_configuration_dir }}"
when:
- inventory_hostname == 'synology'
- inventory_hostname == 'synology'
- name: "set consul configuration directory (pis)"
- name: "Set consul configuration directory (pis)"
ansible.builtin.set_fact:
interpolated_consul_configuration_dir: "{{ rpi_consul_configuration_dir }}"
interpolated_consul_configuration_dir: "{{ rpi_consul_configuration_dir }}"
when:
- "'pis' in group_names"
- "'pis' in group_names"
- name: "set consul configuration directory (macs)"
- name: "Set consul configuration directory (macs)"
ansible.builtin.set_fact:
interpolated_consul_configuration_dir: "{{ mac_consul_configuration_dir }}"
interpolated_consul_configuration_dir: "{{ mac_consul_configuration_dir }}"
when:
- "'macs' in group_names"
- "'macs' in group_names"

tasks/logrotate.yml

@@ -4,29 +4,29 @@
#
# NOTE: This task exists due to the arillso.logrotate failing completely on macOS
- name: add service_backups.log to logrotate
- name: Add service_backups.log to logrotate
become: true
vars:
logrotate_applications:
- name: service_backups
definitions:
- logs:
- "{{ rpi_nfs_mount_point }}/pi-cluster/logs/service_backups.log"
options:
- rotate 1
- size 100k
- missingok
- notifempty
- su root root
- extension .log
- compress
- nodateext
- nocreate
- delaycompress
logrotate_applications:
- name: service_backups
definitions:
- logs:
- "{{ rpi_nfs_mount_point }}/pi-cluster/logs/service_backups.log"
options:
- rotate 1
- size 100k
- missingok
- notifempty
- su root root
- extension .log
- compress
- nodateext
- nocreate
- delaycompress
ansible.builtin.import_role:
name: arillso.logrotate
name: arillso.logrotate
failed_when: false
ignore_errors: true
when:
- "'macs' not in group_names"
- is_cluster_leader
- "'macs' not in group_names"
- is_cluster_leader

tasks/nomad.yml

@@ -4,243 +4,243 @@
- name: "Set variables needed to install Nomad"
block:
- name: "set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "Set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "set variable: Use USB drive for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "{{ rpi_usb_drive_mount_point }}/opt/nomad"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "Set variable: Use USB drive for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "{{ rpi_usb_drive_mount_point }}/opt/nomad"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "set variable: Use root dist for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/opt/nomad"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "Set variable: Use root dist for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/opt/nomad"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/Users/{{ ansible_user }}/Library/nomad"
when:
- ansible_os_family == 'Darwin'
- name: "Set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/Users/{{ ansible_user }}/Library/nomad"
when:
- ansible_os_family == 'Darwin'
- name: "set variable: Set Nomad download Binary (armv7l)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set Nomad download Binary (armv7l)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "set variable: Set Nomad download Binary (aarch64)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set Nomad download Binary (aarch64)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "set variable: Set Nomad download Binary (MacOSX)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_darwin_amd64.zip"
when:
- mac_intel
- name: "Set variable: Set Nomad download Binary (MacOSX)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_darwin_amd64.zip"
when:
- mac_intel
- name: Assert that we can install Nomad
ansible.builtin.assert:
that:
- nomad_download_file_uri is defined
- nomad_opt_dir_location is defined
fail_msg: "Unable to install Nomad on this host"
- name: Assert that we can install Nomad
ansible.builtin.assert:
that:
- nomad_download_file_uri is defined
- nomad_opt_dir_location is defined
fail_msg: "Unable to install Nomad on this host"
- name: "Create Nomad user and group (Debian)"
when: ansible_os_family == 'Debian'
block:
- name: "Ensure group 'nomad' exists (Debian)"
become: true
ansible.builtin.group:
name: nomad
state: present
- name: "Ensure group 'nomad' exists (Debian)"
become: true
ansible.builtin.group:
name: nomad
state: present
- name: "Add the user 'nomad' with group 'nomad' (Debian)"
become: true
ansible.builtin.user:
name: nomad
group: nomad
- name: "Add the user 'nomad' with group 'nomad' (Debian)"
become: true
ansible.builtin.user:
name: nomad
group: nomad
- name: "Add user 'nomad' to docker and sudo groups (Debian)"
become: true
ansible.builtin.user:
user: nomad
groups: docker,sudo
append: true
- name: "Add user 'nomad' to docker and sudo groups (Debian)"
become: true
ansible.builtin.user:
user: nomad
groups: docker,sudo
append: true
- name: "Create Nomad /opt storage"
block:
- name: "create {{ nomad_opt_dir_location }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ nomad_opt_dir_location }}/logs"
- "{{ nomad_opt_dir_location }}/plugins"
- "{{ nomad_opt_dir_location }}/certs"
- name: "Create {{ nomad_opt_dir_location }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ nomad_opt_dir_location }}/logs"
- "{{ nomad_opt_dir_location }}/plugins"
- "{{ nomad_opt_dir_location }}/certs"
- name: Copy server certs
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: certs/nomad/nomad-ca.pem, dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem" }
- { src: certs/nomad/server.pem, dest: "{{ nomad_opt_dir_location }}/certs/server.pem" }
- { src: certs/nomad/server-key.pem, dest: "{{ nomad_opt_dir_location }}/certs/server-key.pem" }
notify: "restart nomad"
when: is_nomad_server
- name: Copy server certs
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: certs/nomad/nomad-ca.pem, dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem" }
- { src: certs/nomad/server.pem, dest: "{{ nomad_opt_dir_location }}/certs/server.pem" }
- { src: certs/nomad/server-key.pem, dest: "{{ nomad_opt_dir_location }}/certs/server-key.pem" }
notify: "restart nomad"
when: is_nomad_server
- name: Copy client certs
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: certs/nomad/nomad-ca.pem, dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem" }
- { src: certs/nomad/client.pem, dest: "{{ nomad_opt_dir_location }}/certs/client.pem" }
- { src: certs/nomad/client-key.pem, dest: "{{ nomad_opt_dir_location }}/certs/client-key.pem" }
notify: "restart nomad"
when: is_nomad_client
- name: Copy client certs
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: certs/nomad/nomad-ca.pem, dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem" }
- { src: certs/nomad/client.pem, dest: "{{ nomad_opt_dir_location }}/certs/client.pem" }
- { src: certs/nomad/client-key.pem, dest: "{{ nomad_opt_dir_location }}/certs/client-key.pem" }
notify: "restart nomad"
when: is_nomad_client
- name: "set owner of files to nomad:nomad (debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: nomad
group: nomad
recurse: true
when: ansible_os_family == 'Debian'
- name: "Set owner of files to nomad:nomad (debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: nomad
group: nomad
recurse: true
when: ansible_os_family == 'Debian'
- name: "set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }} (MacOSX)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when: ansible_os_family != 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }} (MacOSX)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when: ansible_os_family != 'Debian'
- name: "Template out the configuration file"
block:
- name: "create {{ nomad_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
state: directory
mode: 0755
- name: "Create {{ nomad_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
state: directory
mode: 0755
- name: copy base config file
become: true
ansible.builtin.template:
src: nomad.hcl.j2
dest: "{{ nomad_configuration_dir }}/nomad.hcl"
mode: 0644
notify: "restart nomad"
- name: Copy base config file
become: true
ansible.builtin.template:
src: nomad.hcl.j2
dest: "{{ nomad_configuration_dir }}/nomad.hcl"
mode: 0644
notify: "restart nomad"
- name: "set owner of files to nomad:nomad (Debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
owner: nomad
group: nomad
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to nomad:nomad (Debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
owner: nomad
group: nomad
recurse: true
when:
- ansible_os_family == 'Debian'
- name: Install or Update Nomad
block:
- name: "set fact: do we need a nomad install?"
ansible.builtin.set_fact:
need_nomad_install: false
- name: "Set fact: do we need a nomad install?"
ansible.builtin.set_fact:
need_nomad_install: false
- name: Check if nomad is installed
ansible.builtin.stat:
path: /usr/local/bin/nomad
register: nomad_binary_file_location
- name: Check if nomad is installed
ansible.builtin.stat:
path: /usr/local/bin/nomad
register: nomad_binary_file_location
- name: "set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not nomad_binary_file_location.stat.exists
- name: "Set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not nomad_binary_file_location.stat.exists
- name: Check current version of Nomad
ansible.builtin.shell: /usr/local/bin/nomad --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_nomad_version
check_mode: false
changed_when: false
when:
- not need_nomad_install
- name: Check current version of Nomad
ansible.builtin.shell: /usr/local/bin/nomad --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_nomad_version
check_mode: false
changed_when: false
when:
- not need_nomad_install
- name: "set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not need_nomad_install
- current_nomad_version.stdout is version(nomad_version, '<')
- name: "Set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not need_nomad_install
- current_nomad_version.stdout is version(nomad_version, '<')
- name: install Nomad
become: true
ansible.builtin.unarchive:
src: "{{ nomad_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
notify: "restart nomad"
when:
- need_nomad_install
- name: Install Nomad
become: true
ansible.builtin.unarchive:
src: "{{ nomad_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
notify: "restart nomad"
when:
- need_nomad_install
- name: "Copy system.d or launchctrl service files"
block:
- name: ensure /Library/LaunchAgents exists (MacOSX)
ansible.builtin.file:
path: "{{ nomad_plist_macos | dirname }}"
state: directory
mode: 0755
when:
- ansible_os_family == 'Darwin'
- name: Ensure /Library/LaunchAgents exists (MacOSX)
ansible.builtin.file:
path: "{{ nomad_plist_macos | dirname }}"
state: directory
mode: 0755
when:
- ansible_os_family == 'Darwin'
- name: create nomad launchd service (MacOSX)
ansible.builtin.template:
src: nomad.launchd.j2
dest: "{{ nomad_plist_macos }}"
mode: 0644
notify: "restart nomad"
when:
- ansible_os_family == 'Darwin'
- name: Create nomad launchd service (MacOSX)
ansible.builtin.template:
src: nomad.launchd.j2
dest: "{{ nomad_plist_macos }}"
mode: 0644
notify: "restart nomad"
when:
- ansible_os_family == 'Darwin'
- name: create nomad service (Debian)
become: true
ansible.builtin.template:
src: nomad.service.j2
dest: /etc/systemd/system/nomad.service
mode: 0644
notify: "restart nomad"
when:
- ansible_os_family == 'Debian'
- name: Create nomad service (Debian)
become: true
ansible.builtin.template:
src: nomad.service.j2
dest: /etc/systemd/system/nomad.service
mode: 0644
notify: "restart nomad"
when:
- ansible_os_family == 'Debian'
- name: "start nomad, if stopped"
- name: "Start nomad, if stopped"
ansible.builtin.shell:
cmd: "/usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
cmd: "/usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
register: node_status_response
ignore_errors: true
failed_when: false
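(Editor's note, not part of this changeset: the install-or-upgrade decision above relies on Ansible's Jinja2 version test to compare the grep'd binary version against nomad_version. A minimal illustration of how that comparison behaves, with hypothetical values:)

- name: "Illustrate the version comparison that flips need_nomad_install (hypothetical values)"
  ansible.builtin.debug:
    msg: "{{ '1.4.6' is version('1.5.6', '<') }}"  # renders True, so an upgrade would be triggered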

View File

@@ -10,67 +10,67 @@
- name: "Sync Nomad Jobs"
block:
- name: Remove nomad jobs directory
ansible.builtin.file:
path: "{{ nomad_jobfile_location }}"
state: absent
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- clean_nomad_jobs
- name: Remove nomad jobs directory
ansible.builtin.file:
path: "{{ nomad_jobfile_location }}"
state: absent
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- clean_nomad_jobs
- name: (Re)Create nomad jobs directory
ansible.builtin.file:
path: "{{ nomad_jobfile_location }}"
state: directory
mode: 0755
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: (Re)Create nomad jobs directory
ansible.builtin.file:
path: "{{ nomad_jobfile_location }}"
state: directory
mode: 0755
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: synchronize nomad job templates (jinja)
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ nomad_jobfile_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "templates/nomad_jobs/*.j2"
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: Synchronize nomad job templates (jinja)
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ nomad_jobfile_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "templates/nomad_jobs/*.j2"
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: synchronize nomad job templates (hcl)
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ nomad_jobfile_location }}/{{ item | basename }}"
mode: 0644
with_fileglob: "templates/nomad_jobs/*.hcl"
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: Synchronize nomad job templates (hcl)
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ nomad_jobfile_location }}/{{ item | basename }}"
mode: 0644
with_fileglob: "templates/nomad_jobs/*.hcl"
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: Ensure we have local storage folders
become: true
ansible.builtin.file:
path: "{{ interpolated_localfs_service_storage }}/{{ item }}"
state: directory
mode: 0777
group: "{{ ansible_user_gid }}"
owner: "{{ ansible_user_uid }}"
when:
- is_nomad_client or is_nomad_server
loop: "{{ service_localfs_dirs }}"
- name: Ensure we have local storage folders
become: true
ansible.builtin.file:
path: "{{ interpolated_localfs_service_storage }}/{{ item }}"
state: directory
mode: 0777
group: "{{ ansible_user_gid }}"
owner: "{{ ansible_user_uid }}"
when:
- is_nomad_client or is_nomad_server
loop: "{{ service_localfs_dirs }}"
- name: "Sync docker compose files"
- name: Sync docker compose files
when: is_docker_compose_client
block:
- name: confirm compose file dir exists
ansible.builtin.file:
path: "{{ docker_compose_file_location }}"
state: directory
mode: 0755
- name: Confirm compose file dir exists
ansible.builtin.file:
path: "{{ docker_compose_file_location }}"
state: directory
mode: 0755
- name: synchronize docker-compose files
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ docker_compose_file_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "../templates/docker_compose_files/*.j2"
- name: Synchronize docker-compose files
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ docker_compose_file_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "../templates/docker_compose_files/*.j2"

View File

@@ -4,64 +4,64 @@
- name: "Update and install APT packages"
when:
- ansible_os_family != 'Darwin'
- manage_apt_packages_list
- ansible_os_family != 'Darwin'
- manage_apt_packages_list
block:
- name: update APT package cache
become: true
ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
- name: Update APT package cache
become: true
ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
- name: "upgrade APT to the latest packages (this may take a while)"
become: true
ansible.builtin.apt:
upgrade: safe
- name: "Upgrade APT to the latest packages (this may take a while)"
become: true
ansible.builtin.apt:
upgrade: safe
- name: "install/upgrade APT packages (this may take a while)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop: "{{ apt_packages_list }}"
register: apt_output
- name: "Install/upgrade APT packages (this may take a while)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop: "{{ apt_packages_list }}"
register: apt_output
- name: "Update and install Homebrew packages"
when:
- manage_homebrew_package_list
- ansible_os_family == 'Darwin'
- manage_homebrew_package_list
- ansible_os_family == 'Darwin'
block:
- name: upgrade homebrew and all packages
community.general.homebrew:
update_homebrew: true
upgrade_all: true
register: homebrew_output
ignore_errors: true
- name: Upgrade homebrew and all packages
community.general.homebrew:
update_homebrew: true
upgrade_all: true
register: homebrew_output
ignore_errors: true
- name: install base homebrew packages
community.general.homebrew:
name: "{{ homebrew_package_list | join(',') }}"
state: present
update_homebrew: false
upgrade_all: false
register: homebrew_output
- name: Install base homebrew packages
community.general.homebrew:
name: "{{ homebrew_package_list | join(',') }}"
state: present
update_homebrew: false
upgrade_all: false
register: homebrew_output
- name: homebrew packages updated or installed
ansible.builtin.debug:
msg: "{{ homebrew_output.changed_pkgs }}"
- name: Homebrew packages updated or installed
ansible.builtin.debug:
msg: "{{ homebrew_output.changed_pkgs }}"
- name: unchanged homebrew packages
ansible.builtin.debug:
msg: "{{ homebrew_output.unchanged_pkgs }}"
- name: Unchanged homebrew packages
ansible.builtin.debug:
msg: "{{ homebrew_output.unchanged_pkgs }}"
- name: install homebrew casks
community.general.homebrew_cask:
name: "{{ item }}"
state: present
install_options: "appdir=/Applications"
accept_external_apps: true
upgrade_all: false
update_homebrew: false
greedy: false
loop: "{{ homebrew_casks_list }}"
ignore_errors: true
- name: Install homebrew casks
community.general.homebrew_cask:
name: "{{ item }}"
state: present
install_options: "appdir=/Applications"
accept_external_apps: true
upgrade_all: false
update_homebrew: false
greedy: false
loop: "{{ homebrew_casks_list }}"
ignore_errors: true

View File

@@ -5,36 +5,37 @@
- name: "Check if pull_all_repos exists"
ansible.builtin.stat:
path: "~/bin/pull_all_repos"
path: "~/bin/pull_all_repos"
check_mode: false
register: pull_script_check
- name: "Check if ~/repos exists"
ansible.builtin.stat:
path: "~/repos"
path: "~/repos"
check_mode: false
register: repos_directory_check
- name: "run pull_all_repos script"
- name: "Run pull_all_repos script"
ansible.builtin.command:
cmd: "~/bin/pull_all_repos --directory ~/repos"
cmd: "~/bin/pull_all_repos --directory ~/repos"
register: pull_script_output
when:
- not ansible_check_mode
- pull_script_check.stat.exists
- pull_script_check.stat.executable
- repos_directory_check.stat.isdir is defined
- repos_directory_check.stat.isdir
- repos_directory_check.stat.writeable
- not ansible_check_mode
- pull_script_check.stat.exists
- pull_script_check.stat.executable
- repos_directory_check.stat.isdir is defined
- repos_directory_check.stat.isdir
- repos_directory_check.stat.writeable
failed_when: pull_script_output.rc > 1
changed_when: pull_script_output.rc == 0
- name: "Output from pull_all_repos"
ansible.builtin.debug:
msg: "{{ pull_script_output.stdout }}"
msg: "{{ pull_script_output.stdout }}"
when:
- not ansible_check_mode
- pull_script_check.stat.exists
- pull_script_check.stat.executable
- repos_directory_check.stat.isdir is defined
- repos_directory_check.stat.isdir
- repos_directory_check.stat.writeable
- not ansible_check_mode
- pull_script_check.stat.exists
- pull_script_check.stat.executable
- repos_directory_check.stat.isdir is defined
- repos_directory_check.stat.isdir
- repos_directory_check.stat.writeable

View File

@@ -1,12 +1,12 @@
---
# TASK DESCRIPTION:
# Always runs first. Confirms we can actually use Ansible
- name: sanity - user mode
- name: Sanity - user mode
become: false
ansible.builtin.debug:
msg: "sanity check: user mode"
msg: "Sanity check: user mode"
- name: sanity - become mode
- name: Sanity - become mode
become: true
ansible.builtin.debug:
msg: "sanity check: become mode"
msg: "Sanity check: become mode"

View File

@@ -4,90 +4,92 @@
#
# NOTE: This is deprecated; I no longer use Prometheus and have migrated to Telegraf
- name: populate service facts
- name: Populate service facts
ansible.builtin.service_facts:
- name: stop node_exporter
- name: Stop node_exporter
become: true
ansible.builtin.systemd:
name: node_exporter
state: stopped
name: node_exporter
state: stopped
when: ansible_facts.services["node_exporter.service"] is defined
- name: Ensure group "prometheus" exists
become: true
ansible.builtin.group:
name: prometheus
state: present
name: prometheus
state: present
- name: Add the user 'prometheus' with group 'prometheus'
become: true
ansible.builtin.user:
name: prometheus
group: prometheus
groups: docker
append: true
name: prometheus
group: prometheus
groups: docker
append: true
# --------------- Install or Update Prometheus
- name: "set fact: need to install Prometheus?"
- name: "Set fact: need to install Prometheus?"
ansible.builtin.set_fact:
need_prometheus_install: false
need_prometheus_install: false
- name: Check if node_exporter is installed
ansible.builtin.stat:
path: /usr/local/bin/node_exporter
path: /usr/local/bin/node_exporter
register: prometheus_binary_file_location
- name: "set fact: need to install Prometheus?"
- name: "Set fact: need to install Prometheus?"
ansible.builtin.set_fact:
need_prometheus_install: true
need_prometheus_install: true
when:
- not prometheus_binary_file_location.stat.exists
- not prometheus_binary_file_location.stat.exists
- name: Check current version of Prometheus
ansible.builtin.shell: /usr/local/bin/node_exporter --version 3>&1 1>&2 2>&3 | head -n1 | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_prometheus_version
failed_when: false
changed_when: false
check_mode: false
when:
- need_prometheus_install is false
- need_prometheus_install is false
- name: "set fact: need to install Prometheus?"
- name: "Set fact: need to install Prometheus?"
ansible.builtin.set_fact:
need_prometheus_install: true
need_prometheus_install: true
when:
- need_prometheus_install is false
- current_prometheus_version.stdout != prometheus_verssion
- need_prometheus_install is false
- current_prometheus_version.stdout != prometheus_verssion
- name: install node_exporter
- name: Install node_exporter
become: true
ansible.builtin.unarchive:
src: "https://github.com/prometheus/node_exporter/releases/download/v{{ prometheus_verssion }}/node_exporter-{{ prometheus_verssion }}.linux-armv7.tar.gz"
dest: /usr/local/bin
group: prometheus
owner: prometheus
# reference for extra_opts: https://github.com/ansible/ansible/issues/27081
extra_opts:
- --strip=1
- --no-anchored
- "node_exporter"
remote_src: true
src: "https://github.com/prometheus/node_exporter/releases/download/v{{ prometheus_verssion }}/node_exporter-{{ prometheus_verssion }}.linux-armv7.tar.gz"
dest: /usr/local/bin
group: prometheus
owner: prometheus
# reference for extra_opts: https://github.com/ansible/ansible/issues/27081
extra_opts:
- --strip=1
- --no-anchored
- "node_exporter"
remote_src: true
when:
- need_prometheus_install is true
- need_prometheus_install is true
- name: create node_exporter service
- name: Create node_exporter service
become: true
ansible.builtin.template:
src: node_exporter.service.j2
dest: /etc/systemd/system/node_exporter.service
mode: 0644
src: node_exporter.service.j2
dest: /etc/systemd/system/node_exporter.service
mode: 0644
- name: start node_exporter
- name: Start node_exporter
become: true
ansible.builtin.systemd:
name: node_exporter
daemon_reload: true
enabled: true
state: started
name: node_exporter
daemon_reload: true
enabled: true
state: started
when:
- "'nostart' not in ansible_run_tags"
- "'nostart' not in ansible_run_tags"

View File

@@ -4,186 +4,187 @@
- name: "Set variables"
block:
- name: "Set tdarr local filesystem location (pis)"
ansible.builtin.set_fact:
interpolated_tdarr_dir: "{{ rpi1_tdarr_file_location }}"
changed_when: false
when:
- "'pis' in group_names"
- name: "Set tdarr local filesystem location (pis)"
ansible.builtin.set_fact:
interpolated_tdarr_dir: "{{ rpi1_tdarr_file_location }}"
changed_when: false
when:
- "'pis' in group_names"
- name: "Set tdarr local filesystem location (macs)"
ansible.builtin.set_fact:
interpolated_tdarr_dir: "{{ mac_tdarr_file_location }}"
changed_when: false
when:
- "'macs' in group_names"
- name: "Set tdarr local filesystem location (macs)"
ansible.builtin.set_fact:
interpolated_tdarr_dir: "{{ mac_tdarr_file_location }}"
changed_when: false
when:
- "'macs' in group_names"
- name: "set variable: Set tdarr download Binary (armv7l)"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/linux_arm/Tdarr_Updater.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set tdarr download Binary (armv7l)"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/linux_arm/Tdarr_Updater.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "set variable: Set tdarr download Binary (MacOSX) - Intel"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_x64/Tdarr_Updater.zip"
when:
- mac_intel
- name: "Set variable: Set tdarr download Binary (MacOSX) - Intel"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_x64/Tdarr_Updater.zip"
when:
- mac_intel
- name: "set variable: Set tdarr download Binary (MacOSX) - ARM"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_arm64/Tdarr_Updater.zip"
when:
- mac_arm
- name: "Set variable: Set tdarr download Binary (MacOSX) - ARM"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_arm64/Tdarr_Updater.zip"
when:
- mac_arm
- name: "set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: false
- name: "Set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: false
- name: Assert that we can install Tdarr
ansible.builtin.assert:
that:
- tdarr_download_uri is defined
- interpolated_tdarr_dir is defined
fail_msg: "Unable to install Tdarr on this host"
- name: Assert that we can install Tdarr
ansible.builtin.assert:
that:
- tdarr_download_uri is defined
- interpolated_tdarr_dir is defined
fail_msg: "Unable to install Tdarr on this host"
- name: "Install ffmpeg and HandbrakeCLI"
block:
- name: "ensure ffmpeg and handbrake are installed (Debian)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop:
- ffmpeg
- handbrake
when: "'pis' in group_names"
- name: "Ensure ffmpeg and handbrake are installed (Debian)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop:
- ffmpeg
- handbrake
when: "'pis' in group_names"
- name: "ensure ffmpeg and handbrake are installed (MacOS)"
community.general.homebrew:
name: "{{ item }}"
state: present
update_homebrew: false
upgrade_all: false
loop:
- ffmpeg
- handbrake
when: "'macs' in group_names"
- name: "Ensure ffmpeg and handbrake are installed (MacOS)"
community.general.homebrew:
name: "{{ item }}"
state: present
update_homebrew: false
upgrade_all: false
loop:
- ffmpeg
- handbrake
when: "'macs' in group_names"
- name: "ensure tdarr directory exists"
- name: "Ensure tdarr directory exists"
become: true
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
state: directory
path: "{{ interpolated_tdarr_dir }}"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
state: directory
- name: "Install tdarr"
block:
- name: "set_fact: need Tdarr install?"
ansible.builtin.stat:
path: "{{ interpolated_tdarr_dir }}/configs"
register: tdarr_exists
changed_when: false
failed_when: false
- name: "Set fact: need Tdarr install?"
ansible.builtin.stat:
path: "{{ interpolated_tdarr_dir }}/configs"
register: tdarr_exists
changed_when: false
failed_when: false
- name: "set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: true
when: not tdarr_exists.stat.exists
- name: "Set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: true
when: not tdarr_exists.stat.exists
- name: Download tdarr
ansible.builtin.unarchive:
src: "{{ tdarr_download_uri }}"
dest: "{{ interpolated_tdarr_dir }}"
remote_src: true
when: need_tdarr_install
- name: Download tdarr
ansible.builtin.unarchive:
src: "{{ tdarr_download_uri }}"
dest: "{{ interpolated_tdarr_dir }}"
remote_src: true
when: need_tdarr_install
- name: Did tdarr download?
ansible.builtin.stat:
path: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
register: tdarr_installer_exists
failed_when: not tdarr_installer_exists.stat.exists
when: need_tdarr_install
- name: Did tdarr download?
ansible.builtin.stat:
path: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
register: tdarr_installer_exists
failed_when: not tdarr_installer_exists.stat.exists
when: need_tdarr_install
- name: Ensure correct permissions on Tdarr_Updater
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
mode: 0755
when: need_tdarr_install
- name: Ensure correct permissions on Tdarr_Updater
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
mode: 0755
when: need_tdarr_install
- name: Install tdarr
ansible.builtin.command:
cmd: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
register: tdarr_install
failed_when: tdarr_install.rc > 0
when: need_tdarr_install
- name: Install tdarr
ansible.builtin.command:
cmd: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
register: tdarr_install
failed_when: tdarr_install.rc > 0
changed_when: tdarr_install.rc == 0
when: need_tdarr_install
- name: Ensure correct permissions on server/node executables
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}/{{ item }}"
mode: 0755
loop:
- Tdarr_Server/Tdarr_Server
- Tdarr_Node/Tdarr_Node
when: need_tdarr_install
- name: Ensure correct permissions on server/node executables
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}/{{ item }}"
mode: 0755
loop:
- Tdarr_Server/Tdarr_Server
- Tdarr_Node/Tdarr_Node
when: need_tdarr_install
- name: "configure tdarr"
- name: "Configure tdarr"
block:
- name: update server configuration file
ansible.builtin.template:
src: Tdarr_Server_Config.json.j2
dest: "{{ interpolated_tdarr_dir }}/configs/Tdarr_Server_Config.json"
mode: 0644
when: is_tdarr_server
- name: Update server configuration file
ansible.builtin.template:
src: Tdarr_Server_Config.json.j2
dest: "{{ interpolated_tdarr_dir }}/configs/Tdarr_Server_Config.json"
mode: 0644
when: is_tdarr_server
- name: update node configuration file
ansible.builtin.template:
src: Tdarr_Node_Config.json.j2
dest: "{{ interpolated_tdarr_dir }}/configs/Tdarr_Node_Config.json"
mode: 0644
when: is_tdarr_node
- name: Update node configuration file
ansible.builtin.template:
src: Tdarr_Node_Config.json.j2
dest: "{{ interpolated_tdarr_dir }}/configs/Tdarr_Node_Config.json"
mode: 0644
when: is_tdarr_node
- name: check if consul is installed?
ansible.builtin.stat:
path: "{{ interpolated_consul_configuration_dir }}"
register: consul_installed
changed_when: false
failed_when: false
when:
- is_tdarr_server
- name: Check if consul is installed?
ansible.builtin.stat:
path: "{{ interpolated_consul_configuration_dir }}"
register: consul_installed
changed_when: false
failed_when: false
when:
- is_tdarr_server
- name: move consul service config into place
become: true
ansible.builtin.template:
src: consul_services/tdarr_service.json.j2
dest: "{{ interpolated_consul_configuration_dir }}/tdarr_service.json"
mode: 0644
when:
- is_tdarr_server
- consul_installed.stat.exists
- name: Move consul service config into place
become: true
ansible.builtin.template:
src: consul_services/tdarr_service.json.j2
dest: "{{ interpolated_consul_configuration_dir }}/tdarr_service.json"
mode: 0644
when:
- is_tdarr_server
- consul_installed.stat.exists
- name: Reload consul agent
ansible.builtin.uri:
url: "http://{{ ansible_host }}:8500/v1/agent/reload"
method: PUT
status_code: 200
ignore_errors: true
register: consul_agent_reload_http_response
failed_when: consul_agent_reload_http_response.status != 200
when:
- is_tdarr_server
- consul_installed.stat.exists
- name: Reload consul agent
ansible.builtin.uri:
url: "http://{{ ansible_host }}:8500/v1/agent/reload"
method: PUT
status_code: 200
ignore_errors: true
register: consul_agent_reload_http_response
failed_when: consul_agent_reload_http_response.status != 200
when:
- is_tdarr_server
- consul_installed.stat.exists
- name: debug when consul agent reload fails
ansible.builtin.debug:
var: consul_agent_reload_http_response.msg
when:
- is_tdarr_server
- consul_installed.stat.exists
- consul_agent_reload_http_response.status != 200
- name: Debug when consul agent reload fails
ansible.builtin.debug:
var: consul_agent_reload_http_response.msg
when:
- is_tdarr_server
- consul_installed.stat.exists
- consul_agent_reload_http_response.status != 200
- name: mount shared storage
- name: Mount shared storage
ansible.builtin.import_tasks: cluster_storage.yml
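(Editor's note, not part of this changeset: after the consul agent reload above, the registration could be verified against Consul's catalog API. A sketch under the assumptions that the service is named "tdarr" and that the agent answers on the standard port 8500:)

- name: "Verify the tdarr service is registered after the reload (illustrative sketch)"
  ansible.builtin.uri:
    url: "http://{{ ansible_host }}:8500/v1/catalog/service/tdarr"
    method: GET
    return_content: true
  register: tdarr_catalog_response
  failed_when: (tdarr_catalog_response.json | default([])) | length == 0
  when:
    - is_tdarr_server
    - consul_installed.stat.exists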

View File

@@ -5,146 +5,146 @@
# --------------------------------- Set variables depending on system type
- name: "Configure variables"
block:
- name: "set variable: telegraph_binary_location (Debian)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/bin/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "Set variable: telegraph_binary_location (Debian)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/bin/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "set variable: telegraph_binary_location (MacOS)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/local/bin/telegraf"
when:
- ansible_os_family == 'Darwin'
- name: "Set variable: telegraph_binary_location (MacOS)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/local/bin/telegraf"
when:
- ansible_os_family == 'Darwin'
- name: "set fact: telegraph_config_location (Debian)"
ansible.builtin.set_fact:
telegraph_config_location: "/etc/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "Set fact: telegraph_config_location (Debian)"
ansible.builtin.set_fact:
telegraph_config_location: "/etc/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/usr/local/etc"
when:
- ansible_os_family == 'Darwin'
- name: "Set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/usr/local/etc"
when:
- ansible_os_family == 'Darwin'
- name: "set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/volume1/docker/telegraf/config"
when:
- inventory_hostname == 'synology'
- name: "Set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/volume1/docker/telegraf/config"
when:
- inventory_hostname == 'synology'
- name: "Fail if arm Mac (need to update task) or variables not defined"
ansible.builtin.assert:
that:
- telegraph_binary_location is defined
- telegraph_config_location is defined
- not mac_arm
fail_msg: "Unable to install Telegraf on this host"
- name: "Fail if arm Mac (need to update task) or variables not defined"
ansible.builtin.assert:
that:
- telegraph_binary_location is defined
- telegraph_config_location is defined
- not mac_arm
fail_msg: "Unable to install Telegraf on this host"
- name: "set variable: Set speedtest download Binary (armv7l)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-armhf.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set speedtest download Binary (armv7l)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-armhf.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "set variable: Set speedtest download Binary (aarch64)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-aarch64.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set speedtest download Binary (aarch64)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-aarch64.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Install/upgrade Telegraf"
block:
- name: "set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: false
when: telegraph_binary_location is defined
- name: "Set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: false
when: telegraph_binary_location is defined
- name: Check if telegraf is installed
ansible.builtin.stat:
path: "{{ telegraph_binary_location }}"
check_mode: false
register: telegraf_binary_exists
when: telegraph_binary_location is defined
- name: Check if telegraf is installed
ansible.builtin.stat:
path: "{{ telegraph_binary_location }}"
check_mode: false
register: telegraf_binary_exists
when: telegraph_binary_location is defined
- name: "set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
check_mode: false
when:
- telegraph_binary_location is defined
- not telegraf_binary_exists.stat.exists
- name: "Set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
check_mode: false
when:
- telegraph_binary_location is defined
- not telegraf_binary_exists.stat.exists
- name: Check current version of telegraf
ansible.builtin.shell: "{{ telegraph_binary_location }} --version | grep -oE '[0-9]+\\.[0-9]+\\.[0-9]+'"
ignore_errors: true
register: current_telegraf_version
check_mode: false
changed_when: false
when:
- not need_telegraf_install
- telegraph_binary_location is defined
- name: Check current version of telegraf
ansible.builtin.shell: "{{ telegraph_binary_location }} --version | grep -oE '[0-9]+\\.[0-9]+\\.[0-9]+'"
ignore_errors: true
register: current_telegraf_version
check_mode: false
changed_when: false
when:
- not need_telegraf_install
- telegraph_binary_location is defined
- name: "set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
when:
- telegraph_binary_location is defined
- not need_telegraf_install
- current_telegraf_version.stdout is version(telegraf_version, '<')
- name: "Set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
when:
- telegraph_binary_location is defined
- not need_telegraf_install
- current_telegraf_version.stdout is version(telegraf_version, '<')
- name: install telegraf (MacOS)
community.general.homebrew:
name: telegraf
state: present
notify: restart_telegraf
when:
- ansible_os_family == 'Darwin'
- need_telegraf_install
- name: Install telegraf (MacOS)
community.general.homebrew:
name: telegraf
state: present
notify: restart_telegraf
when:
- ansible_os_family == 'Darwin'
- need_telegraf_install
- name: install base apt-transport (Debian)
become: true
ansible.builtin.apt:
pkg: apt-transport-https
state: present
update_cache: true
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Install base apt-transport (Debian)
become: true
ansible.builtin.apt:
pkg: apt-transport-https
state: present
update_cache: true
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Download telegraf GPG key (Debian)
become: true
ansible.builtin.apt_key:
state: present
url: "https://repos.influxdata.com/influxdb.key"
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Download telegraf GPG key (Debian)
become: true
ansible.builtin.apt_key:
state: present
url: "https://repos.influxdata.com/influxdb.key"
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Add telegraf repository to apt (Debian)
become: true
ansible.builtin.apt_repository:
repo: deb https://repos.influxdata.com/debian bullseye stable
state: present
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Add telegraf repository to apt (Debian)
become: true
ansible.builtin.apt_repository:
repo: deb https://repos.influxdata.com/debian bullseye stable
state: present
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: install telegraf (Debian)
become: true
ansible.builtin.apt:
pkg: telegraf
state: latest
update_cache: true
only_upgrade: true
notify: restart_telegraf
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Install telegraf (Debian)
become: true
ansible.builtin.apt:
pkg: telegraf
state: latest
update_cache: true
only_upgrade: true
notify: restart_telegraf
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
# - name: give telegraf access to docker
# become: true
@@ -162,115 +162,115 @@
- name: "Install speedtest"
when: "'pis' in group_names"
block:
- name: "set fact: do we need speedtest installed?"
ansible.builtin.set_fact:
need_speedtest_install: false
- name: "Set fact: do we need speedtest installed?"
ansible.builtin.set_fact:
need_speedtest_install: false
- name: Check if speedtest is installed
ansible.builtin.stat:
path: /usr/local/bin/speedtest
register: speedtest_binary_file_location
- name: Check if speedtest is installed
ansible.builtin.stat:
path: /usr/local/bin/speedtest
register: speedtest_binary_file_location
- name: "set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not speedtest_binary_file_location.stat.exists
- name: "Set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not speedtest_binary_file_location.stat.exists
- name: Check current version of speedtest
ansible.builtin.shell: /usr/local/bin/speedtest --version | head -n1 | awk '{print $4}' | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_speedtest_version
check_mode: false
changed_when: false
when:
- not need_speedtest_install
- name: Check current version of speedtest
ansible.builtin.shell: /usr/local/bin/speedtest --version | head -n1 | awk '{print $4}' | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_speedtest_version
check_mode: false
changed_when: false
when:
- not need_speedtest_install
- name: "set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not need_speedtest_install
- current_speedtest_version.stdout is version(speedtest_cli_version, '<')
- name: "Set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not need_speedtest_install
- current_speedtest_version.stdout is version(speedtest_cli_version, '<')
- name: "Install speedtest (pi)"
become: true
ansible.builtin.unarchive:
src: "{{ speedtest_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
when:
- need_speedtest_install
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Install speedtest (pi)"
become: true
ansible.builtin.unarchive:
src: "{{ speedtest_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
when:
- need_speedtest_install
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Configure Telegraf"
block:
- name: "Ensure {{ telegraph_config_location }} exists"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0755
loop:
- "{{ telegraph_config_location }}"
- "{{ telegraph_config_location }}/telegraf.d"
- name: "Ensure {{ telegraph_config_location }} exists"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0755
loop:
- "{{ telegraph_config_location }}"
- "{{ telegraph_config_location }}/telegraf.d"
- name: template config files to server
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/base_config.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.conf" }
- { src: "telegraf/custom_metrics.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/custom_metrics.conf" }
- { src: "telegraf/nomad.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/nomad.conf" }
- { src: "telegraf/docker.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/docker.conf" }
notify: restart_telegraf
- name: Template config files to server
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/base_config.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.conf" }
- { src: "telegraf/custom_metrics.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/custom_metrics.conf" }
- { src: "telegraf/nomad.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/nomad.conf" }
- { src: "telegraf/docker.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/docker.conf" }
notify: restart_telegraf
- name: template leader configs (ie, configs that should be placed on a single server)
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/leader.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/leader.conf" }
- { src: "telegraf/speedtest.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/speedtest.conf" }
- { src: "telegraf/pingHosts.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/pingHosts.conf" }
when:
- is_cluster_leader
notify: restart_telegraf
- name: Template leader configs (ie, configs that should be placed on a single server)
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/leader.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/leader.conf" }
- { src: "telegraf/speedtest.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/speedtest.conf" }
- { src: "telegraf/pingHosts.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/pingHosts.conf" }
when:
- is_cluster_leader
notify: restart_telegraf
- name: Copy custom metrics script
become: true
ansible.builtin.template:
src: "scripts/telegraf_custom_metrics.sh.j2"
dest: "/usr/local/bin/telegraf_custom_metrics.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- inventory_hostname != 'synology'
- name: Copy custom metrics script
become: true
ansible.builtin.template:
src: "scripts/telegraf_custom_metrics.sh.j2"
dest: "/usr/local/bin/telegraf_custom_metrics.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- inventory_hostname != 'synology'
- name: Copy speedtest script
become: true
ansible.builtin.template:
src: "scripts/telegraf_speedtest.sh.j2"
dest: "/usr/local/bin/telegraf_speedtest.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- is_cluster_leader
- name: Copy speedtest script
become: true
ansible.builtin.template:
src: "scripts/telegraf_speedtest.sh.j2"
dest: "/usr/local/bin/telegraf_speedtest.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- is_cluster_leader
- name: Reset file ownership
become: true
ansible.builtin.file:
path: "{{ telegraph_config_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- (ansible_os_family == 'Darwin') or (inventory_hostname == 'synology')
- name: Reset file ownership
become: true
ansible.builtin.file:
path: "{{ telegraph_config_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- (ansible_os_family == 'Darwin') or (inventory_hostname == 'synology')
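(Editor's note, not part of this changeset: once the base config and the telegraf.d drop-ins are templated, the rendered configuration could be validated before the restart handler fires. A sketch that assumes the Telegraf CLI's --config, --config-directory and --test flags:)

- name: "Validate the rendered Telegraf configuration (illustrative sketch)"
  ansible.builtin.command:
    cmd: "{{ telegraph_binary_location }} --config {{ telegraph_config_location }}/telegraf.conf --config-directory {{ telegraph_config_location }}/telegraf.d --test"
  changed_when: false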

View File

@@ -2,7 +2,7 @@ version: '3.9'
services:
consul:
image: consul:{{ consul_version }}
image: hashicorp/consul:{{ consul_version }}
hostname: consul
container_name: consul
network_mode: "host"

View File

@@ -206,9 +206,9 @@ plugin "raw_exec" {
plugin "docker" {
config {
allow_caps = [ "ALL" ]
allow_caps = ["all"]
allow_privileged = true
extra_labels = ["job_name"]
volumes {
enabled = true
}

View File

@@ -7,9 +7,16 @@ ConditionFileNotEmpty={{ nomad_configuration_dir }}/nomad.hcl
[Service]
{# {% if 'linode' in group_names %} #}
User=nomad
Group=nomad
{# User=nomad #}
{# Group=nomad #}
{# {% endif %} #}
{# NOTE: Nomad is running as root rather than the Nomad user due to the Docker driver not being started when cgroups v2 are enabled.
https://github.com/hashicorp/nomad/pull/16063
#}
User=root
Group=root
ExecReload=/bin/kill -HUP $MAINPID
ExecStart=/usr/local/bin/nomad agent -config {{ nomad_configuration_dir }}
KillMode=process

View File

@@ -32,104 +32,182 @@ job "recyclarr" {
task "recyclarr" {
env {
TZ = "America/New_York"
TZ = "America/New_York"
RECYCLARR_APP_DATA = "/local"
}
// user = "${meta.PUID}:${meta.PGID}"
driver = "docker"
config {
image = "ghcr.io/recyclarr/recyclarr:2"
image = "ghcr.io/recyclarr/recyclarr:{{ recyclarr_version }}"
hostname = "${NOMAD_TASK_NAME}"
init = true
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/config"
]
} // docker config
// template {
// destination = "local/recyclarr.yml"
// env = false
// change_mode = "restart"
// perms = "644"
// data = <<-EOH
// ---
// # yaml-language-server: $schema=https://raw.githubusercontent.com/recyclarr/recyclarr/master/schemas/config-schema.json
template {
destination = "local/recyclarr.yml"
env = false
change_mode = "restart"
perms = "644"
data = <<-EOH
# yaml-language-server: $schema=https://raw.githubusercontent.com/recyclarr/recyclarr/master/schemas/config-schema.json
// # A starter config to use with Recyclarr. Most values are set to "reasonable defaults". Update the
// # values below as needed for your instance. You will be required to update the API Key and URL for
// # each instance you want to use.
// #
// # Many optional settings have been omitted to keep this template simple.
// #
// # For more details on the configuration, see the Configuration Reference on the wiki here:
// # https://github.com/recyclarr/recyclarr/wiki/Configuration-Reference
# A starter config to use with Recyclarr. Most values are set to "reasonable defaults". Update the
# values below as needed for your instance. You will be required to update the API Key and URL for
# each instance you want to use.
#
# Many optional settings have been omitted to keep this template simple. Note that there's no "one
# size fits all" configuration. Please refer to the guide to understand how to build the appropriate
# configuration based on your hardware setup and capabilities.
#
# For any lines that mention uncommenting YAML, you simply need to remove the leading hash (`#`).
# The YAML comments will already be at the appropriate indentation.
#
# For more details on the configuration, see the Configuration Reference on the wiki here:
# https://recyclarr.dev/wiki/reference/config-reference
// # Configuration specific to Sonarr
// sonarr:
// # Set the URL/API Key to your actual instance
# Configuration specific to Sonarr
sonarr:
series:
base_url: https://sonarr.{{ homelab_domain_name }}/
api_key: {{ sonarr_api_key }}
delete_old_custom_formats: true
// {% raw -%}
// - base_url: http://{{ range service "sonarr" }}{{ .Address }}:{{ .Port }}{{ end }}
// api_key: f7e74ba6c80046e39e076a27af5a8444
// {% endraw -%}
# Quality definitions from the guide to sync to Sonarr. Choices: series, anime
quality_definition:
type: series
// # Quality definitions from the guide to sync to Sonarr. Choice: anime, series, hybrid
// quality_definition: series
# Release profiles from the guide to sync to Sonarr v3 (Sonarr v4 does not use this!)
# Use `recyclarr list release-profiles` for values you can put here.
# https://trash-guides.info/Sonarr/Sonarr-Release-Profile-RegEx/
release_profiles:
- trash_ids:
- EBC725268D687D588A20CBC5F97E538B # Low Quality Groups
- 1B018E0C53EC825085DD911102E2CA36 # Release Sources (Streaming Service)
- 71899E6C303A07AF0E4746EFF9873532 # P2P Groups + Repack/Proper
strict_negative_scores: false
// # Release profiles from the guide to sync to Sonarr.
// # You can optionally add tags and make negative scores strictly ignored
// release_profiles:
// # Series
// - trash_ids:
// - EBC725268D687D588A20CBC5F97E538B # Low Quality Groups
// - 1B018E0C53EC825085DD911102E2CA36 # Release Sources (Streaming Service)
// - 71899E6C303A07AF0E4746EFF9873532 # P2P Groups + Repack/Proper
// # Anime (Uncomment below if you want it)
// # - trash_ids:
// # - d428eda85af1df8904b4bbe4fc2f537c # Anime - First release profile
// # - 6cd9e10bb5bb4c63d2d7cd3279924c7b # Anime - Second release profile
- trash_ids:
- 76e060895c5b8a765c310933da0a5357 # Optionals
filter:
include:
- cec8880b847dd5d31d29167ee0112b57 # Golden rule
- 436f5a7d08fbf02ba25cb5e5dfe98e55 # Ignore Dolby Vision without HDR10 fallback.
# - f3f0f3691c6a1988d4a02963e69d11f2 # Ignore The Group -SCENE
# - 5bc23c3a055a1a5d8bbe4fb49d80e0cb # Ignore so called scene releases
- 538bad00ee6f8aced8e0db5218b8484c # Ignore Bad Dual Audio Groups
- 4861d8238f9234606df6721df6e27deb # Ignore AV1
- bc7a6383cbe88c3ee2d6396e1aacc0b3 # Prefer HDR
- 6f2aefa61342a63387f2a90489e90790 # Dislike retags: rartv, rarbg, eztv, TGx
- 19cd5ecc0a24bf493a75e80a51974cdd # Dislike retagged groups
- 6a7b462c6caee4a991a9d8aa38ce2405 # Dislike release ending: en
- 236a3626a07cacf5692c73cc947bc280 # Dislike release containing: 1-
# - fa47da3377076d82d07c4e95b3f13d07 # Prefer Dolby Vision
// # Configuration specific to Radarr.
// radarr:
// # Set the URL/API Key to your actual instance
// {% raw -%}
// - base_url: http://{{ range service "radarr" }}{{ .Address }}:{{ .Port }}{{ end }}
// api_key: f7e74ba6c80046e39e076a27af5a8444
// {% endraw -%}
# Configuration specific to Radarr.
radarr:
movies:
# Set the URL/API Key to your actual instance
base_url: https://radarr.{{ homelab_domain_name }}/
api_key: {{ radarr_api_key }}
delete_old_custom_formats: true
replace_existing_custom_formats: true
// # Which quality definition in the guide to sync to Radarr. Only choice right now is 'movie'
// quality_definition:
// type: movie
# Which quality definition in the guide to sync to Radarr. Only choice right now is 'movie'
quality_definition:
type: movie
preferred_ratio: 0.5
// # Set to 'true' to automatically remove custom formats from Radarr when they are removed from
// # the guide or your configuration. This will NEVER delete custom formats you manually created!
// delete_old_custom_formats: false
quality_profiles:
- name: "720p/1080p"
reset_unmatched_scores: true
- name: "720p/1080p Remux"
reset_unmatched_scores: true
// custom_formats:
// # A list of custom formats to sync to Radarr. Must match the "trash_id" in the guide JSON.
// - trash_ids:
// - ed38b889b31be83fda192888e2286d83 # BR-DISK
// - 90cedc1fea7ea5d11298bebd3d1d3223 # EVO (no WEBDL)
// - 90a6f9a284dff5103f6346090e6280c8 # LQ
// - dc98083864ea246d05a42df0d05f81cc # x265 (720/1080p)
// - b8cd450cbfa689c0259a01d9e29ba3d6 # 3D
custom_formats:
# Use `recyclarr list custom-formats radarr` for values you can put here.
# https://trash-guides.info/Radarr/Radarr-collection-of-custom-formats/
// # Uncomment the below properties to specify one or more quality profiles that should be
// # updated with scores from the guide for each custom format. Without this, custom formats
// # are synced to Radarr but no scores are set in any quality profiles.
// # quality_profiles:
// # - name: Quality Profile 1
// # - name: Quality Profile 2
// # #score: -9999 # Optional score to assign to all CFs. Overrides scores in the guide.
// # #reset_unmatched_scores: true # Optionally set other scores to 0 if they are not listed in 'names' above.
// EOH
// }
- trash_ids:
# Movie versions
- eca37840c13c6ef2dd0262b141a5482f # 4K Remaster
- 570bc9ebecd92723d2d21500f4be314c # Remaster
- 0f12c086e289cf966fa5948eac571f44 # Hybrid
- 9d27d9d2181838f76dee150882bdc58c # Masters of Cinema
- e0c07d59beb37348e975a930d5e50319 # Criterion Collection
- 957d0f44b592285f26449575e8b1167e # Special Edition
- eecf3a857724171f968a66cb5719e152 # IMAX
- 9f6cbff8cfe4ebbc1bde14c7b7bec0de # IMAX Enhanced
# Unwanted
- b8cd450cbfa689c0259a01d9e29ba3d6 # 3D
- ed38b889b31be83fda192888e2286d83 # BR-DISK
- 90a6f9a284dff5103f6346090e6280c8 # LQ
- bfd8eb01832d646a0a89c4deb46f8564 # Upscaled
- 90cedc1fea7ea5d11298bebd3d1d3223 # EVO (no WEBDL)
- 923b6abef9b17f937fab56cfcf89e1f1 # DV (WEBDL)
- b6832f586342ef70d9c128d40c07b872 # Bad Dual Groups
- ae9b7c9ebde1f3bd336a8cbd1ec4c5e5 # No-RlsGroup
- 7357cf5161efbf8c4d5d0c30b4815ee2 # Obfuscated
- 5c44f52a8714fdd79bb4d98e2673be1f # Retags
- c465ccc73923871b3eb1802042331306 # Line/Mic Dubbed
# Misc
- e7718d7a3ce595f289bfee26adc178f5 # Repack/Proper
- ae43b294509409a6a13919dedd4764c4 # Repack2
# HQ Release Groups
- ed27ebfef2f323e964fb1f61391bcb35 # HD Bluray Tier 01
- c20c8647f2746a1f4c4262b0fbbeeeae # HD Bluray Tier 02
- c20f169ef63c5f40c2def54abaf4438e # WEB Tier 01
- 403816d65392c79236dcb6dd591aeda4 # WEB Tier 02
- af94e0fe497124d1f9ce732069ec8c3b # WEB Tier 03
quality_profiles:
- name: "720p/1080p"
- name: "720p/1080p Remux"
# HDR FORMATS
# ########################
- trash_ids:
- 3a3ff47579026e76d6504ebea39390de # Remux Tier 01
- 9f98181fe5a3fbeb0cc29340da2a468a # Remux Tier 02
- e61e28db95d22bedcadf030b8f156d96 # HDR
- 2a4d9069cc1fe3242ff9bdaebed239bb # HDR (undefined)
quality_profiles:
- name: "720p/1080p"
score: -100
- name: "720p/1080p Remux"
# AUDIO FORMATS
# ########################
- trash_ids:
- 6fd7b090c3f7317502ab3b63cc7f51e3 # 6.1 Surround
- e77382bcfeba57cb83744c9c5449b401 # 7.1 Surround
- f2aacebe2c932337fe352fa6e42c1611 # 9.1 Surround
quality_profiles:
- name: "720p/1080p"
score: -50
- name: "720p/1080p Remux"
score: -50
- trash_ids:
- 89dac1be53d5268a7e10a19d3c896826 # 2.0 Stereo
quality_profiles:
- name: "720p/1080p"
score: 120
- trash_ids:
- 77ff61788dfe1097194fd8743d7b4524 # 5.1 Surround
quality_profiles:
- name: "720p/1080p"
score: 80
- name: "720p/1080p Remux"
score: 80
EOH
}
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
resources {
cpu = 100 # MHz
memory = 300 # MB
} // resources
} // task
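The recyclarr YAML above is the body of a Nomad task template stanza, so the config file is written into the task's allocation directory when the job is rendered. A minimal sketch of the wrapper shape, with an assumed destination path since the opening of the actual stanza is not visible in this hunk:

  template {
    data        = <<-EOH
    # recyclarr.yml contents as shown above
    EOH
    destination = "local/recyclarr.yml" // assumed path, not taken from this diff
  }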

View File

@@ -221,7 +221,7 @@ job "reverse-proxy" {
resources {
cpu = 200 # MHz
memory = 110 # MB
memory = 1000 # MB
}
} // task authelia
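The hunk above raises the authelia task's memory reservation from 110 MB to 1000 MB. On clusters running Nomad 1.1+ with memory oversubscription enabled, a smaller reservation with a burst ceiling is an alternative; a sketch with assumed values, not a change made in this diff:

  resources {
    cpu        = 200  # MHz
    memory     = 256  # MB reserved (assumed)
    memory_max = 1000 # MB burst ceiling; requires memory oversubscription to be enabled on the cluster
  }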

View File

@@ -0,0 +1,65 @@
job "valentina" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
// constraint {
// attribute = "${node.unique.name}"
// operator = "regexp"
// value = "rpi(1|2|3)"
// }
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "valentina" {
count = 1
restart {
attempts = 0
delay = "30s"
}
task "valentina" {
env {
PGID = "${meta.PGID}"
PUID = "${meta.PUID}"
TZ = "America/New_York"
VALENTINA_DISCORD_TOKEN = "{{ valentina_discord_token }}"
VALENTINA_GUILDS = "{{ valentina_guids }}"
VALENTINA_LOG_LEVEL = "INFO"
VALENTINA_OWNER_IDS = "{{ valentina_owner_ids }}"
VALENTINA_OWNER_CHANNELS = "{{ valentina_owner_channels }}"
}
driver = "docker"
config {
image = "ghcr.io/natelandau/valentina:v{{ valentina_version }}"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/valentina",
]
} // docker config
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
} // task
} // group
} // job
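With its resources stanza commented out, the valentina task falls back to Nomad's defaults (100 MHz CPU, 300 MB memory). If the bot needs explicit limits, the stanza can be re-enabled; a sketch with assumed values:

  resources {
    cpu    = 100 # MHz
    memory = 512 # MB (assumed; pick a value based on observed usage)
  }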

259
vault.yml
View File

@@ -1,118 +1,143 @@
$ANSIBLE_VAULT;1.1;AES256
35366634393265303030366466303232616338633038313738633637383439356439616536666230
6566643530623337323034306366613935663334313934310a636235653531316237393231376362
33663935366131663137363465666363336630386362313333633762656461636239366234633832
3538353463356335360a323030643238323034343666376230356465396639636563316532373638
35366637366663303164376661636563346330313932343462326239626264633262303739383831
31333134613534643265643433323065303833326662346466633931373337326233303633363032
66333336373865313333626566386665653833343638376264356430383764316134333231366466
62336534666565343839393237356139393738396333393337666631303461373362343664396665
63343161613462653866616566363631346566636639626138316539353362616261666337386635
62356262363564376334336163613035643336656331653562306433363161393435343431663137
66663936623834666364303333386335353961373031383164623766323836383462363231396263
34343662336637633262333530623039376534643966653839346236363166646564613333633366
33363534616466393137366234633030663036613263383733313235353364613864316139356330
38343439346661613136316235326430326437646135636637343665663031393262653661396331
39653739666364666564633364636231323237366265323631333234306631386362666135336461
35646564643631666663336237636435626338346663633038353964303764626236373561323763
34643565656462323764623263383037663735323364396437653332376137356263633963306332
39633339366236313063643665356366346138616434316332643731666634366336623064653361
63393134643630313632396434643131646464323737343133613364333465393834656236616134
30313961346236326563616263373463616432393962663262616232356663636439643731383930
32326664306563623665633164373932356163356361663465363362303661396662386630323137
32383333656435613762393430303163383135393037363763333139633239666639303538623134
32386635663962363939373365613138316435366433303863326561613463306338396136393965
34333961383035383135333561313331336565383031356133626530306163333666333564353262
38646434643234303363383965636339323633326330663736393461383461303661353365663631
33343831356135653139633463336330646634363639326635653863343632663466336639313962
65306438613933386664336138613066326364343738633531356664343664646464396162343861
36663030643762343938633564373531663430303536643665613532313630636461646235666335
62613634656232373936363439363766316561373937386261613861396566303834376134666564
35396330636166316239336433323939363839636361643630353263663233303166313863636364
34363134363161643234643134663361373237316466626363646264643530343064666464393166
66366561356336616663393064376162643731343532663436646432366331643066396232393432
63336633313963383132333639626130623737346137646561303338623136306361656630396364
36306234643161643864313334316634396233313831613830393865353763653963656632363865
34346439356166363839343063313263396437366163343734326162346166353465313163313236
35343531333438303561393137323831303063353466666463303835653630353630393836393236
32643035636335363137303134333735343964646130306339663137646261366635353632613533
35303636373465633831353439376464386132616238613336366134383037376165396365353436
66633937656162346661326136343266313937393436353532656634366535653762633930393239
62383862356165336435616666346238646666613066323262323530356534373262633861646466
66643935363334623264373338663362623439313138666338363732386666383739636162653763
32666439316632653633363266343365393366373834323065353335613563306135383432613433
61633835326565386662313265356536613237313364366163313562393836613061616432316638
33376531663533376435383437393539663565616439666438646232663732663063343666646631
36366364353339323262666630363932616461323833306666616365343530646536326363613232
65313031333064396662363736316137656161393865383135366539636432386539623837353634
62313638666564396462666334616365323932396236633932613362633166346265613161363863
39396464663966353565393662363633366237323066306436616437363666666635343265666435
32363162643761666639336464383430366565323862353161303338333232326335303462653938
61333162306132633637653736623033373164343463333933666438326534303730613862383035
39393939323561333738653465306165643461366534313537633162313638393630393361623432
34306264316565333334303633323836343162373738636161656565313134643262343533666434
37623962626263353062333939633662316663316238636331646230313861363364326636653365
37343761626437663832346266333634666361333361313638626639633934646335613062626365
31656132643034303032623365613530306436383437633761636238336139373739313836393336
66653066633962333730643034653032626530663731633462393937326236656362356236666333
62343139646139303433393163613037623963633230366236396434643163316664616435386436
61616237366130663662643162613730303033376334333066393432333032613830316262333763
31663936663239653361633634323736306363323864666635633465376363323838326366663630
39383463343038666564653663616161336266313563633731623335373732343732383164623431
39333262346539373937386531386466373863323232653265383064643863303638326566313765
30376565643462643439316431306331346438633331616437613762323138363061336630353661
61653139333962373261323063386231346266323762306433613363643230366265366239623832
38353562393064633537373761643539313234333136333530343536393033313131393932633637
63383066623930376561656436396564353264643630636332653862613933636630333633396130
33346638663033636436626631323330336430313738313465323737386434613538346564633938
62653236643033313062336664323335656132383430313831636334326430383938653762313835
65656430656363653735343738326534616335636130646539363066393436383961316135346262
30656131353733616562613239663965383864313263653063393635623838633538303433323437
61636162363663306166343464333534316131346231303663336365303363656635363066353131
38623263366136396466383538323637626236633163663033613934303766613931313135613132
39346237656635303166363031353866663833303537666330346130353563373763623530373464
65666338313039633732626564393161663335613264306637646332643133356135613366353264
62376466613037326463623131373937303039356138336337333163303636303335333736376563
63313730623664356165653861303139313039616231326136313634623136613365313466373561
35313466313539383838373838386131653638653430613863313430626139353465626636386539
37383034326561393666383566326364623337376432633864613630353662663665336232313064
31396531313937373336383438646234343661643534316332633163653532633565613136343235
31653739633035303430626364303961626433323835653638653839396231663662636666393563
30346162326561636365393831333435333362356432646161633463313963346537643631393736
66343966313361333934616534313037636238613830623938623563393230376635316631636534
62376136386137313737646135376463343831663162616566373764643930386539306231613964
65383633313037323234616364623139623834303466306666613334346131633531643932623362
31623337636434623531376437623033306235346366376436396336346634303138613262373464
30363735633663363364613139666164383436306666363362346633346663346366393634333335
61326234303734626465616530346339303536636665626436623237383434636362393034346562
64633765363531346138376166393030666433396339333662663036313031306434626236383664
63636633316138653433366337303033636330333761626162373435633062366639396362376537
30313436353964613838323332353137383433323265343831356334393238666438323735313630
66313534646636633866313533353533643531356266643433353137653130386165353936616438
31353331383461313130383035663837646439323366623935386236663262653165313432326639
65316339356661623436386537353335343332616362323463613966383736306638396630653437
62376232353763336365613438383936646265623261306338613663343864363839313663343030
37396537626435303036613531396239353439663930363263373632333536376364336436383961
31613237393430326663366531633633613362373265646437303530656564383830366164643465
31333162373037323836396234383265333832376461383530383139353562666635386661383262
31323162363834376432313766393965373763313664383966346464313865343261333030653033
37326564663836323963663735353432653938373632356564653830616562656333383563366432
32366464316265613565613830633264613634313134373530386562313163656434356164356139
62643065613638323735366332316366383236653762373436393631363039636333346431666137
33643239323062343537353061646138346661643262303363326137356461356439663166653739
39636534653935376433633761373630656566393535373962353762646165663566613235646134
32313838383538363532643965376464626361663431393165663238373762636337366434666437
32636234613264666633366663616639386236386333623766383735383431323964343965643362
39326266366162333266343133646335653837393962633731613230653665366462393931613462
33633966626132633832653634626633393238643238393064646233663064346333653164623336
34373634376335383639346338663830653061386161306134336530376161333637333733666533
63626464393435626361323333656639616431333638383163626662323733613564613430323532
31343262616133333965633462366636333762623764326231346437666634663339393563666664
65653536333834643937326464333464353135363738663031303162396535616139663535336535
65343062646465373831303235303933343030346562633561653534313263333033313531656430
64623833653832323134333138663966313939303739376131383133366131323530353961633765
63386436373262313334323932646232616435646665323736356433376332653530326230346331
63613163343365623937336564643431653963383333363664663934633962316663306537376236
62333531353833326232613565666563613864653364363333613737663965366133383231623839
65323161613533343130316635636630633931633936666662303330326262376233376562633865
3930
65346232313532653034646336636265363834383939386431623931333038626531326333393338
6162656139623334626366363365643234663736653461320a636138373537343836616539363638
33393935343138366463306265306230393833323131363031356564396564626534303639306237
6464663361633733640a656164356635386631383931343736333863373832366138313566396262
33333232326137316262313365633466643432343264303237646435353166333562333633343939
63613162376639653638623939366462613436333662353365633263376366643833353761303536
36373163666636396237386164366462356238663964653636636635303636653233313832313936
61646433333263343864633063363336636639666365303937643237336461383861316238363130
38373539333431383863313465386532623466663264643161653366346637633563383366383838
64383132346166306562353031393237373264613961366461373439393935313065646339613234
32346535373636646562393934653839323335666233303431643465326563626132623630663761
65343365353962636335626135663135353231633561353663366261393862393164356464386266
63666235626332383030363230306635393839393365636533383137633337396265333130663339
61313236646335396261353136376361396433666531346331303434363236643066316236353664
34323337336634623735323463323739616265336532613466346264316666313038363734373437
32353930313436323238366166386238356235333533393561376430313965616537313964313065
65373334326637316237653532336166356664313236323837323236353738303630303736343365
66313135373664653837663839626138653236663164396538373964633935666163656664316162
36326531646331623134353166333135333464363038626638633663363262363737313131623464
62303532393731623135313263633165303261306532346635613731326538386235346334383532
30356363353363643665313564393530303138373232303763633865633535393534313034643337
61336635313537323633643933383732313566386439616664346135396434303639323232323962
61646635323063656434346134633931376636303662303965383062633634313464353934636333
38306134386566646662666663346132646335336338653563623530316131393935653233386232
63353932646634623262343831643439313664343631623763353565616639613539363333666237
35393935383762343564323039323865643964643138313434316436666133313333656364373134
61376464646634373263306635313962353033396237356466373536653264656330316232313434
39666166646533396562323931303363653431356438356531643534303764633535306262613039
34383266336231313033626261633961336636613764366563626163386433346638343631306632
61333761323331376462313062363934333135393561323538353561393164323230343538646638
64613362666138656565376437646434383534633431663665326133393866636632336464393365
36356261636539663033303366306135326433353437663565633236663939613564346638316133
30363962346635353138373739336537626234363531383539656166376661363531643030373939
31326534616638346362653261366232333363646137373337323539626465643135636233396537
30633733333663396262323430343336646233663432353633333832396535326161623835623962
61303334663438643238666134653732306662363234313965623236386139316233346239306439
37333334646464363461646538343335373434376439326563303331323164386535393638343565
64333739386436336565613164396532333239623130656262323135643231323930646437326336
30653234636430666236336665383733343661323162316238623062336437326363376432353835
37633266373538633339323632363136656135326161623561653639373638326462306232326339
31313765306263663364303237396664666661336162643633313866393333393961623937633739
30323361616134336535633633663461663932323230306239346239663661623931646162653839
63303666313630393165313333303937353637323633623965626538663339353964643365303137
65326138316164313838326264643332323665353939323631313535376262356361643137353064
38346239343636653132653638623066633632383433356263326636333334353234636666333632
37663235393964656364643431313034343036303034643161653730383066363764316165323563
33366131393937306663643136326432373032373232633133623961376163633133343833376330
62333135396236346432326439306333373964643832356264613434353338363533396236653930
63313931363735396661346162656430333762383262303434373766323462363462393564383439
35356461386131633665393966376535353965313834396139373463353137636636366433626432
33373662356634653666396666646361616265616539356439386566613730613234393464663262
38376232616633626239363832383736326330643337626666363432353237623330353339393364
64333366383866383934663932346630656462633237613864653835333534323839383064393161
39333634333865313465643236373739326662663330646561613266633761646564373266353736
34343632333064643134663036353832633737663166646337636532313339636130353966643733
36383964633238663533346139306566643731313739613162323436356339393363396438373237
37663762616134353736663931373765383532663463333464393334313962343539663137313065
39356531303636313761653331343231663335313630336466653130663361666330393031336262
30313263666633626262383331636532343064373637303737336533663434616630623832666564
66626538356231373831616464336639646165373163303237643334643034386265326235353535
61346436316462613963653739316330643939303366323032353030383131373132653332376431
34316633383037643239303861353831636331633239343636313436376639306533333561633735
36393436346236646562336139613563363033666533393238653332646435323464346337633939
36366165313261373332613431313762373438623334663263643438383034396533666561623562
33383433623831336634336665663732356565353631396430663231663766626137633364373937
61346336643835666361663565353161626134646234373764316362636430343862656336363630
64363963646437636637306562643036376439303134333939383732373938353865616536383431
36653962323563343839393935653330373532333338373037653531363965396339616538623764
31346365663936353561366539336533396330633334663435323432356638386564396232363462
62353231326337663861383161363231326666326637303736363139643732326138613535663833
30633762383161323830643266333365663161303261363135323634613235623333383630653630
36396335373161643762326235323665313065353764383238613733663537636462626464626436
65393034643562626632643963323633323037373735333066376266396265356562373565633966
36343838383463383834636564333937316464306532633462613866636665636663323932623132
62653031633337353739326138376333336463623133623438663165333862613439383963663830
62343235626432366337613830623234613830646535613639616634623265393636633766306235
65303461626564633731313538333562616464383933386365303062356231636436616338643061
37326232623531623637306439373365303062383631336339633837353936336534336433643164
33613038303731363863393539366463363164356233383332623062303761353431653265646465
34323735313832393461653536653962346566363164336332633335336262613738336439363330
38393666363937323161396166383239613163323563363039646662346433363932303332663661
32396363353435316333663766356533376464373465666230656638343334653930393561353533
63656232373161643863346263396336323063316262653438363532646164303438313337383663
35343263393462373137666362346566303237353332393761653434303532613734653137613638
66383230336530393163333337336562666236326365333464663830303362643433643335323634
39353734633331643230376336656562666538363030313433653266623962623561633761383233
61323761356236646536303836303264623036346438396530303433626234643561653638333138
36376637623630353362313834343439336532643231393737343565613836333862636631303530
63396166366565396561386263613766613165616464313539383661613437376230626130353930
61313536383066656533623331666466623032633865343961623430356533646166366632313037
37336162346636353733376533646162333732313464313236316363313635323361623964616536
32353333623031356638343130343139333663613062643362323737643639393438343837303135
33323963663532346439613733653434623664636565323965643138343732326662343462383932
38353365643539633465326331323065316165653637313437613162613034346530653831313834
62636134343364623061393066383361343662616564306536306236643065623837383034356138
33393736303232653736633061326564633661656132666235313031316463393938313565306533
33633032303762656437363431313563653964626262666336643863373237633438613431366266
33626262656564303466393965656638663264373430333037373839353530666562343964366233
34326230646161646534303234666363643366643832356661623330393836386139383036663666
32643965346363653762353636386331646461333131343532303333343363336534303537613563
35633462666538336262343166336564376533373435333964393236306537336665626539633932
38346134316138313435326633396163303964613763346333343635636136363330613864323065
32646663333834383433373139363230633464663236653365333739326535343539336664326336
61303636376665366535313266656561366261343839663763356335323162363363313963616338
62363833356139643364386636333662313164386138343833383034646166363933666431303433
35316333326633653166386332353865666235666631666535313635336535353339663261666634
64623730303935363964356664623834623034343832663739643661303537353139646530316233
34393837393932306538306238346162623566653333613566363130376536333334303264373034
39373965636666626636343862616530306363613766643030396332613033616630306332613762
63373736383766336161313431353038366131323430663565323039383737313137396134343361
33613462346238656435626462633332653836336631336439383963363639653434303530333137
32626634373932643137366562643839663634363334353235346436396239346664316461636435
31346365363362636330646538386564316531643339353136306161316238353632366337303062
61623961643462363233343465333334393234383631313931643161616135303666633033326438
36343432656631623239306338383463623964353538363865353336653865643062653931363234
63366132373266343238313562353333386161333162383138316133613434666533303039616262
35623862623566616332663831346232366665373237653739396130356135636363306464333166
64346438363864303439366563316466643066393934306633366135376666353430333462393563
32663132623037646165306534396531346534383037646531303431386265366639636435643034
31303465383039623035623231323461303864613665383363633734353130623661353463646464
65623132343138663138666631373731356531643665333732326561623331386339396366333334
30373635326538636366373664363837386335616265366663323563363636396130323337303932
62623736363466353835633963316437623936373836323765353464383765336236386266323537
38373339383133613266313165616436346238353633303866396138663937363661616166626262
31376564356531636233623863303161313532666466346535393465353036343734613538363962
32666237643034346537663631613133373734653238333766336362383765646461626462383539
32383765626263386331376631316363323734373334346564633463623062646331653335386461
32303436663365386237316135356365356361386566333262386133306465623963383036333063
35303538363333353730313639643333636432356565633632336430313066613031306664303339
62383734383432646437623236303334653239363664383265313832663335303137396464613638
33386663383436386434663736646536326334626161316263383232653665313737316438656165
31623839613766646263346435663138353339366132383432663638373661643638623636333962
66653861643662623436626266653266336434646532353061363336396363656439366336383532
61636236396136356635643465383762303465373361613137653833666662633338353864393161
38396161393638616639373765623966313361653731623939653437363037373166616534356265
64636362333061396330373537663839626462643430353035386134613633313835643661656330
31633766393336333039623166636331353639623766363035666231616432313966663835626435
33326162643263313637316236316530303666613933613864336230323135643331306232643639
32313266366134613865303136316436313961303032373963333736323933616436616363663162
62643163663537316230373030643664623030623161636534303961376662373863306565333135
31323437356464653038643030353936356566613139313031613535666464353966306534316131
3534633538336465313563643332653431356430623438643739