Compare commits

26 Commits

Author SHA1 Message Date
Nathaniel Landau
041649cc5e build(deps): bump dependencies 2023-09-15 16:34:28 -04:00
Nathaniel Landau
ce0cb6c5f1 fix(valentina): add new env vars 2023-09-03 15:50:24 -04:00
Nathaniel Landau
98d9a5a86f build(precommit): add typos checks 2023-08-30 08:34:07 -04:00
Nathaniel Landau
f7ba237d0d build(deps): update dependencies 2023-08-28 09:14:37 -04:00
Nathaniel Landau
e134616692 fix(recyclarr): migrate to v5 2023-08-28 08:49:03 -04:00
Nathaniel Landau
9194190591 build(deps): bump dependencies 2023-08-08 09:30:49 -04:00
Nathaniel Landau
2bb55f3d51 fix(valentina): update environment variables 2023-08-08 09:26:58 -04:00
Nathaniel Landau
7365e8b3d6 fix(ansible): update transfer method config 2023-08-08 09:25:55 -04:00
Nathaniel Landau
87c2a4e1b4 fix(consul): pull image from hashicorp/consul 2023-08-08 09:25:04 -04:00
Nathaniel Landau
edd9704258 feat: add valentina discord bot 2023-06-18 13:27:38 -04:00
Nathaniel Landau
cb4a0e9f8a build: add exclude_paths to ansible-lint 2023-05-24 14:30:01 -04:00
Nathaniel Landau
57c1a42f66 fix(nomad): bump to v1.5.6 2023-05-22 10:03:11 -04:00
Nathaniel Landau
8499f5029b fix(proxy): update traefik version and bump RAM allocation 2023-05-22 09:34:19 -04:00
Nathaniel Landau
47288456a5 build(deps): bump dependencies 2023-05-22 09:33:10 -04:00
Nathaniel Landau
0f35061a2c fix(recyclarr): move config to Nomad task template 2023-04-25 11:40:17 -04:00
Nathaniel Landau
2842e27282 build(deps): bump dependencies 2023-04-25 11:32:55 -04:00
Nathaniel Landau
d36212b7d7 style: pass ansible-lint 2023-04-25 11:32:29 -04:00
Nathaniel Landau
76f4af703e build: poetry venv in project folder 2023-03-31 10:14:14 -04:00
Nathaniel Landau
9bb7eeb439 fix: bump versions 2023-03-29 16:18:26 -04:00
Nathaniel Landau
5526024244 fix(nomad): run nomad as root user to enable docker plugin on raspberry pis
Nomad is running as root rather than the Nomad user due to the Docker driver not being started when cgroups v2 are enabled. More info: https://github.com/hashicorp/nomad/pull/16063
2023-03-16 22:44:52 -04:00
Nathaniel Landau
ec52175c5c fix(nomad): authelia requires additional capabilities 2023-03-16 21:48:38 -04:00
Nathaniel Landau
be56f2a308 fix: revert to nomad v1.4.6 2023-03-13 21:29:09 -04:00
Nathaniel Landau
a757ff0cf2 build: add ignore file for ansible-lint 2023-03-13 10:25:20 -04:00
Nathaniel Landau
d6c155bef1 fix: bump versions 2023-03-12 16:52:16 -04:00
Nathaniel Landau
440d570c87 fix: bump authelia version 2023-02-12 14:41:05 -05:00
Nathaniel Landau
049267cec7 ci: add poe pb to run playbook 2023-02-12 14:40:47 -05:00
40 changed files with 2631 additions and 2834 deletions
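
Commit 5526024244 above explains the Nomad-as-root change: with cgroups v2 enabled on the Raspberry Pis, the Docker task driver never starts unless the agent itself runs as root. A minimal spot-check on a node after the playbook runs might look like this (the systemd unit path is an assumption, not something shown in this changeset):

$ grep -E '^User=' /etc/systemd/system/nomad.service   # assumed unit path; expect User=root or no User= line
$ sudo systemctl restart nomad
$ nomad node status -self -short                        # the same self-check the restart handler runs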

32
.ansible-lint-ignore Normal file

@@ -0,0 +1,32 @@
# This file contains ignores rule violations for ansible-lint
handlers/main.yml ignore-errors
handlers/main.yml name[casing]
main.yml name[casing]
main.yml name[missing]
tasks/backups.yml name[casing]
tasks/cluster_storage.yml name[casing]
tasks/consul.yml command-instead-of-module
tasks/consul.yml name[template]
tasks/consul.yml no-changed-when
tasks/debug.yml name[casing]
tasks/docker.yml name[casing]
tasks/docker.yml no-changed-when
tasks/interpolated_variables.yml name[casing]
tasks/logrotate.yml ignore-errors
tasks/logrotate.yml name[casing]
tasks/nomad.yml name[casing]
tasks/nomad.yml name[template]
tasks/orchestration_jobs.yml name[casing]
tasks/packages.yml ignore-errors
tasks/packages.yml name[casing]
tasks/pull_repositories.yml name[casing]
tasks/pull_repositories.yml no-changed-when
tasks/sanity.yml name[casing]
tasks/service_prometheus_nodeExporter.yml name[casing]
tasks/service_prometheus_nodeExporter.yml no-changed-when
tasks/tdarr.yml name[casing]
tasks/tdarr.yml no-changed-when
tasks/telegraf.yml name[casing]
tasks/telegraf.yml name[template]
tasks/telegraf.yml package-latest
vault.yml yaml[document-start]

.ansible-lint.yml

@@ -10,9 +10,10 @@ exclude_paths:
- galaxy-roles/
- .cz.yaml
- vault.yml
- .venv/
- ansible_collections/
skip_list:
- command-instead-of-shell
- name[template]
- ignore-errors
- meta-incorrect
@@ -21,10 +22,11 @@ skip_list:
- role-name
- unnamed-task
- var-naming
- name[casing]
- latest[git]
warn_list:
- experimental
- risky-file-permissions
- command-instead-of-module
- no-changed-when
- command-instead-of-shell

.pre-commit-config.yaml

@@ -1,7 +1,7 @@
---
repos:
- repo: "https://github.com/commitizen-tools/commitizen"
rev: v2.40.0
rev: 3.9.0
hooks:
- id: "commitizen"
@@ -31,7 +31,7 @@ repos:
args: [--markdown-linebreak-ext=md]
- repo: "https://github.com/adrienverge/yamllint.git"
rev: v1.29.0
rev: v1.32.0
hooks:
- id: yamllint
files: \.(yaml|yml)$
@@ -43,6 +43,11 @@ repos:
)\.(yaml|yml)$
entry: yamllint --strict --config-file .yamllint.yml
- repo: "https://github.com/crate-ci/typos"
rev: v1.16.11
hooks:
- id: typos
- repo: local
hooks:
- id: vault-pre-commit
@@ -50,10 +55,14 @@ repos:
entry: scripts/ansible-vault-precommit.sh
language: system
# This calls a custom script. Remove if you don't need it.
- id: stopwords
name: check stopwords
entry: scripts/stopwords.sh
name: stopwords
entry: git-stopwords
# args: ["-v"]
language: system
pass_filenames: true
types: [text]
- id: ansible-lint
name: running ansible-lint
@@ -68,12 +77,6 @@ repos:
files: \.sh\.j2$
entry: shellcheck -x --exclude=1009,1054,1056,1072,1073,1083,2001,2148
- id: "run-shellscripts-bats-tests"
name: run bats unit tests
language: system
files: \.bats$
entry: bats -t
- id: "ansible-encryption-check"
name: Ansible Encryption Check
language: system

8
.typos.toml Normal file

@@ -0,0 +1,8 @@
[default]
default.locale = "en_us"
[default.extend-words]
Hashi = "Hashi" # Hashicorpt
[files]
extend-exclude = ["galaxy-roles/"]
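
The new .typos.toml above backs the typos hook added to .pre-commit-config.yaml and the typos package added to the Poetry dependencies later in this diff. A hypothetical manual run, assuming the crate-ci typos CLI is on PATH:

$ typos --config .typos.toml           # scan the working tree with the config above
$ pre-commit run typos --all-files     # or run it through the new pre-commit hook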

ansible.cfg

@@ -9,4 +9,4 @@ display_skipped_hosts = False
vault_password_file = ./.password_file
[ssh_connection]
scp_if_ssh = True
transfer_method = smart

default_variables.yml

@@ -1,14 +1,16 @@
---
# ---------------------------------- SOFTWARE VERSIONS
authelia_version: 4.37.3
consul_version: 1.14.2
influxdb_version: 1.8.10
nomad_version: 1.4.3
prometheus_verssion: 1.1.2
authelia_version: 4.37.5
consul_version: 1.16.1
influxdb_version: 1.11.1
nomad_version: 1.6.2
prometheus_verssion: 2.46.0
recyclarr_version: 5.3.1
speedtest_cli_version: 1.2.0
tdarr_installer_version: 2.00.13
telegraf_version: 1.25.0
traefik_version: "v2.9.6"
telegraf_version: 1.27.2
traefik_version: "v2.10.4"
valentina_version: 1.9.3
# ---------------------------------- SERVICE STATIC PORT MAPPINGS
authelia_port: "9091"

handlers/main.yml

@@ -3,80 +3,96 @@
- name: Mount shared storage on Mac
become: true
ansible.builtin.command:
cmd: automount -cv
cmd: automount -cv
register: automount_output
failed_when: automount_output.rc > 0
changed_when: automount_output.rc == 0
when:
- "'macs' in group_names"
- not ansible_check_mode
- "'macs' in group_names"
- not ansible_check_mode
listen: "mac_run_automount"
- name: Mount and unmount shared storage on Mac
become: true
ansible.builtin.command:
cmd: automount -cvu
cmd: automount -cvu
register: automount_output
failed_when: automount_output.rc > 0
changed_when: automount_output.rc == 0
when:
- "'macs' in group_names"
- not ansible_check_mode
- "'macs' in group_names"
- not ansible_check_mode
listen: "mac_run_automount_unmount"
##################################### TELEGRAF
- name: (Re)Start telegraf (Debian)
become: true
ansible.builtin.service:
name: telegraf
state: restarted
name: telegraf
state: restarted
register: telegraf_service
failed_when: telegraf_service.rc > 0
changed_when: telegraf_service.rc == 0
when:
- ansible_os_family == 'Debian'
- ansible_os_family == 'Debian'
listen: restart_telegraf
- name: (Re)Start telegraf
ansible.builtin.shell:
cmd: /usr/local/bin/brew services restart telegraf
executable: /usr/local/bin/bash
cmd: /usr/local/bin/brew services restart telegraf
executable: /usr/local/bin/bash
ignore_errors: true
register: telegraf_service
failed_when: telegraf_service.rc > 0
changed_when: telegraf_service.rc == 0
when:
- ansible_os_family == 'Darwin'
- ansible_os_family == 'Darwin'
listen: restart_telegraf
##################################### NOMAD
- name: restart nomad (Debian)
- name: Restart nomad (Debian)
become: true
ansible.builtin.systemd:
name: nomad
enabled: true
state: restarted
name: nomad
enabled: true
state: restarted
register: nomad_service
failed_when: nomad_service.rc > 0
changed_when: nomad_service.rc == 0
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
listen: "restart nomad"
- name: "unload nomad agent (MacOSX)"
- name: "Unload nomad agent (MacOSX)"
ansible.builtin.command:
cmd: "launchctl unload -w {{ nomad_plist_macos }}"
failed_when: false
cmd: "launchctl unload -w {{ nomad_plist_macos }}"
register: nomad_service
changed_when: nomad_service.rc == 0
failed_when: nomad_service.rc > 0
when:
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
listen: "restart nomad"
- name: "load the nomad agent (MacOSX)"
- name: "Load the nomad agent (MacOSX)"
ansible.builtin.command:
cmd: "launchctl load -w {{ nomad_plist_macos }}"
cmd: "launchctl load -w {{ nomad_plist_macos }}"
register: nomad_service
changed_when: nomad_service.rc == 0
failed_when: nomad_service.rc > 0
when:
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
- ansible_os_family == 'Darwin'
- "'nostart' not in ansible_run_tags"
listen: "restart nomad"
- name: "ensure nomad is really running"
- name: "Ensure nomad is really running"
ansible.builtin.shell:
cmd: "sleep 10 && /usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
cmd: "set -o pipefail && sleep 10 && /usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
register: node_status_response
failed_when: node_status_response.rc > 0
changed_when: false
changed_when: node_status_response.rc == 0
when: "'nostart' not in ansible_run_tags"
listen: "restart nomad"
# - name: "Ensure sure Nomad service is really running"

134
main.yml

@@ -4,76 +4,76 @@
serial: 1
vars_files:
- default_variables.yml
- vault.yml
- default_variables.yml
- vault.yml
pre_tasks:
- name: Run sanity checks
ansible.builtin.import_tasks: tasks/sanity.yml
tags: ["always", "sanity"]
- name: populate service facts
ansible.builtin.service_facts:
tags: ["nomad", "consul"]
- name: Run debug tasks
ansible.builtin.import_tasks: tasks/debug.yml
tags: [never, debug]
- name: populate device specific variables
ansible.builtin.import_tasks: tasks/interpolated_variables.yml
tags: ["always"]
- name: Ensure we have up-to-date packages
ansible.builtin.import_tasks: tasks/packages.yml
tags: ["packages", "update"]
- name: Set clean nomad_jobs_dir variable
ansible.builtin.set_fact:
clean_nomad_jobs: true
tags: ["never", "clean"]
- name: Run sanity checks
ansible.builtin.import_tasks: tasks/sanity.yml
tags: ["always", "sanity"]
- name: Populate service facts
ansible.builtin.service_facts:
tags: ["nomad", "consul"]
- name: Run debug tasks
ansible.builtin.import_tasks: tasks/debug.yml
tags: [never, debug]
- name: Populate device specific variables
ansible.builtin.import_tasks: tasks/interpolated_variables.yml
tags: ["always"]
- name: Ensure we have up-to-date packages
ansible.builtin.import_tasks: tasks/packages.yml
tags: ["packages", "update"]
- name: Set clean nomad_jobs_dir variable
ansible.builtin.set_fact:
clean_nomad_jobs: true
tags: ["never", "clean"]
tasks:
- name: Configure cluster NFS mounts
ansible.builtin.import_tasks: tasks/cluster_storage.yml
tags: ["storage"]
when:
- is_nomad_client or is_nomad_server or is_shared_storage_client
- name: Install Docker
ansible.builtin.import_tasks: tasks/docker.yml
tags: ["docker"]
when: "'nas' not in group_names"
- name: Install and Upgrade Consul
ansible.builtin.import_tasks: tasks/consul.yml
tags: ["consul"]
when: is_consul_client or is_consul_server
- name: Install and Upgrade Nomad
ansible.builtin.import_tasks: tasks/nomad.yml
tags: ["nomad"]
when: is_nomad_client or is_nomad_server
- name: Orchestration Jobs
ansible.builtin.import_tasks: tasks/orchestration_jobs.yml
tags: ["jobs", "update"]
- name: Prometheus Node Exporter
ansible.builtin.import_tasks: tasks/service_prometheus_nodeExporter.yml
tags: ["prometheus_exporter"]
when:
- is_prometheus_node
- "'pis' in group_names"
- name: Install backup scripts
ansible.builtin.import_tasks: tasks/backups.yml
tags: ["backup", "backups"]
when: is_nomad_client or is_nomad_server
- name: Install and configure Telegraf
ansible.builtin.import_tasks: tasks/telegraf.yml
tags: ["telegraf"]
when: is_telegraf_client
- name: Pull repositories
ansible.builtin.import_tasks: tasks/pull_repositories.yml
tags: ["never", "update", "repos"]
- name: Configure log rotate
ansible.builtin.import_tasks: tasks/logrotate.yml
tags: ["logrotate"]
when: is_cluster_leader
- name: Install and configure tdarr
ansible.builtin.import_tasks: tasks/tdarr.yml
tags: ["tdarr"]
when: is_tdarr_server or is_tdarr_node
- name: Configure cluster NFS mounts
ansible.builtin.import_tasks: tasks/cluster_storage.yml
tags: ["storage"]
when:
- is_nomad_client or is_nomad_server or is_shared_storage_client
- name: Install Docker
ansible.builtin.import_tasks: tasks/docker.yml
tags: ["docker"]
when: "'nas' not in group_names"
- name: Install and Upgrade Consul
ansible.builtin.import_tasks: tasks/consul.yml
tags: ["consul"]
when: is_consul_client or is_consul_server
- name: Install and Upgrade Nomad
ansible.builtin.import_tasks: tasks/nomad.yml
tags: ["nomad"]
when: is_nomad_client or is_nomad_server
- name: Orchestration Jobs
ansible.builtin.import_tasks: tasks/orchestration_jobs.yml
tags: ["jobs", "update"]
- name: Prometheus Node Exporter
ansible.builtin.import_tasks: tasks/service_prometheus_nodeExporter.yml
tags: ["prometheus_exporter"]
when:
- is_prometheus_node
- "'pis' in group_names"
- name: Install backup scripts
ansible.builtin.import_tasks: tasks/backups.yml
tags: ["backup", "backups"]
when: is_nomad_client or is_nomad_server
- name: Install and configure Telegraf
ansible.builtin.import_tasks: tasks/telegraf.yml
tags: ["telegraf"]
when: is_telegraf_client
- name: Pull repositories
ansible.builtin.import_tasks: tasks/pull_repositories.yml
tags: ["never", "update", "repos"]
- name: Configure log rotate
ansible.builtin.import_tasks: tasks/logrotate.yml
tags: ["logrotate"]
when: is_cluster_leader
- name: Install and configure tdarr
ansible.builtin.import_tasks: tasks/tdarr.yml
tags: ["tdarr"]
when: is_tdarr_server or is_tdarr_node
handlers:
- ansible.builtin.import_tasks: handlers/main.yml
- ansible.builtin.import_tasks: handlers/main.yml

955
poetry.lock generated

File diff suppressed because it is too large

2
poetry.toml Normal file

@@ -0,0 +1,2 @@
[virtualenvs]
in-project = true

pyproject.toml

@@ -7,18 +7,26 @@
version = "0.2.0"
[tool.poetry.dependencies]
ansible = "^7.2.0"
ansible-lint = { version = "^6.12.1", markers = "platform_system != 'Windows'" }
ansible = "^8.3.0"
ansible-lint = { version = "^6.18.0", markers = "platform_system != 'Windows'" }
commitizen = "^2.40.0"
poethepoet = "^0.18.1"
pre-commit = "^3.0.4"
pre-commit = "^3.3.3"
python = "^3.9"
yamllint = "^1.29.0"
typos = "^1.16.8"
yamllint = "^1.32.0"
[tool.poetry.group.dev.dependencies]
black = "^23.7.0"
sh = "^2.0.6"
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]
[tool.black]
line-length = 100
[tool.commitizen]
bump_message = "bump(release): v$current_version → v$new_version"
tag_format = "v$version"
@@ -27,11 +35,18 @@
version_files = ["pyproject.toml:version"]
[tool.poe.tasks]
pb = """
ansible-playbook
--vault-password-file .password_file
main.yml
-i inventory.yml
"""
[tool.poe.tasks.lint]
help = "Run linters"
[[tool.poe.tasks.lint.sequence]]
shell = "yamllint --strict --config-file .yamllint.yml tasks/ handlers/ main.yml inventory.yml default_variables.yml"
cmd = "yamllint --strict --config-file .yamllint.yml tasks/ handlers/ main.yml inventory.yml default_variables.yml"
[[tool.poe.tasks.lint.sequence]]
shell = "ansible-lint --force-color --config-file .ansible-lint.yml"
cmd = "ansible-lint --force-color --config-file .ansible-lint.yml"

scripts/stopwords.sh

@@ -1,821 +0,0 @@
#!/usr/bin/env bash
# shellcheck disable=SC2317
_mainScript_() {
_customStopWords_() {
# DESC: Check if any specified stop words are in the commit diff. If found, the pre-commit hook will exit with a non-zero exit code.
# ARGS:
# $1 (Required): Path to file
# OUTS:
# 0: Success
# 1: Failure
# USAGE:
# _customStopWords_ "/path/to/file.sh"
# NOTE:
# Requires a plaintext stopword file located at
# `~/.git_stop_words` containing one stopword per line.
[[ $# == 0 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"
local _gitDiffTmp
local FILE_TO_CHECK="${1}"
_gitDiffTmp="${TMP_DIR}/${RANDOM}.${RANDOM}.${RANDOM}.diff.txt"
if [ -f "${STOP_WORD_FILE}" ]; then
if [[ $(basename "${STOP_WORD_FILE}") == "$(basename "${FILE_TO_CHECK}")" ]]; then
debug "$(basename "${1}"): Don't check stop words file for stop words."
return 0
fi
debug "$(basename "${FILE_TO_CHECK}"): Checking for stop words..."
# remove blank lines from stopwords file
sed '/^$/d' "${STOP_WORD_FILE}" >"${TMP_DIR}/pattern_file.txt"
# Check for stopwords
if git diff --cached -- "${FILE_TO_CHECK}" | grep i -q "new file mode"; then
if grep -i --file="${TMP_DIR}/pattern_file.txt" "${FILE_TO_CHECK}"; then
return 1
else
return 0
fi
else
# Add diff to a temporary file
git diff --cached -- "${FILE_TO_CHECK}" | grep '^+' >"${_gitDiffTmp}"
if grep -i --file="${TMP_DIR}/pattern_file.txt" "${_gitDiffTmp}"; then
return 1
else
return 0
fi
fi
else
notice "Could not find git stopwords file expected at '${STOP_WORD_FILE}'. Continuing..."
return 0
fi
}
# Don;t lint binary files
if [[ ${ARGS[0]} =~ \.(jpg|jpeg|gif|png|exe|zip|gzip|tiff|tar|dmg|ttf|otf|m4a|mp3|mkv|mov|avi|eot|svg|woff2?|aac|wav|flac|pdf|doc|xls|ppt|7z|bin|dmg|dat|sql|ico|mpe?g)$ ]]; then
_safeExit_ 0
fi
if ! _customStopWords_ "${ARGS[0]}"; then
error "Stop words found in ${ARGS[0]}"
_safeExit_ 1
fi
}
# end _mainScript_
# ################################## Flags and defaults
# Required variables
LOGFILE="${HOME}/logs/$(basename "$0").log"
QUIET=false
LOGLEVEL=ERROR
VERBOSE=false
FORCE=false
DRYRUN=false
declare -a ARGS=()
# Script specific
LOGLEVEL=NONE
STOP_WORD_FILE="${HOME}/.git_stop_words"
shopt -s nocasematch
# ################################## Custom utility functions (Pasted from repository)
# ################################## Functions required for this template to work
_setColors_() {
# DESC:
# Sets colors use for alerts.
# ARGS:
# None
# OUTS:
# None
# USAGE:
# printf "%s\n" "${blue}Some text${reset}"
if tput setaf 1 >/dev/null 2>&1; then
bold=$(tput bold)
underline=$(tput smul)
reverse=$(tput rev)
reset=$(tput sgr0)
if [[ $(tput colors) -ge 256 ]] >/dev/null 2>&1; then
white=$(tput setaf 231)
blue=$(tput setaf 38)
yellow=$(tput setaf 11)
green=$(tput setaf 82)
red=$(tput setaf 9)
purple=$(tput setaf 171)
gray=$(tput setaf 250)
else
white=$(tput setaf 7)
blue=$(tput setaf 38)
yellow=$(tput setaf 3)
green=$(tput setaf 2)
red=$(tput setaf 9)
purple=$(tput setaf 13)
gray=$(tput setaf 7)
fi
else
bold="\033[4;37m"
reset="\033[0m"
underline="\033[4;37m"
# shellcheck disable=SC2034
reverse=""
white="\033[0;37m"
blue="\033[0;34m"
yellow="\033[0;33m"
green="\033[1;32m"
red="\033[0;31m"
purple="\033[0;35m"
gray="\033[0;37m"
fi
}
_alert_() {
# DESC:
# Controls all printing of messages to log files and stdout.
# ARGS:
# $1 (required) - The type of alert to print
# (success, header, notice, dryrun, debug, warning, error,
# fatal, info, input)
# $2 (required) - The message to be printed to stdout and/or a log file
# $3 (optional) - Pass '${LINENO}' to print the line number where the _alert_ was triggered
# OUTS:
# stdout: The message is printed to stdout
# log file: The message is printed to a log file
# USAGE:
# [_alertType] "[MESSAGE]" "${LINENO}"
# NOTES:
# - The colors of each alert type are set in this function
# - For specified alert types, the funcstac will be printed
local _color
local _alertType="${1}"
local _message="${2}"
local _line="${3-}" # Optional line number
[[ $# -lt 2 ]] && fatal 'Missing required argument to _alert_'
if [[ -n ${_line} && ${_alertType} =~ ^fatal && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
_message="${_message} ${gray}(line: ${_line}) $(_printFuncStack_)"
elif [[ -n ${_line} && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
_message="${_message} ${gray}(line: ${_line})"
elif [[ -z ${_line} && ${_alertType} =~ ^fatal && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
_message="${_message} ${gray}$(_printFuncStack_)"
fi
if [[ ${_alertType} =~ ^(error|fatal) ]]; then
_color="${bold}${red}"
elif [ "${_alertType}" == "info" ]; then
_color="${gray}"
elif [ "${_alertType}" == "warning" ]; then
_color="${red}"
elif [ "${_alertType}" == "success" ]; then
_color="${green}"
elif [ "${_alertType}" == "debug" ]; then
_color="${purple}"
elif [ "${_alertType}" == "header" ]; then
_color="${bold}${white}${underline}"
elif [ "${_alertType}" == "notice" ]; then
_color="${bold}"
elif [ "${_alertType}" == "input" ]; then
_color="${bold}${underline}"
elif [ "${_alertType}" = "dryrun" ]; then
_color="${blue}"
else
_color=""
fi
_writeToScreen_() {
("${QUIET}") && return 0 # Print to console when script is not 'quiet'
[[ ${VERBOSE} == false && ${_alertType} =~ ^(debug|verbose) ]] && return 0
if ! [[ -t 1 || -z ${TERM-} ]]; then # Don't use colors on non-recognized terminals
_color=""
reset=""
fi
if [[ ${_alertType} == header ]]; then
printf "${_color}%s${reset}\n" "${_message}"
else
printf "${_color}[%7s] %s${reset}\n" "${_alertType}" "${_message}"
fi
}
_writeToScreen_
_writeToLog_() {
[[ ${_alertType} == "input" ]] && return 0
[[ ${LOGLEVEL} =~ (off|OFF|Off) ]] && return 0
if [ -z "${LOGFILE-}" ]; then
LOGFILE="$(pwd)/$(basename "$0").log"
fi
[ ! -d "$(dirname "${LOGFILE}")" ] && mkdir -p "$(dirname "${LOGFILE}")"
[[ ! -f ${LOGFILE} ]] && touch "${LOGFILE}"
# Don't use colors in logs
local _cleanmessage
_cleanmessage="$(printf "%s" "${_message}" | sed -E 's/(\x1b)?\[(([0-9]{1,2})(;[0-9]{1,3}){0,2})?[mGK]//g')"
# Print message to log file
printf "%s [%7s] %s %s\n" "$(date +"%b %d %R:%S")" "${_alertType}" "[$(/bin/hostname)]" "${_cleanmessage}" >>"${LOGFILE}"
}
# Write specified log level data to logfile
case "${LOGLEVEL:-ERROR}" in
ALL | all | All)
_writeToLog_
;;
DEBUG | debug | Debug)
_writeToLog_
;;
INFO | info | Info)
if [[ ${_alertType} =~ ^(error|fatal|warning|info|notice|success) ]]; then
_writeToLog_
fi
;;
NOTICE | notice | Notice)
if [[ ${_alertType} =~ ^(error|fatal|warning|notice|success) ]]; then
_writeToLog_
fi
;;
WARN | warn | Warn)
if [[ ${_alertType} =~ ^(error|fatal|warning) ]]; then
_writeToLog_
fi
;;
ERROR | error | Error)
if [[ ${_alertType} =~ ^(error|fatal) ]]; then
_writeToLog_
fi
;;
FATAL | fatal | Fatal)
if [[ ${_alertType} =~ ^fatal ]]; then
_writeToLog_
fi
;;
OFF | off)
return 0
;;
*)
if [[ ${_alertType} =~ ^(error|fatal) ]]; then
_writeToLog_
fi
;;
esac
} # /_alert_
error() { _alert_ error "${1}" "${2-}"; }
warning() { _alert_ warning "${1}" "${2-}"; }
notice() { _alert_ notice "${1}" "${2-}"; }
info() { _alert_ info "${1}" "${2-}"; }
success() { _alert_ success "${1}" "${2-}"; }
dryrun() { _alert_ dryrun "${1}" "${2-}"; }
input() { _alert_ input "${1}" "${2-}"; }
header() { _alert_ header "${1}" "${2-}"; }
debug() { _alert_ debug "${1}" "${2-}"; }
fatal() {
_alert_ fatal "${1}" "${2-}"
_safeExit_ "1"
}
_printFuncStack_() {
# DESC:
# Prints the function stack in use. Used for debugging, and error reporting.
# ARGS:
# None
# OUTS:
# stdout: Prints [function]:[file]:[line]
# NOTE:
# Does not print functions from the alert class
local _i
declare -a _funcStackResponse=()
for ((_i = 1; _i < ${#BASH_SOURCE[@]}; _i++)); do
case "${FUNCNAME[${_i}]}" in
_alert_ | _trapCleanup_ | fatal | error | warning | notice | info | debug | dryrun | header | success)
continue
;;
*)
_funcStackResponse+=("${FUNCNAME[${_i}]}:$(basename "${BASH_SOURCE[${_i}]}"):${BASH_LINENO[_i - 1]}")
;;
esac
done
printf "( "
printf %s "${_funcStackResponse[0]}"
printf ' < %s' "${_funcStackResponse[@]:1}"
printf ' )\n'
}
_safeExit_() {
# DESC:
# Cleanup and exit from a script
# ARGS:
# $1 (optional) - Exit code (defaults to 0)
# OUTS:
# None
if [[ -d ${SCRIPT_LOCK-} ]]; then
if command rm -rf "${SCRIPT_LOCK}"; then
debug "Removing script lock"
else
warning "Script lock could not be removed. Try manually deleting ${yellow}'${SCRIPT_LOCK}'"
fi
fi
if [[ -n ${TMP_DIR-} && -d ${TMP_DIR-} ]]; then
if [[ ${1-} == 1 && -n "$(ls "${TMP_DIR}")" ]]; then
command rm -r "${TMP_DIR}"
else
command rm -r "${TMP_DIR}"
debug "Removing temp directory"
fi
fi
trap - INT TERM EXIT
exit "${1:-0}"
}
_trapCleanup_() {
# DESC:
# Log errors and cleanup from script when an error is trapped. Called by 'trap'
# ARGS:
# $1: Line number where error was trapped
# $2: Line number in function
# $3: Command executing at the time of the trap
# $4: Names of all shell functions currently in the execution call stack
# $5: Scriptname
# $6: $BASH_SOURCE
# USAGE:
# trap '_trapCleanup_ ${LINENO} ${BASH_LINENO} "${BASH_COMMAND}" "${FUNCNAME[*]}" "${0}" "${BASH_SOURCE[0]}"' EXIT INT TERM SIGINT SIGQUIT SIGTERM ERR
# OUTS:
# Exits script with error code 1
local _line=${1-} # LINENO
local _linecallfunc=${2-}
local _command="${3-}"
local _funcstack="${4-}"
local _script="${5-}"
local _sourced="${6-}"
# Replace the cursor in-case 'tput civis' has been used
tput cnorm
if declare -f "fatal" &>/dev/null && declare -f "_printFuncStack_" &>/dev/null; then
_funcstack="'$(printf "%s" "${_funcstack}" | sed -E 's/ / < /g')'"
if [[ ${_script##*/} == "${_sourced##*/}" ]]; then
fatal "${7-} command: '${_command}' (line: ${_line}) [func: $(_printFuncStack_)]"
else
fatal "${7-} command: '${_command}' (func: ${_funcstack} called at line ${_linecallfunc} of '${_script##*/}') (line: ${_line} of '${_sourced##*/}') "
fi
else
printf "%s\n" "Fatal error trapped. Exiting..."
fi
if declare -f _safeExit_ &>/dev/null; then
_safeExit_ 1
else
exit 1
fi
}
_makeTempDir_() {
# DESC:
# Creates a temp directory to house temporary files
# ARGS:
# $1 (Optional) - First characters/word of directory name
# OUTS:
# Sets $TMP_DIR variable to the path of the temp directory
# USAGE:
# _makeTempDir_ "$(basename "$0")"
[ -d "${TMP_DIR-}" ] && return 0
if [ -n "${1-}" ]; then
TMP_DIR="${TMPDIR:-/tmp/}${1}.${RANDOM}.${RANDOM}.$$"
else
TMP_DIR="${TMPDIR:-/tmp/}$(basename "$0").${RANDOM}.${RANDOM}.${RANDOM}.$$"
fi
(umask 077 && mkdir "${TMP_DIR}") || {
fatal "Could not create temporary directory! Exiting."
}
debug "\$TMP_DIR=${TMP_DIR}"
}
# shellcheck disable=SC2120
_acquireScriptLock_() {
# DESC:
# Acquire script lock to prevent running the same script a second time before the
# first instance exits
# ARGS:
# $1 (optional) - Scope of script execution lock (system or user)
# OUTS:
# exports $SCRIPT_LOCK - Path to the directory indicating we have the script lock
# Exits script if lock cannot be acquired
# NOTE:
# If the lock was acquired it's automatically released in _safeExit_()
local _lockDir
if [[ ${1-} == 'system' ]]; then
_lockDir="${TMPDIR:-/tmp/}$(basename "$0").lock"
else
_lockDir="${TMPDIR:-/tmp/}$(basename "$0").${UID}.lock"
fi
if command mkdir "${_lockDir}" 2>/dev/null; then
readonly SCRIPT_LOCK="${_lockDir}"
debug "Acquired script lock: ${yellow}${SCRIPT_LOCK}${purple}"
else
if declare -f "_safeExit_" &>/dev/null; then
error "Unable to acquire script lock: ${yellow}${_lockDir}${red}"
fatal "If you trust the script isn't running, delete the lock dir"
else
printf "%s\n" "ERROR: Could not acquire script lock. If you trust the script isn't running, delete: ${_lockDir}"
exit 1
fi
fi
}
_setPATH_() {
# DESC:
# Add directories to $PATH so script can find executables
# ARGS:
# $@ - One or more paths
# OPTS:
# -x - Fail if directories are not found
# OUTS:
# 0: Success
# 1: Failure
# Adds items to $PATH
# USAGE:
# _setPATH_ "/usr/local/bin" "${HOME}/bin" "$(npm bin)"
[[ $# == 0 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"
local opt
local OPTIND=1
local _failIfNotFound=false
while getopts ":xX" opt; do
case ${opt} in
x | X) _failIfNotFound=true ;;
*)
{
error "Unrecognized option '${1}' passed to _backupFile_" "${LINENO}"
return 1
}
;;
esac
done
shift $((OPTIND - 1))
local _newPath
for _newPath in "$@"; do
if [ -d "${_newPath}" ]; then
if ! printf "%s" "${PATH}" | grep -Eq "(^|:)${_newPath}($|:)"; then
if PATH="${_newPath}:${PATH}"; then
debug "Added '${_newPath}' to PATH"
else
debug "'${_newPath}' already in PATH"
fi
else
debug "_setPATH_: '${_newPath}' already exists in PATH"
fi
else
debug "_setPATH_: can not find: ${_newPath}"
if [[ ${_failIfNotFound} == true ]]; then
return 1
fi
continue
fi
done
return 0
}
_useGNUutils_() {
# DESC:
# Add GNU utilities to PATH to allow consistent use of sed/grep/tar/etc. on MacOS
# ARGS:
# None
# OUTS:
# 0 if successful
# 1 if unsuccessful
# PATH: Adds GNU utilities to the path
# USAGE:
# # if ! _useGNUUtils_; then exit 1; fi
# NOTES:
# GNU utilities can be added to MacOS using Homebrew
! declare -f "_setPATH_" &>/dev/null && fatal "${FUNCNAME[0]} needs function _setPATH_"
if _setPATH_ \
"/usr/local/opt/gnu-tar/libexec/gnubin" \
"/usr/local/opt/coreutils/libexec/gnubin" \
"/usr/local/opt/gnu-sed/libexec/gnubin" \
"/usr/local/opt/grep/libexec/gnubin" \
"/usr/local/opt/findutils/libexec/gnubin" \
"/opt/homebrew/opt/findutils/libexec/gnubin" \
"/opt/homebrew/opt/gnu-sed/libexec/gnubin" \
"/opt/homebrew/opt/grep/libexec/gnubin" \
"/opt/homebrew/opt/coreutils/libexec/gnubin" \
"/opt/homebrew/opt/gnu-tar/libexec/gnubin"; then
return 0
else
return 1
fi
}
_homebrewPath_() {
# DESC:
# Add homebrew bin dir to PATH
# ARGS:
# None
# OUTS:
# 0 if successful
# 1 if unsuccessful
# PATH: Adds homebrew bin directory to PATH
# USAGE:
# # if ! _homebrewPath_; then exit 1; fi
! declare -f "_setPATH_" &>/dev/null && fatal "${FUNCNAME[0]} needs function _setPATH_"
if _uname=$(command -v uname); then
if "${_uname}" | tr '[:upper:]' '[:lower:]' | grep -q 'darwin'; then
if _setPATH_ "/usr/local/bin" "/opt/homebrew/bin"; then
return 0
else
return 1
fi
fi
else
if _setPATH_ "/usr/local/bin" "/opt/homebrew/bin"; then
return 0
else
return 1
fi
fi
}
_parseOptions_() {
# DESC:
# Iterates through options passed to script and sets variables. Will break -ab into -a -b
# when needed and --foo=bar into --foo bar
# ARGS:
# $@ from command line
# OUTS:
# Sets array 'ARGS' containing all arguments passed to script that were not parsed as options
# USAGE:
# _parseOptions_ "$@"
# Iterate over options
local _optstring=h
declare -a _options
local _c
local i
while (($#)); do
case $1 in
# If option is of type -ab
-[!-]?*)
# Loop over each character starting with the second
for ((i = 1; i < ${#1}; i++)); do
_c=${1:i:1}
_options+=("-${_c}") # Add current char to options
# If option takes a required argument, and it's not the last char make
# the rest of the string its argument
if [[ ${_optstring} == *"${_c}:"* && -n ${1:i+1} ]]; then
_options+=("${1:i+1}")
break
fi
done
;;
# If option is of type --foo=bar
--?*=*) _options+=("${1%%=*}" "${1#*=}") ;;
# add --endopts for --
--) _options+=(--endopts) ;;
# Otherwise, nothing special
*) _options+=("$1") ;;
esac
shift
done
set -- "${_options[@]-}"
unset _options
# Read the options and set stuff
# shellcheck disable=SC2034
while [[ ${1-} == -?* ]]; do
case $1 in
# Custom options
# Common options
-h | --help)
_usage_
_safeExit_
;;
--loglevel)
shift
LOGLEVEL=${1}
;;
--logfile)
shift
LOGFILE="${1}"
;;
-n | --dryrun) DRYRUN=true ;;
-v | --verbose) VERBOSE=true ;;
-q | --quiet) QUIET=true ;;
--force) FORCE=true ;;
--endopts)
shift
break
;;
*)
if declare -f _safeExit_ &>/dev/null; then
fatal "invalid option: $1"
else
printf "%s\n" "ERROR: Invalid option: $1"
exit 1
fi
;;
esac
shift
done
if [[ -z ${*} || ${*} == null ]]; then
ARGS=()
else
ARGS+=("$@") # Store the remaining user input as arguments.
fi
}
_columns_() {
# DESC:
# Prints a two column output from a key/value pair.
# Optionally pass a number of 2 space tabs to indent the output.
# ARGS:
# $1 (required): Key name (Left column text)
# $2 (required): Long value (Right column text. Wraps around if too long)
# $3 (optional): Number of 2 character tabs to indent the command (default 1)
# OPTS:
# -b Bold the left column
# -u Underline the left column
# -r Reverse background and foreground colors
# OUTS:
# stdout: Prints the output in columns
# NOTE:
# Long text or ANSI colors in the first column may create display issues
# USAGE:
# _columns_ "Key" "Long value text" [tab level]
[[ $# -lt 2 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"
local opt
local OPTIND=1
local _style=""
while getopts ":bBuUrR" opt; do
case ${opt} in
b | B) _style="${_style}${bold}" ;;
u | U) _style="${_style}${underline}" ;;
r | R) _style="${_style}${reverse}" ;;
*) fatal "Unrecognized option '${1}' passed to ${FUNCNAME[0]}. Exiting." ;;
esac
done
shift $((OPTIND - 1))
local _key="${1}"
local _value="${2}"
local _tabLevel="${3-}"
local _tabSize=2
local _line
local _rightIndent
local _leftIndent
if [[ -z ${3-} ]]; then
_tabLevel=0
fi
_leftIndent="$((_tabLevel * _tabSize))"
local _leftColumnWidth="$((30 + _leftIndent))"
if [ "$(tput cols)" -gt 180 ]; then
_rightIndent=110
elif [ "$(tput cols)" -gt 160 ]; then
_rightIndent=90
elif [ "$(tput cols)" -gt 130 ]; then
_rightIndent=60
elif [ "$(tput cols)" -gt 120 ]; then
_rightIndent=50
elif [ "$(tput cols)" -gt 110 ]; then
_rightIndent=40
elif [ "$(tput cols)" -gt 100 ]; then
_rightIndent=30
elif [ "$(tput cols)" -gt 90 ]; then
_rightIndent=20
elif [ "$(tput cols)" -gt 80 ]; then
_rightIndent=10
else
_rightIndent=0
fi
local _rightWrapLength=$(($(tput cols) - _leftColumnWidth - _leftIndent - _rightIndent))
local _first_line=0
while read -r _line; do
if [[ ${_first_line} -eq 0 ]]; then
_first_line=1
else
_key=" "
fi
printf "%-${_leftIndent}s${_style}%-${_leftColumnWidth}b${reset} %b\n" "" "${_key}${reset}" "${_line}"
done <<<"$(fold -w${_rightWrapLength} -s <<<"${_value}")"
}
_usage_() {
cat <<USAGE_TEXT
${bold}$(basename "$0") [OPTION]... [FILE]...${reset}
Custom pre-commit hook script. This script is intended to be used as part of the pre-commit pipeline managed within .pre-commit-config.yaml.
${bold}${underline}Options:${reset}
$(_columns_ -b -- '-h, --help' "Display this help and exit" 2)
$(_columns_ -b -- "--loglevel [LEVEL]" "One of: FATAL, ERROR (default), WARN, INFO, NOTICE, DEBUG, ALL, OFF" 2)
$(_columns_ -b -- "--logfile [FILE]" "Full PATH to logfile. (Default is '\${HOME}/logs/$(basename "$0").log')" 2)
$(_columns_ -b -- "-n, --dryrun" "Non-destructive. Makes no permanent changes." 2)
$(_columns_ -b -- "-q, --quiet" "Quiet (no output)" 2)
$(_columns_ -b -- "-v, --verbose" "Output more information. (Items echoed to 'verbose')" 2)
$(_columns_ -b -- "--force" "Skip all user interaction. Implied 'Yes' to all actions." 2)
${bold}${underline}Example Usage:${reset}
${gray}# Run the script and specify log level and log file.${reset}
$(basename "$0") -vn --logfile "/path/to/file.log" --loglevel 'WARN'
USAGE_TEXT
}
# ################################## INITIALIZE AND RUN THE SCRIPT
# (Comment or uncomment the lines below to customize script behavior)
trap '_trapCleanup_ ${LINENO} ${BASH_LINENO} "${BASH_COMMAND}" "${FUNCNAME[*]}" "${0}" "${BASH_SOURCE[0]}"' EXIT INT TERM SIGINT SIGQUIT SIGTERM
# Trap errors in subshells and functions
set -o errtrace
# Exit on error. Append '||true' if you expect an error
set -o errexit
# Use last non-zero exit code in a pipeline
set -o pipefail
# Confirm we have BASH greater than v4
[ "${BASH_VERSINFO:-0}" -ge 4 ] || {
printf "%s\n" "ERROR: BASH_VERSINFO is '${BASH_VERSINFO:-0}'. This script requires BASH v4 or greater."
exit 1
}
# Make `for f in *.txt` work when `*.txt` matches zero files
shopt -s nullglob globstar
# Set IFS to preferred implementation
IFS=$' \n\t'
# Run in debug mode
# set -o xtrace
# Initialize color constants
_setColors_
# Disallow expansion of unset variables
set -o nounset
# Force arguments when invoking the script
# [[ $# -eq 0 ]] && _parseOptions_ "-h"
# Parse arguments passed to script
_parseOptions_ "$@"
# Create a temp directory '$TMP_DIR'
_makeTempDir_ "$(basename "$0")"
# Acquire script lock
# _acquireScriptLock_
# Add Homebrew bin directory to PATH (MacOS)
# _homebrewPath_
# Source GNU utilities from Homebrew (MacOS)
# _useGNUutils_
# Run the main logic script
_mainScript_
# Exit cleanly
_safeExit_

150
scripts/update_dependencies.py Executable file

@@ -0,0 +1,150 @@
#!/usr/bin/env python
"""Script to update the pyproject.toml file with the latest versions of the dependencies."""
from pathlib import Path
from textwrap import wrap
try:
import tomllib
except ModuleNotFoundError: # pragma: no cover
import tomli as tomllib # type: ignore [no-redef]
import sh
from rich.console import Console
console = Console()
def dryrun(msg: str) -> None:
"""Print a message if the dry run flag is set.
Args:
msg: Message to print
"""
console.print(f"[cyan]DRYRUN | {msg}[/cyan]")
def success(msg: str) -> None:
"""Print a success message without using logging.
Args:
msg: Message to print
"""
console.print(f"[green]SUCCESS | {msg}[/green]")
def warning(msg: str) -> None:
"""Print a warning message without using logging.
Args:
msg: Message to print
"""
console.print(f"[yellow]WARNING | {msg}[/yellow]")
def error(msg: str) -> None:
"""Print an error message without using logging.
Args:
msg: Message to print
"""
console.print(f"[red]ERROR | {msg}[/red]")
def notice(msg: str) -> None:
"""Print a notice message without using logging.
Args:
msg: Message to print
"""
console.print(f"[bold]NOTICE | {msg}[/bold]")
def info(msg: str) -> None:
"""Print a notice message without using logging.
Args:
msg: Message to print
"""
console.print(f"INFO | {msg}")
def usage(msg: str, width: int = 80) -> None:
"""Print a usage message without using logging.
Args:
msg: Message to print
width (optional): Width of the message
"""
for _n, line in enumerate(wrap(msg, width=width)):
if _n == 0:
console.print(f"[dim]USAGE | {line}")
else:
console.print(f"[dim] | {line}")
def debug(msg: str) -> None:
"""Print a debug message without using logging.
Args:
msg: Message to print
"""
console.print(f"[blue]DEBUG | {msg}[/blue]")
def dim(msg: str) -> None:
"""Print a message in dimmed color.
Args:
msg: Message to print
"""
console.print(f"[dim]{msg}[/dim]")
# Load the pyproject.toml file
pyproject = Path(__file__).parents[1] / "pyproject.toml"
if not pyproject.exists():
console.print("pyproject.toml file not found")
raise SystemExit(1)
with pyproject.open("rb") as f:
try:
data = tomllib.load(f)
except tomllib.TOMLDecodeError as e:
raise SystemExit(1) from e
# Get the latest versions of all dependencies
info("Getting latest versions of dependencies...")
packages: dict = {}
for line in sh.poetry("--no-ansi", "show", "--outdated").splitlines():
package, current, latest = line.split()[:3]
packages[package] = {"current_version": current, "new_version": latest}
if not packages:
success("All dependencies are up to date")
raise SystemExit(0)
dependencies = data["tool"]["poetry"]["dependencies"]
groups = data["tool"]["poetry"]["group"]
for p in dependencies:
if p in packages:
notice(
f"Updating {p} from {packages[p]['current_version']} to {packages[p]['new_version']}"
)
sh.poetry("add", f"{p}@latest", _fg=True)
for group in groups:
for p in groups[group]["dependencies"]:
if p in packages:
notice(
f"Updating {p} from {packages[p]['current_version']} to {packages[p]['new_version']}"
)
sh.poetry("add", f"{p}@latest", "--group", group, _fg=True)
sh.poetry("update", _fg=True)
success("All dependencies are up to date")
raise SystemExit(0)
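
The new scripts/update_dependencies.py drives Poetry through the sh package, so it has to run inside the project's virtual environment. A hypothetical invocation:

$ poetry run python scripts/update_dependencies.py   # bumps outdated pins in pyproject.toml, then runs poetry update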

tasks/backups.yml

@@ -6,42 +6,42 @@
# 1. Copies a backup and restore shellscript to /usr/local/bin
# 2. Edits the sudoers file to allow the script to be invoked with sudo privileges
- name: copy backup shellscript to server
- name: Copy backup shellscript to server
become: true
ansible.builtin.template:
src: scripts/service_backups.sh.j2
dest: /usr/local/bin/service_backups
mode: 0755
src: scripts/service_backups.sh.j2
dest: /usr/local/bin/service_backups
mode: 0755
when:
- is_nomad_client or is_nomad_server
- is_nomad_client or is_nomad_server
- name: copy restore shellscript to server
- name: Copy restore shellscript to server
become: true
ansible.builtin.template:
src: scripts/service_restore.sh.j2
dest: /usr/local/bin/service_restore
mode: 0755
src: scripts/service_restore.sh.j2
dest: /usr/local/bin/service_restore
mode: 0755
when:
- is_nomad_client or is_nomad_server
- is_nomad_client or is_nomad_server
- name: ensure nomad user can run sudo with the restore script
- name: Ensure nomad user can run sudo with the restore script
become: true
ansible.builtin.lineinfile:
path: /etc/sudoers
state: present
line: "nomad ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
path: /etc/sudoers
state: present
line: "nomad ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
when:
- is_nomad_client or is_nomad_server
- "'pis' in group_names"
- is_nomad_client or is_nomad_server
- "'pis' in group_names"
- name: ensure my user can run sudo with the restore script
- name: Ensure my user can run sudo with the restore script
become: true
ansible.builtin.lineinfile:
path: /etc/sudoers
state: present
line: "{{ ansible_user }} ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
path: /etc/sudoers
state: present
line: "{{ ansible_user }} ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
when:
- is_nomad_client or is_nomad_server
- "'pis' in group_names"
- is_nomad_client or is_nomad_server
- "'pis' in group_names"

tasks/cluster_storage.yml

@@ -6,159 +6,159 @@
- name: "Mount storage on Raspberry Pis"
when: "'pis' in group_names"
block:
- name: ensure local mount points exist
become: true
ansible.builtin.file:
path: "{{ item.local }}"
state: directory
mode: 0777
# owner: "{{ ansible_user_uid }}"
# group: "{{ ansible_user_gid }}"
loop: "{{ rpi_nfs_mounts_list }}"
- name: Ensure local mount points exist
become: true
ansible.builtin.file:
path: "{{ item.local }}"
state: directory
mode: 0777
# owner: "{{ ansible_user_uid }}"
# group: "{{ ansible_user_gid }}"
loop: "{{ rpi_nfs_mounts_list }}"
- name: remove old nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: absent
loop: "{{ rpi_nfs_mounts_remove }}"
- name: Remove old nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: absent
loop: "{{ rpi_nfs_mounts_remove }}"
- name: mount all nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: mounted
boot: true
loop: "{{ rpi_nfs_mounts_list }}"
- name: Mount all nfs drives
become: true
ansible.posix.mount:
path: "{{ item.local }}"
src: "{{ item.src }}"
fstype: nfs
opts: defaults,hard,intr,timeo=14
state: mounted
boot: true
loop: "{{ rpi_nfs_mounts_list }}"
# --------------------------------- Mount on Macs
# https://gist.github.com/l422y/8697518
- name: "Mount storage on Macs"
when: "'macs' in group_names"
block:
- name: create mount_point
become: true
ansible.builtin.file:
path: "{{ mac_storage_mount_point }}"
state: directory
mode: 0755
- name: Create mount_point
become: true
ansible.builtin.file:
path: "{{ mac_storage_mount_point }}"
state: directory
mode: 0755
# I ran into problems getting this to run successfully. If errors occur, add the line manually using:
# $ sudo nano /private/etc/auto_master
# I ran into problems getting this to run successfully. If errors occur, add the line manually using:
# $ sudo nano /private/etc/auto_master
- name: add NFS shared drives to macs
when: mac_autofs_type == 'nfs'
block:
- name: add auto_nfs to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_nfs"
line: "/- auto_nfs -nobrowse,nosuid"
unsafe_writes: true
- name: Add NFS shared drives to macs
when: mac_autofs_type == 'nfs'
block:
- name: Add auto_nfs to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_nfs"
line: "/- auto_nfs -nobrowse,nosuid"
unsafe_writes: true
- name: add mounts to /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_nfs_mounts_list if mac_nfs_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: Add mounts to /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_nfs_mounts_list if mac_nfs_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: remove old mounts from /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: absent
unsafe_writes: true
mode: 0644
notify: mac_run_automount_unmount
loop: "{{ mac_nfs_mounts_remove if mac_nfs_mounts_remove is iterable else [] }}"
- name: Remove old mounts from /etc/auto_nfs
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_nfs
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=nfs,bg,intr,noowners,rw,vers=4 nfs://{{ item.src }}"
state: absent
unsafe_writes: true
mode: 0644
notify: mac_run_automount_unmount
loop: "{{ mac_nfs_mounts_remove if mac_nfs_mounts_remove is iterable else [] }}"
- name: add AFP shared drives to macs
when: mac_autofs_type == 'afp'
block:
- name: add auto_afp to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_afp"
line: "/- auto_afp -nobrowse,nosuid"
unsafe_writes: true
- name: Add AFP shared drives to macs
when: mac_autofs_type == 'afp'
block:
- name: Add auto_afp to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_afp"
line: "/- auto_afp -nobrowse,nosuid"
unsafe_writes: true
- name: add mounts to /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: Add mounts to /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: remove mounts from /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount
- name: Remove mounts from /etc/auto_afp
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_afp
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount
- name: add SMB shared drives to macs
when: mac_autofs_type == 'smb'
block:
- name: add auto_smb to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_smb"
line: "/- auto_smb -noowners,nosuid"
unsafe_writes: true
- name: Add SMB shared drives to macs
when: mac_autofs_type == 'smb'
block:
- name: Add auto_smb to "/private/etc/auto_master"
become: true
ansible.builtin.lineinfile:
path: /private/etc/auto_master
regexp: "auto_smb"
line: "/- auto_smb -noowners,nosuid"
unsafe_writes: true
- name: add mounts to /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=smbfs,soft,noowners,nosuid,rw ://{{ smb_username }}:{{ smb_password }}@{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: Add mounts to /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=smbfs,soft,noowners,nosuid,rw ://{{ smb_username }}:{{ smb_password }}@{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_list if mac_afp_or_smb_mounts_list is iterable else [] }}"
notify: mac_run_automount
- name: remove mounts from /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount
- name: Remove mounts from /etc/auto_smb
become: true
ansible.builtin.lineinfile:
create: true
path: /private/etc/auto_smb
regexp: "{{ item.src }}"
line: "{{ item.local }} -fstype=afp,rw afp://{{ item.src }}"
state: present
unsafe_writes: true
mode: 0644
loop: "{{ mac_afp_or_smb_mounts_remove if mac_afp_or_smb_mounts_remove is iterable else [] }}"
notify: mac_run_automount_unmount

tasks/consul.yml

@@ -4,356 +4,362 @@
- name: Set variables needed to install consul
block:
- name: "Set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "Set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "Set variable: Use USB drive for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "{{ rpi_usb_drive_mount_point }}/opt/consul"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "Set variable: Use USB drive for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "{{ rpi_usb_drive_mount_point }}/opt/consul"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "Set variable: Use root disk for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "/opt/consul"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "Set variable: Use root disk for consul /opt (Debian)"
ansible.builtin.set_fact:
consul_opt_dir: "/opt/consul"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "Set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
consul_opt_dir: "/Users/{{ ansible_user }}/Library/consul"
when:
- mac_intel or mac_arm
- name: "Set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
consul_opt_dir: "/Users/{{ ansible_user }}/Library/consul"
when:
- mac_intel or mac_arm
- name: "Set variable: Use ~/volume1/docker/consul/data for /opt files (synology)"
ansible.builtin.set_fact:
consul_opt_dir: "/volume1/docker/consul/data"
when:
- inventory_hostname == 'synology'
- name: "Set variable: Use ~/volume1/docker/consul/data for /opt files (synology)"
ansible.builtin.set_fact:
consul_opt_dir: "/volume1/docker/consul/data"
when:
- inventory_hostname == 'synology'
- name: "Set variable: Set Consul download Binary (armv7l)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set Consul download Binary (armv7l)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set Consul download Binary (aarch64)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set Consul download Binary (aarch64)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_amd64.zip"
when: mac_intel
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_amd64.zip"
when: mac_intel
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_arm64.zip"
when: mac_arm
- name: "Set variable: Set Consul download Binary (MacOSX)"
ansible.builtin.set_fact:
consul_download_uri: "https://releases.hashicorp.com/consul/{{ consul_version }}/consul_{{ consul_version }}_darwin_arm64.zip"
when: mac_arm
- name: Assert that we can install Consul
ansible.builtin.assert:
that:
- consul_download_uri is defined
- consul_opt_dir is defined
fail_msg: "Unable to install consul on this host"
when: inventory_hostname != 'synology'
- name: Assert that we can install Consul
ansible.builtin.assert:
that:
- consul_download_uri is defined
- consul_opt_dir is defined
fail_msg: "Unable to install consul on this host"
when: inventory_hostname != 'synology'
- name: "Stop Consul"
block:
- name: "Stop consul systemd service (Debian)"
become: true
ansible.builtin.systemd:
name: consul
state: stopped
when:
- ansible_os_family == 'Debian'
- ansible_facts.services["consul.service"] is defined
- name: "Stop consul systemd service (Debian)"
become: true
ansible.builtin.systemd:
name: consul
state: stopped
when:
- ansible_os_family == 'Debian'
- ansible_facts.services["consul.service"] is defined
- name: "Check if plist file exists (MacOSX)"
ansible.builtin.stat:
path: "{{ consul_plist_macos }}"
register: consul_file
when:
- ansible_os_family == 'Darwin'
- name: "Check if plist file exists (MacOSX)"
ansible.builtin.stat:
path: "{{ consul_plist_macos }}"
register: consul_file
when:
- ansible_os_family == 'Darwin'
- name: "Unload consul agent (MacOSX)"
become: true
ansible.builtin.command:
cmd: "launchctl unload {{ consul_plist_macos }}"
when:
- ansible_os_family == 'Darwin'
- consul_file.stat.exists
- name: "Unload consul agent (MacOSX)"
become: true
ansible.builtin.command:
cmd: "launchctl unload {{ consul_plist_macos }}"
register: consul_unload
failed_when: consul_unload.rc != 0
changed_when: consul_unload.rc == 0
when:
- ansible_os_family == 'Darwin'
- consul_file.stat.exists
- name: "Create 'consul' user and group"
when:
- ansible_os_family == 'Debian'
- ansible_os_family == 'Debian'
block:
- name: "Ensure group 'consul' exists (Debian)"
become: true
ansible.builtin.group:
name: consul
state: present
- name: "Ensure group 'consul' exists (Debian)"
become: true
ansible.builtin.group:
name: consul
state: present
- name: "Add the user 'consul' with group 'consul' (Debian)"
become: true
ansible.builtin.user:
name: consul
group: consul
- name: "Add the user 'consul' with group 'consul' (Debian)"
become: true
ansible.builtin.user:
name: consul
group: consul
- name: "Create Consul /opt storage and copy certificates"
block:
- name: "Create {{ consul_opt_dir }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ consul_opt_dir }}"
- "{{ consul_opt_dir }}/logs"
- "{{ consul_opt_dir }}/plugins"
- "{{ consul_opt_dir }}/certs"
- name: Copy certs to servers
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: "certs/consul/consul-agent-ca.pem", dest: "{{ consul_opt_dir }}/certs/consul-agent-ca.pem" }
- { src: "certs/consul/{{ datacenter_name }}-server-consul-0.pem", dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0.pem" }
- { src: "certs/consul/{{ datacenter_name }}-server-consul-0-key.pem", dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0-key.pem" }
when:
- is_consul_server
- name: Copy certs to clients
become: true
ansible.builtin.copy:
src: certs/consul/consul-agent-ca.pem
dest: "{{ consul_opt_dir }}/certs/consul-agent-ca.pem"
mode: 0755
when:
- is_consul_client
- not is_consul_server
- name: "Set owner of files to consul:consul (debian)"
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
owner: consul
group: consul
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Template out Consul configuration file"
block:
- name: "Create {{ interpolated_consul_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
state: directory
mode: 0755
- name: Copy consul base config file
become: true
ansible.builtin.template:
src: consul.hcl.j2
dest: "{{ interpolated_consul_configuration_dir }}/consul.hcl"
mode: 0644
- name: "Set owner of files to consul:consul (Debian)"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
owner: consul
group: consul
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Set owner of root consul dir to {{ ansible_user_uid }}:{{ ansible_user_gid }} (synology)"
become: true
ansible.builtin.file:
path: /volume1/docker/consul/
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- inventory_hostname == 'synology'
- name: "Install Consul binary"
block:
- name: "Set fact: need install consul?"
ansible.builtin.set_fact:
need_consul_install: false
when:
- consul_download_uri is defined
- name: Check if Consul is installed
ansible.builtin.stat:
path: /usr/local/bin/consul
register: consul_binary_file_location
when:
- consul_download_uri is defined
- name: "Set fact: need consul install?"
ansible.builtin.set_fact:
need_consul_install: true
when:
- consul_download_uri is defined
- not consul_binary_file_location.stat.exists
- name: Check current version of Consul
ansible.builtin.shell:
cmd: /usr/local/bin/consul --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
changed_when: false
register: installed_consul_version
check_mode: false
when:
- consul_download_uri is defined
- not need_consul_install
- name: "Set fact: need consul install?"
ansible.builtin.set_fact:
need_consul_install: true
when:
- consul_download_uri is defined
- not need_consul_install
- installed_consul_version.stdout is version(consul_version, '<')
- name: Install Consul
become: true
ansible.builtin.unarchive:
src: "{{ consul_download_uri }}"
dest: /usr/local/bin
remote_src: true
when:
- consul_download_uri is defined
- need_consul_install
- name: "Validate consul config"
ansible.builtin.command:
cmd: "/usr/local/bin/consul validate {{ interpolated_consul_configuration_dir }}"
register: consul_config_valid
changed_when: false
failed_when: consul_config_valid.rc != 0
when:
- inventory_hostname != 'synology'
- name: "Copy system.d or launchctl service files"
block:
- name: Ensure /Library/LaunchAgents exists (MacOSX)
ansible.builtin.file:
path: "{{ consul_plist_macos | dirname }}"
state: directory
mode: 0755
when:
- ansible_os_family == 'Darwin'
- name: Create Consul launchd service (MacOSX)
ansible.builtin.template:
src: consul.launchd.j2
dest: "{{ consul_plist_macos }}"
mode: 0644
when:
- ansible_os_family == 'Darwin'
- name: Create Consul service (Debian)
become: true
ansible.builtin.template:
src: consul.service.j2
dest: /etc/systemd/system/consul.service
mode: 0644
when:
- ansible_os_family == 'Debian'
- name: "Start Consul"
block:
- name: Load the Consul agent (MacOSX)
ansible.builtin.command:
cmd: "launchctl load -w {{ consul_plist_macos }}"
when:
- mac_intel or mac_arm
- "'nostart' not in ansible_run_tags"
- name: Load the Consul agent (MacOSX)
ansible.builtin.command:
cmd: "launchctl load -w {{ consul_plist_macos }}"
register: consul_loaded
changed_when: consul_loaded.rc == 0
failed_when: consul_loaded.rc > 0
when:
- mac_intel or mac_arm
- "'nostart' not in ansible_run_tags"
- name: Start Consul (Debian)
become: true
ansible.builtin.systemd:
name: consul
enabled: true
state: started
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- name: Make sure Consul service is really running
ansible.builtin.command:
cmd: systemctl is-active consul
register: is_consul_really_running
changed_when: false
failed_when: is_consul_really_running.rc != 0
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- name: "Copy Consul service checks to synology"
when:
- inventory_hostname == 'synology'
block:
- name: Copy config file
ansible.builtin.template:
src: consul_services/consul_synology_checks.json.j2
dest: "{{ interpolated_consul_configuration_dir }}/service_checks.json"
mode: 0644
- name: Reload configuration file
ansible.builtin.uri:
url: "http://{{ synology_second_ip }}:8500/v1/agent/reload"
method: PUT
status_code: 200
ignore_errors: true
check_mode: false
register: consul_agent_reload_http_response
failed_when: consul_agent_reload_http_response.status != 200
- name: Debug when consul agent reload fails
ansible.builtin.debug:
var: consul_agent_reload_http_response.msg
check_mode: false
when: consul_agent_reload_http_response.status != 200

View File

@@ -33,5 +33,5 @@
# when:
# - ansible_facts['system_vendor'] is search("Synology")
- name: "end play"
- name: "End play"
ansible.builtin.meta: end_play

View File

@@ -4,85 +4,91 @@
- name: Check if Docker is already present
ansible.builtin.command:
cmd: docker --version
register: docker_command_result
changed_when: docker_command_result.rc == 1
failed_when: false
- name: install docker on Debian
- name: Install docker on Debian
when: ansible_os_family == 'Debian'
block:
- name: "Add docker local filesystem storage directory"
ansible.builtin.file:
path: "{{ rpi_localfs_service_storage }}"
mode: 0755
state: directory
- name: Download Docker install convenience script
ansible.builtin.get_url:
url: "https://get.docker.com/"
dest: /tmp/get-docker.sh
mode: 0775
when: docker_command_result.rc == 1
- name: Run Docker install convenience script
ansible.builtin.command: /tmp/get-docker.sh
environment:
CHANNEL: stable
when: docker_command_result.rc == 1
- name: Run Docker install convenience script
ansible.builtin.command: /tmp/get-docker.sh
environment:
CHANNEL: stable
register: docker_install
failed_when: docker_install.rc > 0
changed_when: docker_install.rc == 0
when: docker_command_result.rc == 1
- name: Make sure Docker CE is the version specified
ansible.builtin.apt:
name: "docker-ce"
state: present
when: docker_command_result.rc == 1
- name: Ensure Docker is started
ansible.builtin.service:
name: docker
state: started
enabled: true
- name: Ensure docker users are added to the docker group
become: true
ansible.builtin.user:
name: "{{ ansible_user }}"
groups: docker
append: true
when: docker_command_result.rc == 1
- name: install docker on macOS
- name: Install docker on macOS
when: "'macs' in group_names"
block:
- name: "Add docker directory to ~/Library"
ansible.builtin.file:
path: "{{ mac_localfs_service_storage }}"
mode: 0755
state: directory
- name: install base homebrew packages
community.general.homebrew:
name: docker
state: present
update_homebrew: false
upgrade_all: false
when: docker_command_result.rc == 1
- name: Install base homebrew packages
community.general.homebrew:
name: docker
state: present
update_homebrew: false
upgrade_all: false
when: docker_command_result.rc == 1
- name: open docker application
ansible.builtin.command:
cmd: open /Applications/Docker.app
when: docker_command_result.rc == 1
- name: Open docker application
ansible.builtin.command:
cmd: open /Applications/Docker.app
register: docker_open_app
failed_when: docker_open_app.rc > 0
changed_when: docker_open_app.rc == 0
when: docker_command_result.rc == 1
- name: Must install Docker manually
ansible.builtin.debug:
msg: |
Docker must be installed manually on MacOS. Log in to mac to install then rerun playbook
Be certain to configure the following:
- run on login
- add '{{ mac_storage_mount_point }}' to mountable file system directories
when: docker_command_result.rc == 1
- name: end play
ansible.builtin.meta: end_play
when: docker_command_result.rc == 1
- name: End play
ansible.builtin.meta: end_play
when: docker_command_result.rc == 1

View File

@@ -8,46 +8,46 @@
- name: "Set local filesystem location (pis)"
ansible.builtin.set_fact:
interpolated_localfs_service_storage: "{{ rpi_localfs_service_storage }}"
changed_when: false
when:
- "'pis' in group_names"
- name: "Set local filesystem location (macs)"
ansible.builtin.set_fact:
interpolated_localfs_service_storage: "{{ mac_localfs_service_storage }}"
changed_when: false
when:
- "'macs' in group_names"
- name: "Set NFS mount location (pis)"
ansible.builtin.set_fact:
interpolated_nfs_service_storage: "{{ rpi_nfs_mount_point }}"
changed_when: false
when:
- "'pis' in group_names"
- name: "Set NFS mount location (macs)"
ansible.builtin.set_fact:
interpolated_nfs_service_storage: "{{ mac_storage_mount_point }}"
changed_when: false
when:
- "'macs' in group_names"
- name: "set consul configuration directory (synology)"
- name: "Set consul configuration directory (synology)"
ansible.builtin.set_fact:
interpolated_consul_configuration_dir: "{{ synology_consul_configuration_dir }}"
when:
- inventory_hostname == 'synology'
- name: "set consul configuration directory (pis)"
- name: "Set consul configuration directory (pis)"
ansible.builtin.set_fact:
interpolated_consul_configuration_dir: "{{ rpi_consul_configuration_dir }}"
when:
- "'pis' in group_names"
- name: "set consul configuration directory (macs)"
- name: "Set consul configuration directory (macs)"
ansible.builtin.set_fact:
interpolated_consul_configuration_dir: "{{ mac_consul_configuration_dir }}"
when:
- "'macs' in group_names"

View File

@@ -4,29 +4,29 @@
#
# NOTE: This task exists due to the arillso.logrotate failing completely on macOS
- name: add service_backups.log to logrotate
- name: Add service_backups.log to logrotate
become: true
vars:
logrotate_applications:
- name: service_backups
definitions:
- logs:
- "{{ rpi_nfs_mount_point }}/pi-cluster/logs/service_backups.log"
options:
- rotate 1
- size 100k
- missingok
- notifempty
- su root root
- extension .log
- compress
- nodateext
- nocreate
- delaycompress
ansible.builtin.import_role:
name: arillso.logrotate
failed_when: false
ignore_errors: true
when:
- "'macs' not in group_names"
- is_cluster_leader

View File

@@ -4,243 +4,243 @@
- name: "Set variables needed to install Nomad"
block:
- name: "set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "Set variable: check if we have a mounted USB drive (Debian)"
ansible.builtin.stat:
path: "{{ rpi_usb_drive_mount_point }}"
register: have_usb_drive
changed_when: false
when:
- ansible_os_family == 'Debian'
- name: "set variable: Use USB drive for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "{{ rpi_usb_drive_mount_point }}/opt/nomad"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "Set variable: Use USB drive for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "{{ rpi_usb_drive_mount_point }}/opt/nomad"
when:
- ansible_os_family == 'Debian'
- have_usb_drive.stat.exists
- name: "set variable: Use root dist for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/opt/nomad"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "Set variable: Use root dist for nomad /opt (Debian)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/opt/nomad"
when:
- ansible_os_family == 'Debian'
- not have_usb_drive.stat.exists
- name: "set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/Users/{{ ansible_user }}/Library/nomad"
when:
- ansible_os_family == 'Darwin'
- name: "Set variable: Use ~/library for /opt files (macOSX)"
ansible.builtin.set_fact:
nomad_opt_dir_location: "/Users/{{ ansible_user }}/Library/nomad"
when:
- ansible_os_family == 'Darwin'
- name: "set variable: Set Nomad download Binary (armv7l)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set Nomad download Binary (armv7l)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "set variable: Set Nomad download Binary (aarch64)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set Nomad download Binary (aarch64)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_linux_arm64.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "set variable: Set Nomad download Binary (MacOSX)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_darwin_amd64.zip"
when:
- mac_intel
- name: "Set variable: Set Nomad download Binary (MacOSX)"
ansible.builtin.set_fact:
nomad_download_file_uri: "https://releases.hashicorp.com/nomad/{{ nomad_version }}/nomad_{{ nomad_version }}_darwin_amd64.zip"
when:
- mac_intel
- name: Assert that we can install Nomad
ansible.builtin.assert:
that:
- nomad_download_file_uri is defined
- nomad_opt_dir_location is defined
fail_msg: "Unable to install Nomad on this host"
- name: "Create Nomad user and group (Debian)"
when: ansible_os_family == 'Debian'
block:
- name: "Ensure group 'nomad' exists (Debian)"
become: true
ansible.builtin.group:
name: nomad
state: present
- name: "Add the user 'nomad' with group 'nomad' (Debian)"
become: true
ansible.builtin.user:
name: nomad
group: nomad
- name: "Add user 'nomad' to docker and sudo groups (Debian)"
become: true
ansible.builtin.user:
user: nomad
groups: docker,sudo
append: true
- name: "Create Nomad /opt storage"
block:
- name: "create {{ nomad_opt_dir_location }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ nomad_opt_dir_location }}/logs"
- "{{ nomad_opt_dir_location }}/plugins"
- "{{ nomad_opt_dir_location }}/certs"
- name: "Create {{ nomad_opt_dir_location }} directories"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
recurse: true
mode: 0755
loop:
- "{{ nomad_opt_dir_location }}/logs"
- "{{ nomad_opt_dir_location }}/plugins"
- "{{ nomad_opt_dir_location }}/certs"
- name: Copy server certs
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: certs/nomad/nomad-ca.pem, dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem" }
- { src: certs/nomad/server.pem, dest: "{{ nomad_opt_dir_location }}/certs/server.pem" }
- { src: certs/nomad/server-key.pem, dest: "{{ nomad_opt_dir_location }}/certs/server-key.pem" }
notify: "restart nomad"
when: is_nomad_server
- name: Copy client certs
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: certs/nomad/nomad-ca.pem, dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem" }
- { src: certs/nomad/client.pem, dest: "{{ nomad_opt_dir_location }}/certs/client.pem" }
- { src: certs/nomad/client-key.pem, dest: "{{ nomad_opt_dir_location }}/certs/client-key.pem" }
notify: "restart nomad"
when: is_nomad_client
- name: "set owner of files to nomad:nomad (debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: nomad
group: nomad
recurse: true
when: ansible_os_family == 'Debian'
- name: "Set owner of files to nomad:nomad (debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: nomad
group: nomad
recurse: true
when: ansible_os_family == 'Debian'
- name: "set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }} (MacOSX)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when: ansible_os_family != 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }} (MacOSX)"
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when: ansible_os_family != 'Debian'
- name: "Template out the configuration file"
block:
- name: "create {{ nomad_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
state: directory
mode: 0755
- name: "Create {{ nomad_configuration_dir }}"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
state: directory
mode: 0755
- name: copy base config file
become: true
ansible.builtin.template:
src: nomad.hcl.j2
dest: "{{ nomad_configuration_dir }}/nomad.hcl"
mode: 0644
notify: "restart nomad"
- name: Copy base config file
become: true
ansible.builtin.template:
src: nomad.hcl.j2
dest: "{{ nomad_configuration_dir }}/nomad.hcl"
mode: 0644
notify: "restart nomad"
- name: "set owner of files to nomad:nomad (Debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
owner: nomad
group: nomad
recurse: true
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to nomad:nomad (Debian)"
become: true
ansible.builtin.file:
path: "{{ nomad_configuration_dir }}"
owner: nomad
group: nomad
recurse: true
when:
- ansible_os_family == 'Debian'
- name: Install or Update Nomad
block:
- name: "set fact: do we need a nomad install?"
ansible.builtin.set_fact:
need_nomad_install: false
- name: "Set fact: do we need a nomad install?"
ansible.builtin.set_fact:
need_nomad_install: false
- name: Check if nomad is installed
ansible.builtin.stat:
path: /usr/local/bin/nomad
register: nomad_binary_file_location
- name: "set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not nomad_binary_file_location.stat.exists
- name: "Set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not nomad_binary_file_location.stat.exists
- name: Check current version of Nomad
ansible.builtin.shell: /usr/local/bin/nomad --version | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_nomad_version
check_mode: false
changed_when: false
when:
- not need_nomad_install
- name: "set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not need_nomad_install
- current_nomad_version.stdout is version(nomad_version, '<')
- name: "Set fact: do we need a nomad install"
ansible.builtin.set_fact:
need_nomad_install: true
when:
- not need_nomad_install
- current_nomad_version.stdout is version(nomad_version, '<')
- name: install Nomad
become: true
ansible.builtin.unarchive:
src: "{{ nomad_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
notify: "restart nomad"
when:
- need_nomad_install
- name: Install Nomad
become: true
ansible.builtin.unarchive:
src: "{{ nomad_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
notify: "restart nomad"
when:
- need_nomad_install
- name: "Copy system.d or launchctrl service files"
block:
- name: ensure /Library/LaunchAgents exists (MacOSX)
ansible.builtin.file:
path: "{{ nomad_plist_macos | dirname }}"
state: directory
mode: 0755
when:
- ansible_os_family == 'Darwin'
- name: Ensure /Library/LaunchAgents exists (MacOSX)
ansible.builtin.file:
path: "{{ nomad_plist_macos | dirname }}"
state: directory
mode: 0755
when:
- ansible_os_family == 'Darwin'
- name: create nomad launchd service (MacOSX)
ansible.builtin.template:
src: nomad.launchd.j2
dest: "{{ nomad_plist_macos }}"
mode: 0644
notify: "restart nomad"
when:
- ansible_os_family == 'Darwin'
- name: Create nomad launchd service (MacOSX)
ansible.builtin.template:
src: nomad.launchd.j2
dest: "{{ nomad_plist_macos }}"
mode: 0644
notify: "restart nomad"
when:
- ansible_os_family == 'Darwin'
- name: create nomad service (Debian)
become: true
ansible.builtin.template:
src: nomad.service.j2
dest: /etc/systemd/system/nomad.service
mode: 0644
notify: "restart nomad"
when:
- ansible_os_family == 'Debian'
- name: Create nomad service (Debian)
become: true
ansible.builtin.template:
src: nomad.service.j2
dest: /etc/systemd/system/nomad.service
mode: 0644
notify: "restart nomad"
when:
- ansible_os_family == 'Debian'
- name: "start nomad, if stopped"
- name: "Start nomad, if stopped"
ansible.builtin.shell:
cmd: "/usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
register: node_status_response
ignore_errors: true
failed_when: false

View File

@@ -10,67 +10,67 @@
- name: "Sync Nomad Jobs"
block:
- name: Remove nomad jobs directory
ansible.builtin.file:
path: "{{ nomad_jobfile_location }}"
state: absent
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- clean_nomad_jobs
- name: (Re)Create nomad jobs directory
ansible.builtin.file:
path: "{{ nomad_jobfile_location }}"
state: directory
mode: 0755
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: synchronize nomad job templates (jinja)
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ nomad_jobfile_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "templates/nomad_jobs/*.j2"
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: Synchronize nomad job templates (jinja)
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ nomad_jobfile_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "templates/nomad_jobs/*.j2"
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: synchronize nomad job templates (hcl)
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ nomad_jobfile_location }}/{{ item | basename }}"
mode: 0644
with_fileglob: "templates/nomad_jobs/*.hcl"
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: Synchronize nomad job templates (hcl)
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ nomad_jobfile_location }}/{{ item | basename }}"
mode: 0644
with_fileglob: "templates/nomad_jobs/*.hcl"
when:
- is_nomad_client or is_nomad_server or ("'macs' in group_names")
- "'nas' not in group_names"
- name: Ensure we have local storage folders
become: true
ansible.builtin.file:
path: "{{ interpolated_localfs_service_storage }}/{{ item }}"
state: directory
mode: 0777
group: "{{ ansible_user_gid }}"
owner: "{{ ansible_user_uid }}"
when:
- is_nomad_client or is_nomad_server
loop: "{{ service_localfs_dirs }}"
- name: "Sync docker compose files"
- name: Sync docker compose files
when: is_docker_compose_client
block:
- name: confirm compose file dir exists
ansible.builtin.file:
path: "{{ docker_compose_file_location }}"
state: directory
mode: 0755
- name: Confirm compose file dir exists
ansible.builtin.file:
path: "{{ docker_compose_file_location }}"
state: directory
mode: 0755
- name: synchronize docker-compose files
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ docker_compose_file_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "../templates/docker_compose_files/*.j2"
- name: Synchronize docker-compose files
ansible.builtin.template:
src: "{{ item }}"
dest: "{{ docker_compose_file_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "../templates/docker_compose_files/*.j2"

View File

@@ -4,64 +4,64 @@
- name: "Update and install APT packages"
when:
- ansible_os_family != 'Darwin'
- manage_apt_packages_list
block:
- name: update APT package cache
become: true
ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
- name: Update APT package cache
become: true
ansible.builtin.apt:
update_cache: true
cache_valid_time: 3600
- name: "upgrade APT to the latest packages (this may take a while)"
become: true
ansible.builtin.apt:
upgrade: safe
- name: "Upgrade APT to the latest packages (this may take a while)"
become: true
ansible.builtin.apt:
upgrade: safe
- name: "install/upgrade APT packages (this may take a while)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop: "{{ apt_packages_list }}"
register: apt_output
- name: "Install/upgrade APT packages (this may take a while)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop: "{{ apt_packages_list }}"
register: apt_output
- name: "Update and install Homebrew packages"
when:
- manage_homebrew_package_list
- ansible_os_family == 'Darwin'
block:
- name: upgrade homebrew and all packages
community.general.homebrew:
update_homebrew: true
upgrade_all: true
register: homebrew_output
ignore_errors: true
- name: Upgrade homebrew and all packages
community.general.homebrew:
update_homebrew: true
upgrade_all: true
register: homebrew_output
ignore_errors: true
- name: install base homebrew packages
community.general.homebrew:
name: "{{ homebrew_package_list | join(',') }}"
state: present
update_homebrew: false
upgrade_all: false
register: homebrew_output
- name: Install base homebrew packages
community.general.homebrew:
name: "{{ homebrew_package_list | join(',') }}"
state: present
update_homebrew: false
upgrade_all: false
register: homebrew_output
- name: homebrew packages updated or installed
ansible.builtin.debug:
msg: "{{ homebrew_output.changed_pkgs }}"
- name: Homebrew packages updated or installed
ansible.builtin.debug:
msg: "{{ homebrew_output.changed_pkgs }}"
- name: unchanged homebrew packages
ansible.builtin.debug:
msg: "{{ homebrew_output.unchanged_pkgs }}"
- name: Unchanged homebrew packages
ansible.builtin.debug:
msg: "{{ homebrew_output.unchanged_pkgs }}"
- name: install homebrew casks
community.general.homebrew_cask:
name: "{{ item }}"
state: present
install_options: "appdir=/Applications"
accept_external_apps: true
upgrade_all: false
update_homebrew: false
greedy: false
loop: "{{ homebrew_casks_list }}"
ignore_errors: true
- name: Install homebrew casks
community.general.homebrew_cask:
name: "{{ item }}"
state: present
install_options: "appdir=/Applications"
accept_external_apps: true
upgrade_all: false
update_homebrew: false
greedy: false
loop: "{{ homebrew_casks_list }}"
ignore_errors: true

View File

@@ -5,36 +5,37 @@
- name: "Check if pull_all_repos exists"
ansible.builtin.stat:
path: "~/bin/pull_all_repos"
check_mode: false
register: pull_script_check
- name: "Check if ~/repos exists"
ansible.builtin.stat:
path: "~/repos"
check_mode: false
register: repos_directory_check
- name: "run pull_all_repos script"
- name: "Run pull_all_repos script"
ansible.builtin.command:
cmd: "~/bin/pull_all_repos --directory ~/repos"
register: pull_script_output
when:
- not ansible_check_mode
- pull_script_check.stat.exists
- pull_script_check.stat.executable
- repos_directory_check.stat.isdir is defined
- repos_directory_check.stat.isdir
- repos_directory_check.stat.writeable
failed_when: pull_script_output.rc > 1
changed_when: pull_script_output.rc == 0
- name: "Output from pull_all_repos"
ansible.builtin.debug:
msg: "{{ pull_script_output.stdout }}"
when:
- not ansible_check_mode
- pull_script_check.stat.exists
- pull_script_check.stat.executable
- repos_directory_check.stat.isdir is defined
- repos_directory_check.stat.isdir
- repos_directory_check.stat.writeable

View File

@@ -1,12 +1,12 @@
---
# TASK DESCRIPTION:
# Always runs first. Confirms we can actually use Ansible
- name: sanity - user mode
- name: Sanity - user mode
become: false
ansible.builtin.debug:
msg: "sanity check: user mode"
msg: "Sanity check: user mode"
- name: sanity - become mode
- name: Sanity - become mode
become: true
ansible.builtin.debug:
msg: "sanity check: become mode"
msg: "Sanity check: become mode"

View File

@@ -4,90 +4,92 @@
#
# NOTE: This is deprecated, I no longer use Prometheus and have migrated to Telegraf
- name: populate service facts
- name: Populate service facts
ansible.builtin.service_facts:
- name: stop node_exporter
- name: Stop node_exporter
become: true
ansible.builtin.systemd:
name: node_exporter
state: stopped
when: ansible_facts.services["node_exporter.service"] is defined
- name: Ensure group "prometheus" exists
become: true
ansible.builtin.group:
name: prometheus
state: present
- name: Add the user 'prometheus' with group 'prometheus'
become: true
ansible.builtin.user:
name: prometheus
group: prometheus
groups: docker
append: true
# --------------- Install or Update Prometheus
- name: "set fact: need to install Prometheus?"
- name: "Set fact: need to install Prometheus?"
ansible.builtin.set_fact:
need_prometheus_install: false
- name: Check if node_exporter is installed
ansible.builtin.stat:
path: /usr/local/bin/node_exporter
register: prometheus_binary_file_location
- name: "set fact: need to install Prometheus?"
- name: "Set fact: need to install Prometheus?"
ansible.builtin.set_fact:
need_prometheus_install: true
when:
- not prometheus_binary_file_location.stat.exists
- name: Check current version of Prometheus
ansible.builtin.shell: /usr/local/bin/node_exporter --version 3>&1 1>&2 2>&3 | head -n1 | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_prometheus_version
failed_when: false
changed_when: false
check_mode: false
when:
- need_prometheus_install is false
- name: "set fact: need to install Prometheus?"
- name: "Set fact: need to install Prometheus?"
ansible.builtin.set_fact:
need_prometheus_install: true
when:
- need_prometheus_install is false
- current_prometheus_version.stdout != prometheus_verssion
- name: install node_exporter
- name: Install node_exporter
become: true
ansible.builtin.unarchive:
src: "https://github.com/prometheus/node_exporter/releases/download/v{{ prometheus_verssion }}/node_exporter-{{ prometheus_verssion }}.linux-armv7.tar.gz"
dest: /usr/local/bin
group: prometheus
owner: prometheus
# reference for extra_opts: https://github.com/ansible/ansible/issues/27081
extra_opts:
- --strip=1
- --no-anchored
- "node_exporter"
remote_src: true
when:
- need_prometheus_install is true
- name: create node_exporter service
- name: Create node_exporter service
become: true
ansible.builtin.template:
src: node_exporter.service.j2
dest: /etc/systemd/system/node_exporter.service
mode: 0644
- name: start node_exporter
- name: Start node_exporter
become: true
ansible.builtin.systemd:
name: node_exporter
daemon_reload: true
enabled: true
state: started
when:
- "'nostart' not in ansible_run_tags"

View File

@@ -4,186 +4,187 @@
- name: "Set variables"
block:
- name: "Set tdarr local filesystem location (pis)"
ansible.builtin.set_fact:
interpolated_tdarr_dir: "{{ rpi1_tdarr_file_location }}"
changed_when: false
when:
- "'pis' in group_names"
- name: "Set tdarr local filesystem location (macs)"
ansible.builtin.set_fact:
interpolated_tdarr_dir: "{{ mac_tdarr_file_location }}"
changed_when: false
when:
- "'macs' in group_names"
- name: "set variable: Set tdarr download Binary (armv7l)"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/linux_arm/Tdarr_Updater.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set tdarr download Binary (armv7l)"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/linux_arm/Tdarr_Updater.zip"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "set variable: Set tdarr download Binary (MacOSX) - Intel"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_x64/Tdarr_Updater.zip"
when:
- mac_intel
- name: "Set variable: Set tdarr download Binary (MacOSX) - Intel"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_x64/Tdarr_Updater.zip"
when:
- mac_intel
- name: "set variable: Set tdarr download Binary (MacOSX) - ARM"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_arm64/Tdarr_Updater.zip"
when:
- mac_arm
- name: "Set variable: Set tdarr download Binary (MacOSX) - ARM"
ansible.builtin.set_fact:
tdarr_download_uri: "https://f000.backblazeb2.com/file/tdarrs/versions/{{ tdarr_installer_version }}/darwin_arm64/Tdarr_Updater.zip"
when:
- mac_arm
- name: "set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: false
- name: "Set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: false
- name: Assert that we can install Tdarr
ansible.builtin.assert:
that:
- tdarr_download_uri is defined
- interpolated_tdarr_dir is defined
fail_msg: "Unable to install Tdarr on this host"
- name: "Install ffmpeg and HandbrakeCLI"
block:
- name: "ensure ffmpeg and handbrake are installed (Debian)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop:
- ffmpeg
- handbrake
when: "'pis' in group_names"
- name: "Ensure ffmpeg and handbrake are installed (Debian)"
become: true
ansible.builtin.apt:
pkg: "{{ item }}"
state: present
loop:
- ffmpeg
- handbrake
when: "'pis' in group_names"
- name: "ensure ffmpeg and handbrake are installed (MacOS)"
community.general.homebrew:
name: "{{ item }}"
state: present
update_homebrew: false
upgrade_all: false
loop:
- ffmpeg
- handbrake
when: "'macs' in group_names"
- name: "Ensure ffmpeg and handbrake are installed (MacOS)"
community.general.homebrew:
name: "{{ item }}"
state: present
update_homebrew: false
upgrade_all: false
loop:
- ffmpeg
- handbrake
when: "'macs' in group_names"
- name: "ensure tdarr directory exists"
- name: "Ensure tdarr directory exists"
become: true
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
state: directory
- name: "Install tdarr"
block:
- name: "set_fact: need Tdarr install?"
ansible.builtin.stat:
path: "{{ interpolated_tdarr_dir }}/configs"
register: tdarr_exists
changed_when: false
failed_when: false
- name: "Set fact: need Tdarr install?"
ansible.builtin.stat:
path: "{{ interpolated_tdarr_dir }}/configs"
register: tdarr_exists
changed_when: false
failed_when: false
- name: "set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: true
when: not tdarr_exists.stat.exists
- name: "Set fact: do we need a tdarr install?"
ansible.builtin.set_fact:
need_tdarr_install: true
when: not tdarr_exists.stat.exists
- name: Download tdarr
ansible.builtin.unarchive:
src: "{{ tdarr_download_uri }}"
dest: "{{ interpolated_tdarr_dir }}"
remote_src: true
when: need_tdarr_install
- name: Did tdarr download?
ansible.builtin.stat:
path: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
register: tdarr_installer_exists
failed_when: not tdarr_installer_exists.stat.exists
when: need_tdarr_install
- name: Ensure correct permissions on Tdarr_Updater
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
mode: 0755
when: need_tdarr_install
- name: Install tdarr
ansible.builtin.command:
cmd: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
register: tdarr_install
failed_when: tdarr_install.rc > 0
when: need_tdarr_install
- name: Install tdarr
ansible.builtin.command:
cmd: "{{ interpolated_tdarr_dir }}/Tdarr_Updater"
register: tdarr_install
failed_when: tdarr_install.rc > 0
changed_when: tdarr_install.rc == 0
when: need_tdarr_install
- name: Ensure correct permissions on server/node executables
ansible.builtin.file:
path: "{{ interpolated_tdarr_dir }}/{{ item }}"
mode: 0755
loop:
- Tdarr_Server/Tdarr_Server
- Tdarr_Node/Tdarr_Node
when: need_tdarr_install
- name: "configure tdarr"
- name: "Configure tdarr"
block:
- name: update server configuration file
ansible.builtin.template:
src: Tdarr_Server_Config.json.j2
dest: "{{ interpolated_tdarr_dir }}/configs/Tdarr_Server_Config.json"
mode: 0644
when: is_tdarr_server
- name: Update server configuration file
ansible.builtin.template:
src: Tdarr_Server_Config.json.j2
dest: "{{ interpolated_tdarr_dir }}/configs/Tdarr_Server_Config.json"
mode: 0644
when: is_tdarr_server
- name: update node configuration file
ansible.builtin.template:
src: Tdarr_Node_Config.json.j2
dest: "{{ interpolated_tdarr_dir }}/configs/Tdarr_Node_Config.json"
mode: 0644
when: is_tdarr_node
- name: Update node configuration file
ansible.builtin.template:
src: Tdarr_Node_Config.json.j2
dest: "{{ interpolated_tdarr_dir }}/configs/Tdarr_Node_Config.json"
mode: 0644
when: is_tdarr_node
- name: check if consul is installed?
ansible.builtin.stat:
path: "{{ interpolated_consul_configuration_dir }}"
register: consul_installed
changed_when: false
failed_when: false
when:
- is_tdarr_server
- name: Check if consul is installed?
ansible.builtin.stat:
path: "{{ interpolated_consul_configuration_dir }}"
register: consul_installed
changed_when: false
failed_when: false
when:
- is_tdarr_server
- name: move consul service config into place
become: true
ansible.builtin.template:
src: consul_services/tdarr_service.json.j2
dest: "{{ interpolated_consul_configuration_dir }}/tdarr_service.json"
mode: 0644
when:
- is_tdarr_server
- consul_installed.stat.exists
- name: Move consul service config into place
become: true
ansible.builtin.template:
src: consul_services/tdarr_service.json.j2
dest: "{{ interpolated_consul_configuration_dir }}/tdarr_service.json"
mode: 0644
when:
- is_tdarr_server
- consul_installed.stat.exists
- name: Reload consul agent
ansible.builtin.uri:
url: "http://{{ ansible_host }}:8500/v1/agent/reload"
method: PUT
status_code: 200
ignore_errors: true
register: consul_agent_reload_http_response
failed_when: consul_agent_reload_http_response.status != 200
when:
- is_tdarr_server
- consul_installed.stat.exists
- name: debug when consul agent reload fails
ansible.builtin.debug:
var: consul_agent_reload_http_response.msg
when:
- is_tdarr_server
- consul_installed.stat.exists
- consul_agent_reload_http_response.status != 200
- name: Debug when consul agent reload fails
ansible.builtin.debug:
var: consul_agent_reload_http_response.msg
when:
- is_tdarr_server
- consul_installed.stat.exists
- consul_agent_reload_http_response.status != 200
- name: mount shared storage
- name: Mount shared storage
ansible.builtin.import_tasks: cluster_storage.yml

View File

@@ -5,146 +5,146 @@
# --------------------------------- Set variables depending on system type
- name: "Configure variables"
block:
- name: "set variable: telegraph_binary_location (Debian)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/bin/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "Set variable: telegraph_binary_location (Debian)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/bin/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "set variable: telegraph_binary_location (MacOS)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/local/bin/telegraf"
when:
- ansible_os_family == 'Darwin'
- name: "Set variable: telegraph_binary_location (MacOS)"
ansible.builtin.set_fact:
telegraph_binary_location: "/usr/local/bin/telegraf"
when:
- ansible_os_family == 'Darwin'
- name: "set fact: telegraph_config_location (Debian)"
ansible.builtin.set_fact:
telegraph_config_location: "/etc/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "Set fact: telegraph_config_location (Debian)"
ansible.builtin.set_fact:
telegraph_config_location: "/etc/telegraf"
when:
- ansible_os_family == 'Debian'
- name: "set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/usr/local/etc"
when:
- ansible_os_family == 'Darwin'
- name: "Set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/usr/local/etc"
when:
- ansible_os_family == 'Darwin'
- name: "set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/volume1/docker/telegraf/config"
when:
- inventory_hostname == 'synology'
- name: "Set fact: telegraph_config_location (macOS)"
ansible.builtin.set_fact:
telegraph_config_location: "/volume1/docker/telegraf/config"
when:
- inventory_hostname == 'synology'
- name: "Fail if arm Mac (need to update task) or variables not defined"
ansible.builtin.assert:
that:
- telegraph_binary_location is defined
- telegraph_config_location is defined
- not mac_arm
fail_msg: "Unable to install Telegraf on this host"
- name: "set variable: Set speedtest download Binary (armv7l)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-armhf.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Set variable: Set speedtest download Binary (armv7l)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-armhf.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "set variable: Set speedtest download Binary (aarch64)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-aarch64.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Set variable: Set speedtest download Binary (aarch64)"
ansible.builtin.set_fact:
speedtest_download_file_uri: "https://install.speedtest.net/app/cli/ookla-speedtest-{{ speedtest_cli_version }}-linux-aarch64.tgz"
when:
- ansible_os_family == 'Debian'
- ansible_architecture == 'aarch64'
- name: "Install/upgrade Telegraf"
block:
- name: "set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: false
when: telegraph_binary_location is defined
- name: "Set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: false
when: telegraph_binary_location is defined
- name: Check if telegraf is installed
ansible.builtin.stat:
path: "{{ telegraph_binary_location }}"
check_mode: false
register: telegraf_binary_exists
when: telegraph_binary_location is defined
- name: Check if telegraf is installed
ansible.builtin.stat:
path: "{{ telegraph_binary_location }}"
check_mode: false
register: telegraf_binary_exists
when: telegraph_binary_location is defined
- name: "set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
check_mode: false
when:
- telegraph_binary_location is defined
- not telegraf_binary_exists.stat.exists
- name: "Set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
check_mode: false
when:
- telegraph_binary_location is defined
- not telegraf_binary_exists.stat.exists
- name: Check current version of telegraf
ansible.builtin.shell: "{{ telegraph_binary_location }} --version | grep -oE '[0-9]+\\.[0-9]+\\.[0-9]+'"
ignore_errors: true
register: current_telegraf_version
check_mode: false
changed_when: false
when:
- not need_telegraf_install
- telegraph_binary_location is defined
- name: Check current version of telegraf
ansible.builtin.shell: "{{ telegraph_binary_location }} --version | grep -oE '[0-9]+\\.[0-9]+\\.[0-9]+'"
ignore_errors: true
register: current_telegraf_version
check_mode: false
changed_when: false
when:
- not need_telegraf_install
- telegraph_binary_location is defined
- name: "set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
when:
- telegraph_binary_location is defined
- not need_telegraf_install
- current_telegraf_version.stdout is version(telegraf_version, '<')
- name: "Set fact: Need telegraf install?"
ansible.builtin.set_fact:
need_telegraf_install: true
when:
- telegraph_binary_location is defined
- not need_telegraf_install
- current_telegraf_version.stdout is version(telegraf_version, '<')
- name: install telegraf (MacOS)
community.general.homebrew:
name: telegraf
state: present
notify: restart_telegraf
when:
- ansible_os_family == 'Darwin'
- need_telegraf_install
- name: Install telegraf (MacOS)
community.general.homebrew:
name: telegraf
state: present
notify: restart_telegraf
when:
- ansible_os_family == 'Darwin'
- need_telegraf_install
- name: install base apt-transport (Debian)
become: true
ansible.builtin.apt:
pkg: apt-transport-https
state: present
update_cache: true
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Install base apt-transport (Debian)
become: true
ansible.builtin.apt:
pkg: apt-transport-https
state: present
update_cache: true
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Download telegraf GPG key (Debian)
become: true
ansible.builtin.apt_key:
state: present
url: "https://repos.influxdata.com/influxdb.key"
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Download telegraf GPG key (Debian)
become: true
ansible.builtin.apt_key:
state: present
url: "https://repos.influxdata.com/influxdb.key"
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Add telegraf repository to apt (Debian)
become: true
ansible.builtin.apt_repository:
repo: deb https://repos.influxdata.com/debian bullseye stable
state: present
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Add telegraf repository to apt (Debian)
become: true
ansible.builtin.apt_repository:
repo: deb https://repos.influxdata.com/debian bullseye stable
state: present
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: install telegraf (Debian)
become: true
ansible.builtin.apt:
pkg: telegraf
state: latest
update_cache: true
only_upgrade: true
notify: restart_telegraf
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
- name: Install telegraf (Debian)
become: true
ansible.builtin.apt:
pkg: telegraf
state: latest
update_cache: true
only_upgrade: true
notify: restart_telegraf
when:
- ansible_os_family == 'Debian'
- need_telegraf_install
# - name: give telegraf access to docker
# become: true
@@ -162,115 +162,115 @@
- name: "Install speedtest"
when: "'pis' in group_names"
block:
- name: "set fact: do we need speedtest installed?"
ansible.builtin.set_fact:
need_speedtest_install: false
- name: "Set fact: do we need speedtest installed?"
ansible.builtin.set_fact:
need_speedtest_install: false
- name: Check if speedtest is installed
ansible.builtin.stat:
path: /usr/local/bin/speedtest
register: speedtest_binary_file_location
- name: Check if speedtest is installed
ansible.builtin.stat:
path: /usr/local/bin/speedtest
register: speedtest_binary_file_location
- name: "set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not speedtest_binary_file_location.stat.exists
- name: "Set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not speedtest_binary_file_location.stat.exists
- name: Check current version of speedtest
ansible.builtin.shell: /usr/local/bin/speedtest --version | head -n1 | awk '{print $4}' | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_speedtest_version
check_mode: false
changed_when: false
when:
- not need_speedtest_install
- name: Check current version of speedtest
ansible.builtin.shell: /usr/local/bin/speedtest --version | head -n1 | awk '{print $4}' | grep -oE '[0-9]+\.[0-9]+\.[0-9]+'
ignore_errors: true
register: current_speedtest_version
check_mode: false
changed_when: false
when:
- not need_speedtest_install
- name: "set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not need_speedtest_install
- current_speedtest_version.stdout is version(speedtest_cli_version, '<')
- name: "Set fact: do we need a speedtest install"
ansible.builtin.set_fact:
need_speedtest_install: true
when:
- not need_speedtest_install
- current_speedtest_version.stdout is version(speedtest_cli_version, '<')
- name: "Install speedtest (pi)"
become: true
ansible.builtin.unarchive:
src: "{{ speedtest_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
when:
- need_speedtest_install
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Install speedtest (pi)"
become: true
ansible.builtin.unarchive:
src: "{{ speedtest_download_file_uri }}"
dest: /usr/local/bin
remote_src: true
when:
- need_speedtest_install
- ansible_os_family == 'Debian'
- ansible_architecture == 'armv7l'
- name: "Configure Telegraf"
block:
- name: "Ensure {{ telegraph_config_location }} exists"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0755
loop:
- "{{ telegraph_config_location }}"
- "{{ telegraph_config_location }}/telegraf.d"
- name: "Ensure {{ telegraph_config_location }} exists"
become: true
ansible.builtin.file:
path: "{{ item }}"
state: directory
mode: 0755
loop:
- "{{ telegraph_config_location }}"
- "{{ telegraph_config_location }}/telegraf.d"
- name: template config files to server
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/base_config.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.conf" }
- { src: "telegraf/custom_metrics.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/custom_metrics.conf" }
- { src: "telegraf/nomad.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/nomad.conf" }
- { src: "telegraf/docker.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/docker.conf" }
notify: restart_telegraf
- name: Template config files to server
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/base_config.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.conf" }
- { src: "telegraf/custom_metrics.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/custom_metrics.conf" }
- { src: "telegraf/nomad.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/nomad.conf" }
- { src: "telegraf/docker.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/docker.conf" }
notify: restart_telegraf
- name: template leader configs (ie, configs that should be placed on a single server)
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/leader.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/leader.conf" }
- { src: "telegraf/speedtest.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/speedtest.conf" }
- { src: "telegraf/pingHosts.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/pingHosts.conf" }
when:
- is_cluster_leader
notify: restart_telegraf
- name: Template leader configs (ie, configs that should be placed on a single server)
become: true
ansible.builtin.template:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/leader.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/leader.conf" }
- { src: "telegraf/speedtest.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/speedtest.conf" }
- { src: "telegraf/pingHosts.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/pingHosts.conf" }
when:
- is_cluster_leader
notify: restart_telegraf
- name: Copy custom metrics script
become: true
ansible.builtin.template:
src: "scripts/telegraf_custom_metrics.sh.j2"
dest: "/usr/local/bin/telegraf_custom_metrics.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- inventory_hostname != 'synology'
- name: Copy custom metrics script
become: true
ansible.builtin.template:
src: "scripts/telegraf_custom_metrics.sh.j2"
dest: "/usr/local/bin/telegraf_custom_metrics.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- inventory_hostname != 'synology'
- name: Copy speedtest script
become: true
ansible.builtin.template:
src: "scripts/telegraf_speedtest.sh.j2"
dest: "/usr/local/bin/telegraf_speedtest.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- is_cluster_leader
- name: Copy speedtest script
become: true
ansible.builtin.template:
src: "scripts/telegraf_speedtest.sh.j2"
dest: "/usr/local/bin/telegraf_speedtest.sh"
mode: 0755
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
when:
- is_cluster_leader
- name: Reset file ownership
become: true
ansible.builtin.file:
path: "{{ telegraph_config_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- (ansible_os_family == 'Darwin') or (inventory_hostname == 'synology')
- name: Reset file ownership
become: true
ansible.builtin.file:
path: "{{ telegraph_config_location }}"
owner: "{{ ansible_user_uid }}"
group: "{{ ansible_user_gid }}"
recurse: true
when:
- (ansible_os_family == 'Darwin') or (inventory_hostname == 'synology')
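The install/upgrade and speedtest blocks above both follow the same check-then-install pattern: stat the binary, scrape the installed version with grep, and flip the need_*_install fact when the binary is missing or older than the pinned version. A condensed sketch of that version comparison, assuming telegraph_binary_location and telegraf_version are already set by the preceding tasks; the task names and the combined rc/version expression are illustrative, not the role's actual wording:

- name: Read the installed telegraf version (condensed sketch of the tasks above)
  ansible.builtin.shell: "{{ telegraph_binary_location }} --version | grep -oE '[0-9]+\\.[0-9]+\\.[0-9]+'"
  register: current_telegraf_version
  changed_when: false
  failed_when: false

- name: Flag an install when the binary is missing or older than telegraf_version
  ansible.builtin.set_fact:
    need_telegraf_install: "{{ current_telegraf_version.rc != 0 or current_telegraf_version.stdout is version(telegraf_version, '<') }}"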

View File

@@ -1,5 +1,5 @@
[Unit]
Description="HashiCorp Consul - A service mesh solution"
Description="hashiCorp Consul - A service mesh solution"
Documentation=https://www.consul.io/
Requires=network-online.target
After=network-online.target

View File

@@ -2,7 +2,7 @@ version: '3.9'
services:
consul:
image: consul:{{ consul_version }}
image: hashicorp/consul:{{ consul_version }}
hostname: consul
container_name: consul
network_mode: "host"

View File

@@ -59,7 +59,7 @@ consul {
{% endif %}
}
# ----------------------------------------- CLient Config
# ----------------------------------------- Client Config
client {
enabled = true
{% if 'pis' in group_names %}
@@ -206,9 +206,9 @@ plugin "raw_exec" {
plugin "docker" {
config {
allow_caps = [ "ALL" ]
allow_caps = ["all"]
allow_privileged = true
extra_labels = ["job_name"]
volumes {
enabled = true
}

View File

@@ -7,9 +7,16 @@ ConditionFileNotEmpty={{ nomad_configuration_dir }}/nomad.hcl
[Service]
{# {% if 'linode' in group_names %} #}
User=nomad
Group=nomad
{# User=nomad #}
{# Group=nomad #}
{# {% endif %} #}
{# NOTE: Nomad is running as root rather than the Nomad user due to the Docker driver not being started when cgroups v2 are enabled.
https://github.com/hashicorp/nomad/pull/16063
#}
User=root
Group=root
ExecReload=/bin/kill -HUP $MAINPID
ExecStart=/usr/local/bin/nomad agent -config {{ nomad_configuration_dir }}
KillMode=process
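The commented-out User/Group lines and the NOTE above record why this unit now starts Nomad as root: with cgroups v2 enabled on the Raspberry Pis, the Docker task driver does not start under the unprivileged nomad user (see hashicorp/nomad pull 16063 linked in the comment). A small, hedged check for confirming which cgroup hierarchy a host actually runs, useful when deciding whether the workaround applies; these tasks are illustrative and not part of the role:

- name: Detect the cgroup hierarchy mounted at /sys/fs/cgroup
  ansible.builtin.command: stat -fc %T /sys/fs/cgroup
  register: cgroup_fs_type
  changed_when: false

- name: Report hosts running the unified (v2) hierarchy
  ansible.builtin.debug:
    msg: "{{ inventory_hostname }} is on cgroups v2"
  when: cgroup_fs_type.stdout == 'cgroup2fs'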

View File

@@ -71,7 +71,7 @@ job "icloud_backup" {
# Drive destination
root: "icloud"
smtp:
# If you want to recieve email notifications about expired/missing 2FA credentials then uncomment
# If you want to receive email notifications about expired/missing 2FA credentials then uncomment
email: "{{ email_smtp_account }}"
# optional, to email address. Default is sender email.
#to: "receiver@test.com"
@@ -120,7 +120,7 @@ job "icloud_backup" {
photos:
destination: "photos"
remove_obsolete: true
sync_inteval: 172800 # 2 days
sync_interval: 172800 # 2 days
filters:
albums:
# - "album1"

View File

@@ -37,7 +37,7 @@ job "pihole" {
// }
}
task "await_filesytem" {
task "await_filesystem" {
driver = "docker"
config {

View File

@@ -32,104 +32,182 @@ job "recyclarr" {
task "recyclarr" {
env {
TZ = "America/New_York"
TZ = "America/New_York"
RECYCLARR_APP_DATA = "/local"
}
// user = "${meta.PUID}:${meta.PGID}"
driver = "docker"
config {
image = "ghcr.io/recyclarr/recyclarr:2"
image = "ghcr.io/recyclarr/recyclarr:{{ recyclarr_version }}"
hostname = "${NOMAD_TASK_NAME}"
init = true
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/config"
]
} // docker config
// template {
// destination = "local/recyclarr.yml"
// env = false
// change_mode = "restart"
// perms = "644"
// data = <<-EOH
// ---
// # yaml-language-server: $schema=https://raw.githubusercontent.com/recyclarr/recyclarr/master/schemas/config-schema.json
template {
destination = "local/recyclarr.yml"
env = false
change_mode = "restart"
perms = "644"
data = <<-EOH
# yaml-language-server: $schema=https://raw.githubusercontent.com/recyclarr/recyclarr/master/schemas/config-schema.json
// # A starter config to use with Recyclarr. Most values are set to "reasonable defaults". Update the
// # values below as needed for your instance. You will be required to update the API Key and URL for
// # each instance you want to use.
// #
// # Many optional settings have been omitted to keep this template simple.
// #
// # For more details on the configuration, see the Configuration Reference on the wiki here:
// # https://github.com/recyclarr/recyclarr/wiki/Configuration-Reference
# A starter config to use with Recyclarr. Most values are set to "reasonable defaults". Update the
# values below as needed for your instance. You will be required to update the API Key and URL for
# each instance you want to use.
#
# Many optional settings have been omitted to keep this template simple. Note that there's no "one
# size fits all" configuration. Please refer to the guide to understand how to build the appropriate
# configuration based on your hardware setup and capabilities.
#
# For any lines that mention uncommenting YAML, you simply need to remove the leading hash (`#`).
# The YAML comments will already be at the appropriate indentation.
#
# For more details on the configuration, see the Configuration Reference on the wiki here:
# https://recyclarr.dev/wiki/reference/config-reference
// # Configuration specific to Sonarr
// sonarr:
// # Set the URL/API Key to your actual instance
# Configuration specific to Sonarr
sonarr:
series:
base_url: https://sonarr.{{ homelab_domain_name }}/
api_key: {{ sonarr_api_key }}
delete_old_custom_formats: true
// {% raw -%}
// - base_url: http://{{ range service "sonarr" }}{{ .Address }}:{{ .Port }}{{ end }}
// api_key: f7e74ba6c80046e39e076a27af5a8444
// {% endraw -%}
# Quality definitions from the guide to sync to Sonarr. Choices: series, anime
quality_definition:
type: series
// # Quality definitions from the guide to sync to Sonarr. Choice: anime, series, hybrid
// quality_definition: series
# Release profiles from the guide to sync to Sonarr v3 (Sonarr v4 does not use this!)
# Use `recyclarr list release-profiles` for values you can put here.
# https://trash-guides.info/Sonarr/Sonarr-Release-Profile-RegEx/
release_profiles:
- trash_ids:
- EBC725268D687D588A20CBC5F97E538B # Low Quality Groups
- 1B018E0C53EC825085DD911102E2CA36 # Release Sources (Streaming Service)
- 71899E6C303A07AF0E4746EFF9873532 # P2P Groups + Repack/Proper
strict_negative_scores: false
// # Release profiles from the guide to sync to Sonarr.
// # You can optionally add tags and make negative scores strictly ignored
// release_profiles:
// # Series
// - trash_ids:
// - EBC725268D687D588A20CBC5F97E538B # Low Quality Groups
// - 1B018E0C53EC825085DD911102E2CA36 # Release Sources (Streaming Service)
// - 71899E6C303A07AF0E4746EFF9873532 # P2P Groups + Repack/Proper
// # Anime (Uncomment below if you want it)
// # - trash_ids:
// # - d428eda85af1df8904b4bbe4fc2f537c # Anime - First release profile
// # - 6cd9e10bb5bb4c63d2d7cd3279924c7b # Anime - Second release profile
- trash_ids:
- 76e060895c5b8a765c310933da0a5357 # Optionals
filter:
include:
- cec8880b847dd5d31d29167ee0112b57 # Golden rule
- 436f5a7d08fbf02ba25cb5e5dfe98e55 # Ignore Dolby Vision without HDR10 fallback.
# - f3f0f3691c6a1988d4a02963e69d11f2 # Ignore The Group -SCENE
# - 5bc23c3a055a1a5d8bbe4fb49d80e0cb # Ignore so called scene releases
- 538bad00ee6f8aced8e0db5218b8484c # Ignore Bad Dual Audio Groups
- 4861d8238f9234606df6721df6e27deb # Ignore AV1
- bc7a6383cbe88c3ee2d6396e1aacc0b3 # Prefer HDR
- 6f2aefa61342a63387f2a90489e90790 # Dislike retags: rartv, rarbg, eztv, TGx
- 19cd5ecc0a24bf493a75e80a51974cdd # Dislike retagged groups
- 6a7b462c6caee4a991a9d8aa38ce2405 # Dislike release ending: en
- 236a3626a07cacf5692c73cc947bc280 # Dislike release containing: 1-
# - fa47da3377076d82d07c4e95b3f13d07 # Prefer Dolby Vision
// # Configuration specific to Radarr.
// radarr:
// # Set the URL/API Key to your actual instance
// {% raw -%}
// - base_url: http://{{ range service "radarr" }}{{ .Address }}:{{ .Port }}{{ end }}
// api_key: f7e74ba6c80046e39e076a27af5a8444
// {% endraw -%}
# Configuration specific to Radarr.
radarr:
movies:
# Set the URL/API Key to your actual instance
base_url: https://radarr.{{ homelab_domain_name }}/
api_key: {{ radarr_api_key }}
delete_old_custom_formats: true
replace_existing_custom_formats: true
// # Which quality definition in the guide to sync to Radarr. Only choice right now is 'movie'
// quality_definition:
// type: movie
# Which quality definition in the guide to sync to Radarr. Only choice right now is 'movie'
quality_definition:
type: movie
preferred_ratio: 0.5
// # Set to 'true' to automatically remove custom formats from Radarr when they are removed from
// # the guide or your configuration. This will NEVER delete custom formats you manually created!
// delete_old_custom_formats: false
quality_profiles:
- name: "720p/1080p"
reset_unmatched_scores: true
- name: "720p/1080p Remux"
reset_unmatched_scores: true
// custom_formats:
// # A list of custom formats to sync to Radarr. Must match the "trash_id" in the guide JSON.
// - trash_ids:
// - ed38b889b31be83fda192888e2286d83 # BR-DISK
// - 90cedc1fea7ea5d11298bebd3d1d3223 # EVO (no WEBDL)
// - 90a6f9a284dff5103f6346090e6280c8 # LQ
// - dc98083864ea246d05a42df0d05f81cc # x265 (720/1080p)
// - b8cd450cbfa689c0259a01d9e29ba3d6 # 3D
custom_formats:
# Use `recyclarr list custom-formats radarr` for values you can put here.
# https://trash-guides.info/Radarr/Radarr-collection-of-custom-formats/
// # Uncomment the below properties to specify one or more quality profiles that should be
// # updated with scores from the guide for each custom format. Without this, custom formats
// # are synced to Radarr but no scores are set in any quality profiles.
// # quality_profiles:
// # - name: Quality Profile 1
// # - name: Quality Profile 2
// # #score: -9999 # Optional score to assign to all CFs. Overrides scores in the guide.
// # #reset_unmatched_scores: true # Optionally set other scores to 0 if they are not listed in 'names' above.
// EOH
// }
- trash_ids:
# Movie versions
- eca37840c13c6ef2dd0262b141a5482f # 4K Remaster
- 570bc9ebecd92723d2d21500f4be314c # Remaster
- 0f12c086e289cf966fa5948eac571f44 # Hybrid
- 9d27d9d2181838f76dee150882bdc58c # Masters of Cinema
- e0c07d59beb37348e975a930d5e50319 # Criterion Collection
- 957d0f44b592285f26449575e8b1167e # Special Edition
- eecf3a857724171f968a66cb5719e152 # IMAX
- 9f6cbff8cfe4ebbc1bde14c7b7bec0de # IMAX Enhanced
# Unwanted
- b8cd450cbfa689c0259a01d9e29ba3d6 # 3D
- ed38b889b31be83fda192888e2286d83 # BR-DISK
- 90a6f9a284dff5103f6346090e6280c8 # LQ
- bfd8eb01832d646a0a89c4deb46f8564 # Upscaled
- 90cedc1fea7ea5d11298bebd3d1d3223 # EVO (no WEBDL)
- 923b6abef9b17f937fab56cfcf89e1f1 # DV (WEBDL)
- b6832f586342ef70d9c128d40c07b872 # Bad Dual Groups
- ae9b7c9ebde1f3bd336a8cbd1ec4c5e5 # No-RlsGroup
- 7357cf5161efbf8c4d5d0c30b4815ee2 # Obfuscated
- 5c44f52a8714fdd79bb4d98e2673be1f # Retags
- c465ccc73923871b3eb1802042331306 # Line/Mic Dubbed
# Misc
- e7718d7a3ce595f289bfee26adc178f5 # Repack/Proper
- ae43b294509409a6a13919dedd4764c4 # Repack2
# HQ Release Groups
- ed27ebfef2f323e964fb1f61391bcb35 # HD Bluray Tier 01
- c20c8647f2746a1f4c4262b0fbbeeeae # HD Bluray Tier 02
- c20f169ef63c5f40c2def54abaf4438e # WEB Tier 01
- 403816d65392c79236dcb6dd591aeda4 # WEB Tier 02
- af94e0fe497124d1f9ce732069ec8c3b # WEB Tier 03
quality_profiles:
- name: "720p/1080p"
- name: "720p/1080p Remux"
# HDR FORMATS
# ########################
- trash_ids:
- 3a3ff47579026e76d6504ebea39390de # Remux Tier 01
- 9f98181fe5a3fbeb0cc29340da2a468a # Remux Tier 02
- e61e28db95d22bedcadf030b8f156d96 # HDR
- 2a4d9069cc1fe3242ff9bdaebed239bb # HDR (undefined)
quality_profiles:
- name: "720p/1080p"
score: -100
- name: "720p/1080p Remux"
# AUDIO FORMATS
# ########################
- trash_ids:
- 6fd7b090c3f7317502ab3b63cc7f51e3 # 6.1 Surround
- e77382bcfeba57cb83744c9c5449b401 # 7.1 Surround
- f2aacebe2c932337fe352fa6e42c1611 # 9.1 Surround
quality_profiles:
- name: "720p/1080p"
score: -50
- name: "720p/1080p Remux"
score: -50
- trash_ids:
- 89dac1be53d5268a7e10a19d3c896826 # 2.0 Stereo
quality_profiles:
- name: "720p/1080p"
score: 120
- trash_ids:
- 77ff61788dfe1097194fd8743d7b4524 # 5.1 Surround
quality_profiles:
- name: "720p/1080p"
score: 80
- name: "720p/1080p Remux"
score: 80
EOH
}
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
resources {
cpu = 100 # MHz
memory = 300 # MB
} // resources
} // task

View File

@@ -221,7 +221,7 @@ job "reverse-proxy" {
resources {
cpu = 200 # MHz
memory = 110 # MB
memory = 1000 # MB
}
} // task authelia

View File

@@ -108,7 +108,7 @@ job "TEMPLATE" {
}
}
task "await-TEMPLATEdb" {
task "await-TEMPLATEEdb" {
driver = "docker"
config {

View File

@@ -0,0 +1,69 @@
job "valentina" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
// constraint {
// attribute = "${node.unique.name}"
// operator = "regexp"
// value = "rpi(1|2|3)"
// }
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "valentina" {
count = 1
restart {
attempts = 0
delay = "30s"
}
task "valentina" {
env {
PGID = "${meta.PGID}"
PUID = "${meta.PUID}"
TZ = "America/New_York"
VALENTINA_AWS_ACCESS_KEY_ID = "{{ valentina_aws_access_key_id }}"
VALENTINA_AWS_SECRET_ACCESS_KEY = "{{ valentina_aws_secret_access_key }}"
VALENTINA_DISCORD_TOKEN = "{{ valentina_discord_token }}"
VALENTINA_GUILDS = "{{ valentina_guids }}"
VALENTINA_LOG_LEVEL = "DEBUG"
VALENTINA_LOG_LEVEL_AWS = "INFO"
VALENTINA_OWNER_CHANNELS = "{{ valentina_owner_channels }}"
VALENTINA_OWNER_IDS = "{{ valentina_owner_ids }}"
VALENTINA_S3_BUCKET_NAME = "{{ valentina_s3_bucket_name}}"
}
driver = "docker"
config {
image = "ghcr.io/natelandau/valentina:v{{valentina_version}}"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/valentina",
]
} // docker config
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
} // task
} // group
} // job
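The env stanza above fills every Valentina setting and secret (valentina_discord_token, valentina_guids, valentina_owner_ids, and so on) from Ansible variables at template time, which is why the matching values live in the encrypted vault.yml later in this diff. A hedged sketch of how such a vars file might be laid out; the file path and all values below are placeholders, not the repository's actual contents:

# group_vars/all/valentina.yml -- hypothetical layout; real values are vault-encrypted and not shown here
valentina_version: "0.0.0"                 # placeholder; the deployed tag is pinned elsewhere
valentina_discord_token: "CHANGE_ME"       # placeholder secret
valentina_guids: "000000000000000000"      # placeholder Discord guild id
valentina_owner_ids: "000000000000000000"  # placeholder Discord user id
valentina_s3_bucket_name: "example-bucket" # placeholder bucket name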

View File

@@ -28,7 +28,7 @@ job "wikijs" {
}
}
task "await_db_filesytem" {
task "await_db_filesystem" {
constraint {
attribute = "${node.unique.name}"
@@ -56,7 +56,7 @@ job "wikijs" {
}
} // /task
task "await_backup_filesytem" {
task "await_backup_filesystem" {
constraint {
attribute = "${node.unique.name}"
@@ -180,7 +180,7 @@ group "wikijs_app_group" {
}
} // /task
task "await_filesytem" {
task "await_filesystem" {
driver = "docker"
config {

277
vault.yml
View File

@@ -1,118 +1,161 @@
$ANSIBLE_VAULT;1.1;AES256
35366634393265303030366466303232616338633038313738633637383439356439616536666230
6566643530623337323034306366613935663334313934310a636235653531316237393231376362
33663935366131663137363465666363336630386362313333633762656461636239366234633832
3538353463356335360a323030643238323034343666376230356465396639636563316532373638
35366637366663303164376661636563346330313932343462326239626264633262303739383831
31333134613534643265643433323065303833326662346466633931373337326233303633363032
66333336373865313333626566386665653833343638376264356430383764316134333231366466
62336534666565343839393237356139393738396333393337666631303461373362343664396665
63343161613462653866616566363631346566636639626138316539353362616261666337386635
62356262363564376334336163613035643336656331653562306433363161393435343431663137
66663936623834666364303333386335353961373031383164623766323836383462363231396263
34343662336637633262333530623039376534643966653839346236363166646564613333633366
33363534616466393137366234633030663036613263383733313235353364613864316139356330
38343439346661613136316235326430326437646135636637343665663031393262653661396331
39653739666364666564633364636231323237366265323631333234306631386362666135336461
35646564643631666663336237636435626338346663633038353964303764626236373561323763
34643565656462323764623263383037663735323364396437653332376137356263633963306332
39633339366236313063643665356366346138616434316332643731666634366336623064653361
63393134643630313632396434643131646464323737343133613364333465393834656236616134
30313961346236326563616263373463616432393962663262616232356663636439643731383930
32326664306563623665633164373932356163356361663465363362303661396662386630323137
32383333656435613762393430303163383135393037363763333139633239666639303538623134
32386635663962363939373365613138316435366433303863326561613463306338396136393965
34333961383035383135333561313331336565383031356133626530306163333666333564353262
38646434643234303363383965636339323633326330663736393461383461303661353365663631
33343831356135653139633463336330646634363639326635653863343632663466336639313962
65306438613933386664336138613066326364343738633531356664343664646464396162343861
36663030643762343938633564373531663430303536643665613532313630636461646235666335
62613634656232373936363439363766316561373937386261613861396566303834376134666564
35396330636166316239336433323939363839636361643630353263663233303166313863636364
34363134363161643234643134663361373237316466626363646264643530343064666464393166
66366561356336616663393064376162643731343532663436646432366331643066396232393432
63336633313963383132333639626130623737346137646561303338623136306361656630396364
36306234643161643864313334316634396233313831613830393865353763653963656632363865
34346439356166363839343063313263396437366163343734326162346166353465313163313236
35343531333438303561393137323831303063353466666463303835653630353630393836393236
32643035636335363137303134333735343964646130306339663137646261366635353632613533
35303636373465633831353439376464386132616238613336366134383037376165396365353436
66633937656162346661326136343266313937393436353532656634366535653762633930393239
62383862356165336435616666346238646666613066323262323530356534373262633861646466
66643935363334623264373338663362623439313138666338363732386666383739636162653763
32666439316632653633363266343365393366373834323065353335613563306135383432613433
61633835326565386662313265356536613237313364366163313562393836613061616432316638
33376531663533376435383437393539663565616439666438646232663732663063343666646631
36366364353339323262666630363932616461323833306666616365343530646536326363613232
65313031333064396662363736316137656161393865383135366539636432386539623837353634
62313638666564396462666334616365323932396236633932613362633166346265613161363863
39396464663966353565393662363633366237323066306436616437363666666635343265666435
32363162643761666639336464383430366565323862353161303338333232326335303462653938
61333162306132633637653736623033373164343463333933666438326534303730613862383035
39393939323561333738653465306165643461366534313537633162313638393630393361623432
34306264316565333334303633323836343162373738636161656565313134643262343533666434
37623962626263353062333939633662316663316238636331646230313861363364326636653365
37343761626437663832346266333634666361333361313638626639633934646335613062626365
31656132643034303032623365613530306436383437633761636238336139373739313836393336
66653066633962333730643034653032626530663731633462393937326236656362356236666333
62343139646139303433393163613037623963633230366236396434643163316664616435386436
61616237366130663662643162613730303033376334333066393432333032613830316262333763
31663936663239653361633634323736306363323864666635633465376363323838326366663630
39383463343038666564653663616161336266313563633731623335373732343732383164623431
39333262346539373937386531386466373863323232653265383064643863303638326566313765
30376565643462643439316431306331346438633331616437613762323138363061336630353661
61653139333962373261323063386231346266323762306433613363643230366265366239623832
38353562393064633537373761643539313234333136333530343536393033313131393932633637
63383066623930376561656436396564353264643630636332653862613933636630333633396130
33346638663033636436626631323330336430313738313465323737386434613538346564633938
62653236643033313062336664323335656132383430313831636334326430383938653762313835
65656430656363653735343738326534616335636130646539363066393436383961316135346262
30656131353733616562613239663965383864313263653063393635623838633538303433323437
61636162363663306166343464333534316131346231303663336365303363656635363066353131
38623263366136396466383538323637626236633163663033613934303766613931313135613132
39346237656635303166363031353866663833303537666330346130353563373763623530373464
65666338313039633732626564393161663335613264306637646332643133356135613366353264
62376466613037326463623131373937303039356138336337333163303636303335333736376563
63313730623664356165653861303139313039616231326136313634623136613365313466373561
35313466313539383838373838386131653638653430613863313430626139353465626636386539
37383034326561393666383566326364623337376432633864613630353662663665336232313064
31396531313937373336383438646234343661643534316332633163653532633565613136343235
31653739633035303430626364303961626433323835653638653839396231663662636666393563
30346162326561636365393831333435333362356432646161633463313963346537643631393736
66343966313361333934616534313037636238613830623938623563393230376635316631636534
62376136386137313737646135376463343831663162616566373764643930386539306231613964
65383633313037323234616364623139623834303466306666613334346131633531643932623362
31623337636434623531376437623033306235346366376436396336346634303138613262373464
30363735633663363364613139666164383436306666363362346633346663346366393634333335
61326234303734626465616530346339303536636665626436623237383434636362393034346562
64633765363531346138376166393030666433396339333662663036313031306434626236383664
63636633316138653433366337303033636330333761626162373435633062366639396362376537
30313436353964613838323332353137383433323265343831356334393238666438323735313630
66313534646636633866313533353533643531356266643433353137653130386165353936616438
31353331383461313130383035663837646439323366623935386236663262653165313432326639
65316339356661623436386537353335343332616362323463613966383736306638396630653437
62376232353763336365613438383936646265623261306338613663343864363839313663343030
37396537626435303036613531396239353439663930363263373632333536376364336436383961
31613237393430326663366531633633613362373265646437303530656564383830366164643465
31333162373037323836396234383265333832376461383530383139353562666635386661383262
31323162363834376432313766393965373763313664383966346464313865343261333030653033
37326564663836323963663735353432653938373632356564653830616562656333383563366432
32366464316265613565613830633264613634313134373530386562313163656434356164356139
62643065613638323735366332316366383236653762373436393631363039636333346431666137
33643239323062343537353061646138346661643262303363326137356461356439663166653739
39636534653935376433633761373630656566393535373962353762646165663566613235646134
32313838383538363532643965376464626361663431393165663238373762636337366434666437
32636234613264666633366663616639386236386333623766383735383431323964343965643362
39326266366162333266343133646335653837393962633731613230653665366462393931613462
33633966626132633832653634626633393238643238393064646233663064346333653164623336
34373634376335383639346338663830653061386161306134336530376161333637333733666533
63626464393435626361323333656639616431333638383163626662323733613564613430323532
31343262616133333965633462366636333762623764326231346437666634663339393563666664
65653536333834643937326464333464353135363738663031303162396535616139663535336535
65343062646465373831303235303933343030346562633561653534313263333033313531656430
64623833653832323134333138663966313939303739376131383133366131323530353961633765
63386436373262313334323932646232616435646665323736356433376332653530326230346331
63613163343365623937336564643431653963383333363664663934633962316663306537376236
62333531353833326232613565666563613864653364363333613737663965366133383231623839
65323161613533343130316635636630633931633936666662303330326262376233376562633865
3930
35313363316633346434383462613564663535363536343964333334613031333931666535373262
3038666461646465333264346232366563323433373266350a656363346266343662303663346536
38613361633832393432343334636435363832313461303131323031633139376464666536613361
3537326464303937630a363031663065396530346539646638663163393134616234336263656633
30306236366333333437323733623266663163613763633338316630333965323832353930643261
63613065653565363238323933616139363961383865663330623133343537656433346564396435
66623437626639626365656134363538646137373061366563393333613134333035326461333161
65613638626364633165326630346230666131653063653766343534326533663233346234363535
30663366313761336564383962333562646638356466633536373233383538313238376534343466
37613734633963313237376263326530653165613135313739373561343463633837316230313132
62366538303836636336346138376163363933306365656332393532346366313438666537343733
66653531366231326464653764623562633338326639653438356630633035383433613364623336
34346233643435323331663934643037383231613232336639306138653935306563616237393437
34663034396165623763626231326136343530336261306230393431353361393239663766333632
66303561336237623866323232356236306361323965656261313731386638653564663235343237
30303036323736376636666339343539633566626337636433303533306262336532663065383333
66653662653134393866306639613261303463636438316135646437393563663731343063623137
63316537656664666463333766356630353236616339303862316234373236613932346231323137
61306666626364653535303061393938656565306339616662376537316361653435613763376364
38303062653739316230313765323063643564353032653038326631383933363263316332653736
30626361643432666139333034376363303432343864626530393637313063383964626664366631
37373234353764333361666439333462323833313432353337323661313264303732643734373234
65373765643062303464653937326236376263376561643133666663623937356538623730326564
64326165366463646635343565633130356136386266376138396261373162646132323631613063
38653633353134323633343430303561623730663834306565623735633734366161613734353266
65656163646538343439633832353132373531613962306564386363653339393963383733383562
33333662373639633462613561316133373037306264613934616438623362303831633733326431
61633237323338613636623865623036626566353762373966313766623533393861313530313862
66613935333861303835336430346235383466643163376330396430376664386337636364313961
65626337366238353437643736663962616439663630313332363032383330333239393036356338
38646165626332356337303064633334363337356432396638313032613431616364626436623238
38363032393439653663613261363335363534343137653137613332373839633662333030636662
34666137383165346431313664333436366637623632386433393363323336666232633233346235
61633037386664353662326533313862363832376539373564623365326330303437363163613733
61313134396565356366383330313734613036356536363561633361316262653833656435666132
63343236343837373665646662396535366530356163386439326266313038663233363561303039
32363639663466643834656535373834346530643863313963653233363435336436613935363961
33383439613066333034386630653032643131393535326133356639346639623430383063303361
62323230343731643531613031363265633739376639383735366562626561333162396137343332
37393061623234663230643939326436363961386664303430623435346361373136323866653262
37353637356233643833373336303730336132623262333734363338336333316332383264343233
66633466653831343035306539666337336633663637336331643463666530333737643230396632
38323165316138376536333337656462396363643437393037383565323935363830313532313661
37373461396333353166663461313562646365333735353531633432376565326637616231323566
38373966656164646436346331633266653634653833333831383036613864393133376565653863
65306364306232356434653539303066393264636661356230623331616430306661353731313138
62313734303939393530306364393830303039663637383637353738623730336330346236613337
65373337646634613935333962393632376236306639316631356437303466363937353436393138
32616337323163616562666538386563396136646131356439616437366134613238336265306432
31653361613966363432336635623865366335353439623633336461366337393435343036393539
61363262663633366136303132333066316430316235616165646139383835386339376366383766
35306365316662396666616537633365383236666538626131356162323831616165356165353532
32396163633962393331653836353832376165613163336335386662333730626263623761303464
66396661643338613262316537396162366433313864616635343965383331343662396236393834
38313761653264343061666337343363653731393435366335323537613938313564613436383530
61623531383331343232663431306438636465343262353535396439656464336631353462623934
61383334613839393432303336613135316638613634316664653262663862356239336462643534
34323566313964346231303631313239393263623362633966613961613566316239353964333765
39633033643335393637363138326533336464323836313332383765643366313962383332306338
30656661316633366365386365636161613765333737613134333861663539313463626365363932
38366531326665316435663363356532303936313734613462616232316665356435663838306338
64346538663266613966613766376662663032633764616161303533336561646636613032383933
63396336353565343364356532363433623466396232333739666666616136636234616164363938
39316536343563343138376166653761613739366564623161633261636133613631346138613734
66313362633738313736333930666538313833666431303462643938663732323166306662306665
37623231316134323965633766376561363234666330326139663763353038323064306231646230
64306365646633363434363064623435653166666337353932623663303634366163383337306465
31383463363039306233336335626635393931623061346364633935306432373432346430366336
36626534633664356266316634363165386365366562363237636532393639343539353639386466
37633438633234393433633337303130306464356139613132623839363833633532313561323330
65353064386239383934343563386366653161623830363631653435653537653831643234363435
63613338303264626339343136346131333062366632623730376532653362386136363938343761
39363039383535663666643236363964643332363131633134316631633337666235656334343366
38656430663337313231623134333232393730343063653432383530653865633137363866646434
31386337353831383434666466326364363562316538336633366361376539316465623331643431
63353333633563646633356639633066353134623963316133333165323965653666346530656536
32303235376631343931306361363937363262343664316239363766663961653465623862643461
65626434316135373333323230616137636264306437383332623261633662623632346139633038
62393161383931626533303162363166373330653739643935653039356331346237373139633163
36353361666330346137383337383362336664633535643535666536306537626532303432393734
33656139336437346333383539383931303932353462643432643366303366666134616539633339
34313261393863366435366130373737373237343232646462373336616539316565376465366136
38343032653532383433613863346366313064383534366364393132346538663531383531323963
63353962323964333466386530316438623735356335653430376630313564626630353365363634
32353636663734653232623964663730383263366164613937663761613634303561643334323164
65336366316261626130313530393739393966623863613366656461383636653435316166366434
65663763356162646530366462636262356237643163336561376531306639626235656430376332
65353039613566633561643365396664656266633831656263643261383133373734613532316266
30386237393437646264386239613130323164343462393134653132666533306638323434336432
39336332653661363435663662373231643931313036613733666464623538616261666433643961
66613430363664363838663263313063323835386638636239316164646139613536336332336635
39663730306261303538613066643137313437393366633531373133363866306132353435376462
37663765333438656535636336633030336330383262663239313862396137666530643532636637
32643364326563386437353062303534356363383237396264306139363038326134623835373234
38336366616561643034326234346261386564656433663634613637356338363037333461303232
31653963376338386236326631643835376234356434336130393831393865383563663033663761
38623637353536393162333138376662373862636431323663623461353332363361376231393166
39633766353863323364636538623432306230653638306435336465633939663164376664383735
37366537343734363361316235313633626436376139383336313736666435656638366566323166
36663832633933346336653435393538643834393665663434326230346135306332373966646331
35383937353830316465656661663231623532636462663530346333663934333335613831323838
66613964343031646230366633656436356534633438386134313562383462646131333435346136
38373565326333376137376537303433383335306662653765656564396464336363393937663865
64393933336165306463393162643435663430343461316231656462623037333766613830323938
39643263353235363364306135623364333330633235346236313939316533353561343066653030
66666537636264346361386561633737393561316665316462633437353033623039343263363535
64396564386666373137643763396130306434306433373937316262306231643332396339326438
35623231646535306562316231366439363766653338396539336161343663623564346539643865
32323234633035643965353065386433643438393731663563363462353662356630316638363030
64656338356132616331636437636364396635613164343264643432633662343732393366653639
35666464663963396234333635303263363564393065356234363638636430623035346234333836
35353936373664636331366362336466393363633561373538623463343636346566316561653033
34396665306330333265663539393638383639303466373432383931636361346530303239333162
61623233643537386164623433636434646166633131316134356331336230646439616438333239
64386664643365633330633365646135373961386362613034353835303531643036386364646135
33613264366337383532663461663135326633643739396162383630313237623165376362346534
32363663656238616265646331393838316466663838653138353934356439616630363431386636
30623364393433313236383934353564356238316432343034313030613665363234383765303063
65326434643630646136656535646562303261623133303833373465663736646435376335643331
31653564333063646161663731316564376432663330623861306161643361636362313436643764
35643738323065626334346333616236393462363734313237393034343332316232383466343163
38363366336136623735386666653535323632363138653966646265666662326138356364613063
62383338623861393633646164393263313462633236666236383537323564383039636261626532
62396232363137663439653165666461663965613366373631653032366361626163643634666537
34656263613934303066643339343537383339393864306236353536313133346630613038346638
66383466393466346232623331303038663639333765613366343338396264343766663336643633
65653535666135656530323036666333376663316436633334356162343936636139346332316536
37343831303238393466666436396133313236363531356263326630393636303763616433666162
34306533613430386134633636663137663036366531663061303865633630633965613134646564
30613835616234353239363735366139303339326137633038616465343837666533616164386339
65373866313664653035343063656437643930316130316130666662626130373463326365396530
36313931363531636335366532373539646530623439326331616431333463393932343661653236
35373061306665313938353565626537333963613065363630646531373133613362616238653736
32656265326633623265396630393134643566636662363963393230363333666439613032363630
64323630396561353065363462396331666637306637613833313962303838653637386434613233
66353461303735386432656330646662363634643438343766646436386133316263643735623336
37376434356537313431306539643533613932323537386230373362396135636336393362336430
65326266303337393063613766313062343835333735373563626361373263353563346331663435
34353462656330326232303935383831366534373336313233663635643638326565333834363839
33346238383538303464626439316361646265393631646261343736356362616262333566366363
65383436626464363665616161636236666430393666353236333538383265313535383634366330
30363633333465336363653766333231363633626335356334336561313438336665346664356562
30356232326637616663316132356131396564363638636633623638386266336263373462383361
35663835333736336632646164356161303131633766353238666464653864366437303665343066
61373539383632613837346266393932613837383732373037316634396235373731383131623764
39383333356431613565366431653138646366633434353431313738653631313565333465363364
31316232363735383864386661303939313231663862346636643834346138653038636433373537
39343363376639626134356139363163656662343835626366353564666461636462666661656632
37373166656661653961303731396234336533316162333465626232383061656633303066663831
66653438393839366162313936623766653735613038366363366339643632313639636562316663
38373334626539343136363165626530353334623062336265346132326533336338303935616137
65393763353131613638326131633032363936313461633031326339353364363062346430316632
34313833616662396166323636623938303832353763373264373362646565356534316138303530
62326463383436356237626565346234363036373236356564643435643061663763653165626465
32366362653231663239626135366638633034643838613362393862613663336331373365303666
37666332626630343062626535366564393332323034363738653664393338396135393437383961
64333634346436326638616634623237383933666335663236633039346265383530663936613332
66316131323030363662376435343738616233396261653632636333663536303438353738313433
64666437346663623064326563333964376465373565363431323631376536313234356338643663
61343730396234393361