Compare commits


31 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| pre-commit-ci[bot] | b0dca5c982 | [pre-commit.ci] pre-commit autoupdate<br>updates:<br>[github.com/commitizen-tools/commitizen: v3.18.4 → v4.1.0](https://github.com/commitizen-tools/commitizen/compare/v3.18.4...v4.1.0)<br>[github.com/pre-commit/pre-commit-hooks: v4.5.0 → v5.0.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.5.0...v5.0.0)<br>[github.com/crate-ci/typos: v1.19.0 → dictgen-v0.3.1](https://github.com/crate-ci/typos/compare/v1.19.0...dictgen-v0.3.1)<br>[github.com/ansible/ansible-lint: v24.2.1 → v24.12.2](https://github.com/ansible/ansible-lint/compare/v24.2.1...v24.12.2) | 2025-01-06 22:20:51 +00:00 |
| Nathaniel Landau | d9dfbb5152 | fix: remove homebrew casks | 2024-03-18 16:05:17 -04:00 |
| Nathaniel Landau | 6e8b39aef9 | fix: favor sudoers.d over lines in /etc/sudoers | 2024-03-18 16:03:44 -04:00 |
| Nathaniel Landau | 8734731355 | fix: nomad handler runs correctly | 2024-03-18 16:02:57 -04:00 |
| Nathaniel Landau | 9a47eb1f06 | build: update dependencies | 2024-03-15 21:25:01 -04:00 |
| Nathaniel Landau | b40521919a | build: update dependencies | 2024-03-14 11:42:38 -04:00 |
| Nathaniel Landau | 855ff49ef1 | feat: add speedtest-tracker to nomad services | 2024-02-11 16:49:19 -05:00 |
| Nathaniel Landau | 12d1467369 | feat: add hishtory-server to nomad services | 2024-02-11 16:48:51 -05:00 |
| Nathaniel Landau | 55dea40077 | fix(recyclarr): rules for sonarr v4 | 2024-02-09 16:26:50 -05:00 |
| Nathaniel Landau | feb1fbedf4 | fix: update job files | 2024-01-09 08:53:40 -05:00 |
| Nathaniel Landau | 6b00bf557c | feat: purge dangling Docker containers on job sync | 2024-01-09 08:51:58 -05:00 |
| Nathaniel Landau | ea9678eba6 | build(vscode): use Ansible extension | 2023-12-29 10:19:57 -05:00 |
| Nathaniel Landau | 1714dff877 | feat: add gitea | 2023-12-28 15:47:10 -05:00 |
| Nathaniel Landau | 7bde47d43a | fix: cleanup jobs | 2023-12-24 12:35:50 -05:00 |
| Nathaniel Landau | 394b34a5d1 | fix: update jobs | 2023-12-23 14:11:48 -05:00 |
| Nathaniel Landau | 846fb2bc31 | fix: fix nomad configuration | 2023-12-11 11:22:54 -05:00 |
| Nathaniel Landau | eff9059bac | feat(jobs): add jellyfin | 2023-12-11 11:22:20 -05:00 |
| Nathaniel Landau | 5e35cf4400 | fix: add ladders | 2023-11-08 13:50:37 -05:00 |
| Nathaniel Landau | 7f94a62989 | fix(sabnzbd): increase memory/cpu and add cron docker plugin | 2023-10-22 16:52:40 -04:00 |
| Nathaniel Landau | 95f4c129ba | build(deps): update dependencies | 2023-10-21 22:24:01 -04:00 |
| Nathaniel Landau | 9a46bc9ebc | feat(sabnzbd): add nomad job for sabnzbd | 2023-10-21 22:19:23 -04:00 |
| Nathaniel Landau | 5b426da3ae | feat: depreciate consul and use nomad service discovery | 2023-10-21 22:18:23 -04:00 |
| Nathaniel Landau | 041649cc5e | build(deps): bump dependencies | 2023-09-15 16:34:28 -04:00 |
| Nathaniel Landau | ce0cb6c5f1 | fix(valentina): add new env vars | 2023-09-03 15:50:24 -04:00 |
| Nathaniel Landau | 98d9a5a86f | build(precommit): add typos checks | 2023-08-30 08:34:07 -04:00 |
| Nathaniel Landau | f7ba237d0d | build(deps): update dependencies | 2023-08-28 09:14:37 -04:00 |
| Nathaniel Landau | e134616692 | fix(recyclarr): migrate to v5 | 2023-08-28 08:49:03 -04:00 |
| Nathaniel Landau | 9194190591 | build(deps): bump dependencies | 2023-08-08 09:30:49 -04:00 |
| Nathaniel Landau | 2bb55f3d51 | fix(valentina): update environment variables | 2023-08-08 09:26:58 -04:00 |
| Nathaniel Landau | 7365e8b3d6 | fix(ansible): update transfer method config | 2023-08-08 09:25:55 -04:00 |
| Nathaniel Landau | 87c2a4e1b4 | fix(consul): pull image from hashicorp/consul | 2023-08-08 09:25:04 -04:00 |
71 changed files with 3631 additions and 3055 deletions


@@ -12,21 +12,21 @@ exclude_paths:
- vault.yml
- .venv/
- ansible_collections/
skip_list:
- name[template]
- ignore-errors
- meta-incorrect
- meta-no-info
- package-latest
- role-name
- unnamed-task
- var-naming
- latest[git]
- yaml[indentation]
# - name[template]
# - ignore-errors
# - meta-incorrect
# - meta-no-info
# - package-latest
# - role-name
# - unnamed-task
# - var-naming
# - latest[git]
warn_list:
- experimental
- risky-file-permissions
- command-instead-of-module
- no-changed-when
- command-instead-of-shell
# warn_list:
# - experimental
# - risky-file-permissions
# - command-instead-of-module
# - no-changed-when
# - command-instead-of-shell
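
With the global `skip_list` and `warn_list` commented out, rule violations must now be suppressed inline at the task that triggers them, which is exactly what the task-file diffs further down do with `# noqa` comments. A minimal sketch of the pattern (hypothetical task, mirroring the `name[template]` suppressions elsewhere in this compare):

```yaml
# Hypothetical task showing inline suppression now that the global
# skip_list is gone: the trailing comment silences the name[template]
# rule for this task only, leaving every other rule active.
- name: "Create {{ consul_opt_dir }} directories" # noqa: name[template]
  become: true
  ansible.builtin.file:
    path: "{{ consul_opt_dir }}"
    state: directory
    mode: "0755"
```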


@@ -1,12 +1,12 @@
---
repos:
- repo: "https://github.com/commitizen-tools/commitizen"
rev: 3.2.2
rev: v4.1.0
hooks:
- id: "commitizen"
- repo: "https://github.com/pre-commit/pre-commit-hooks"
rev: v4.4.0
rev: v5.0.0
hooks:
- id: check-added-large-files
- id: check-ast
@@ -31,7 +31,7 @@ repos:
args: [--markdown-linebreak-ext=md]
- repo: "https://github.com/adrienverge/yamllint.git"
rev: v1.31.0
rev: v1.35.1
hooks:
- id: yamllint
files: \.(yaml|yml)$
@@ -43,6 +43,21 @@ repos:
)\.(yaml|yml)$
entry: yamllint --strict --config-file .yamllint.yml
- repo: "https://github.com/crate-ci/typos"
rev: dictgen-v0.3.1
hooks:
- id: typos
- repo: "https://github.com/ansible/ansible-lint"
rev: v24.12.2
hooks:
- id: ansible-lint
additional_dependencies:
- ansible
args:
- --config-file
- .ansible-lint.yml
- repo: local
hooks:
- id: vault-pre-commit
@@ -50,17 +65,14 @@ repos:
entry: scripts/ansible-vault-precommit.sh
language: system
# This calls a custom script. Remove if you don't need it.
- id: stopwords
name: check stopwords
entry: scripts/stopwords.sh
name: stopwords
entry: git-stopwords
# args: ["-v"]
language: system
- id: ansible-lint
name: running ansible-lint
language: system
files: \.(yaml|yml)$
pass_filenames: false
entry: ansible-lint --force-color --config-file .ansible-lint.yml
pass_filenames: true
types: [text]
- id: "lint-shellscript-templates"
name: lint shellscript templates
@@ -68,12 +80,6 @@ repos:
files: \.sh\.j2$
entry: shellcheck -x --exclude=1009,1054,1056,1072,1073,1083,2001,2148
- id: "run-shellscripts-bats-tests"
name: run bats unit tests
language: system
files: \.bats$
entry: bats -t
- id: "ansible-encryption-check"
name: Ansible Encryption Check
language: system
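
Net effect of this file's hunks: ansible-lint moves from a repo-local hook to the upstream pre-commit hook, and the stopwords hook now calls a `git-stopwords` executable instead of `scripts/stopwords.sh`. Reassembled from the added lines, the new ansible-lint hook entry reads:

```yaml
- repo: "https://github.com/ansible/ansible-lint"
  rev: v24.12.2
  hooks:
    - id: ansible-lint
      additional_dependencies:
        - ansible
      args:
        - --config-file
        - .ansible-lint.yml
```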

.typos.toml (new file)

@@ -0,0 +1,9 @@
[default]
default.locale = "en_us"
[default.extend-words]
Hashi = "Hashi" # Hashicorpt
hishtory = "hishtory" # Used for the hishtory package
[files]
extend-exclude = ["galaxy-roles/"]

.vscode/settings.json (vendored)

@@ -1,5 +1,10 @@
{
"yaml.schemas": {
"https://raw.githubusercontent.com/ansible-community/schemas/main/f/ansible.json#/$defs/playbook": "file:///Users/natelandau/repos/ansible-homelab-config/main.yml"
}
"files.associations": {
"**/tasks/*.yml": "ansible",
"**/handlers/*.yml": "ansible",
"main.yml": "ansible",
"inventory.yml": "ansible",
"default_variables.yml": "ansible",
"vault.yml": "ansible"
}
}


@@ -41,14 +41,13 @@ This playbook adds storage, services, applications, and configurations to a prev
- Custom shell scripts for backups and housekeeping
* **Syncs Nomad and Docker Compose job files** to servers:
- [ASN-to-IP](https://hub.docker.com/r/ddimick/asn-to-ip) - Used by Opnsense to build firewall aliases
- [Authelia](https://www.authelia.com/) - Open-source full-featured authentication server
- [Changedetection.io](https://github.com/dgtlmoon/changedetection.io) - Website change detection monitoring and notification service
- [Diun](https://crazymax.dev/diun/) - Docker Image Update Notifier is a CLI application
- [FreshRSS](https://freshrss.org/) - A containerized RSS reader
- [Gitea](https://about.gitea.com/) - Self-hosted Git service
- [Grafana](https://grafana.com/) - Operational dashboards
- [Grafana Loki](https://grafana.com/oss/loki/) - Log aggregation system
- [Headless Trunk](https://github.com/alpeware/chrome-headless-trunk) - Headless Chromium
- [iCloud Drive Docker](https://github.com/mandarons/icloud-drive-docker) - Backup files and photos from Apple iCloud
- [InfluxDB](https://www.influxdata.com/) - Time series database
- [Lidarr](https://lidarr.audio/) - Music collection manager


@@ -1,7 +1,7 @@
[defaults]
nocows = True
roles_path = ./galaxy-roles:./roles
collections_paths = ./
collections_path = ./
inventory = ./inventory.yml
stdout_callback = yaml
any_errors_fatal = True
@@ -9,4 +9,4 @@ display_skipped_hosts = False
vault_password_file = ./.password_file
[ssh_connection]
scp_if_ssh = True
transfer_method = smart


@@ -1,18 +1,22 @@
# yamllint disable rule:indentation
---
# ---------------------------------- SOFTWARE VERSIONS
authelia_version: 4.37.5
consul_version: 1.15.1
influxdb_version: 1.8.10
nomad_version: 1.5.6
prometheus_verssion: 2.42.0
backup_mongodb_version: 1.1.0
consul_version: 1.16.1
gitea_version: 1.21.6
influxdb_version: 1.11.1
nomad_version: 1.7.6
prometheus_verssion: 2.46.0
recyclarr_version: 6.0.2
speedtest_cli_version: 1.2.0
tdarr_installer_version: 2.00.13
telegraf_version: 1.25.3
traefik_version: "v2.10.1"
valentina_version: 0.3.2
telegraf_version: 1.29.5
traefik_version: 2.10.7
valentina_version: 2.2.1
sabnzbd_version: 4.2.2
# ---------------------------------- SERVICE STATIC PORT MAPPINGS
authelia_port: "9091"
influxdb_port: "8086"
tdarr_node_port: "8267"
tdarr_server_port: "8266"
@@ -21,6 +25,7 @@ tdarr_webui_port: "8265"
# ---------------------------------- DIRECTORIES FOR SERVICE LOCAL STORAGE
# These folders must be created, even if empty, to allow mounting nomad local storage end-points
service_localfs_dirs:
- gitea
- influxdb
- lidarr
- prowlarr
@@ -33,12 +38,27 @@ rpi_usb_drive_mount_point: /mnt/usbDrive
rpi_localfs_service_storage: "{{ rpi_usb_drive_mount_point }}/docker"
rpi_nfs_mount_point: /mnt
rpi_nfs_mounts_list:
- { local: "{{ rpi_nfs_mount_point }}/pi-cluster", src: "10.0.30.6:/volume1/pi-cluster" }
- { local: "{{ rpi_nfs_mount_point }}/syncthing", src: "10.0.30.6:/volume1/syncthing" }
- { local: "{{ rpi_nfs_mount_point }}/media", src: "10.0.30.6:/volume1/media" }
- { local: "{{ rpi_nfs_mount_point }}/nate", src: "10.0.30.6:/volume1/nate" }
- {
local: "{{ rpi_nfs_mount_point }}/pi-cluster",
src: "10.0.30.6:/volume1/pi-cluster",
}
- {
local: "{{ rpi_nfs_mount_point }}/syncthing",
src: "10.0.30.6:/volume1/syncthing",
}
- {
local: "{{ rpi_nfs_mount_point }}/media",
src: "10.0.30.6:/volume1/media",
}
- {
local: "{{ rpi_nfs_mount_point }}/nate",
src: "10.0.30.6:/volume1/nate",
}
rpi_nfs_mounts_remove:
- { local: "{{ rpi_nfs_mount_point }}/downloads", src: "10.0.30.6:/volume1/downloads" }
- {
local: "{{ rpi_nfs_mount_point }}/downloads",
src: "10.0.30.6:/volume1/downloads",
}
# mac_autofs_type is one of 'smb,nfs,afp'
mac_autofs_type: smb
@@ -46,18 +66,36 @@ mac_localfs_service_storage: "/Users/{{ ansible_user }}/Library/docker"
mac_storage_mount_point: /System/Volumes/Data/mnt
mac_keep_alive_file: "{{ mac_storage_mount_point }}/pi-cluster/keepalive.txt"
mac_nfs_mounts_list:
- { local: "{{ mac_storage_mount_point }}/pi-cluster", src: "10.0.0.6:/volume1/pi-cluster" }
- { local: "{{ mac_storage_mount_point }}/syncthing", src: "10.0.0.6:/volume1/syncthing" }
- { local: "{{ mac_storage_mount_point }}/media", src: "10.0.0.6:/volume1/media" }
- { local: "{{ mac_storage_mount_point }}/nate", src: "10.0.0.6:/volume1/nate" }
- {
local: "{{ mac_storage_mount_point }}/pi-cluster",
src: "10.0.0.6:/volume1/pi-cluster",
}
- {
local: "{{ mac_storage_mount_point }}/syncthing",
src: "10.0.0.6:/volume1/syncthing",
}
- {
local: "{{ mac_storage_mount_point }}/media",
src: "10.0.0.6:/volume1/media",
}
- {
local: "{{ mac_storage_mount_point }}/nate",
src: "10.0.0.6:/volume1/nate",
}
# Add mounts to remove from auto_nfs to the dict below if needed
mac_nfs_mounts_remove:
# - { local: "{{ mac_storage_mount_point }}/pi-cluster", src: "10.0.0.6:/volume1/pi-cluster" }
mac_afp_or_smb_mounts_list:
- { local: "{{ mac_storage_mount_point }}/pi-cluster", src: "10.0.0.6:/pi-cluster" }
- { local: "{{ mac_storage_mount_point }}/syncthing", src: "10.0.0.6:/syncthing" }
- {
local: "{{ mac_storage_mount_point }}/pi-cluster",
src: "10.0.0.6:/pi-cluster",
}
- {
local: "{{ mac_storage_mount_point }}/syncthing",
src: "10.0.0.6:/syncthing",
}
- { local: "{{ mac_storage_mount_point }}/media", src: "10.0.0.6:/media" }
- { local: "{{ mac_storage_mount_point }}/nate", src: "10.0.0.6:/nate" }
@@ -82,47 +120,26 @@ mac_tdarr_file_location: "/Users/{{ ansible_user }}/Library/tdarr"
# ---------------------------------- PACKAGES
apt_packages_list:
- bc
- coreutils
- curl
- dnsutils
- exa
- fzf
- git
- git-extras
- htop
- iftop
- iotop
- iperf
- jq
- less
- lnav
- logrotate
- lsof
- nano
- net-tools
- nmap
- openssh-server
- p7zip-full
- python3-pip
- rsync
- shellcheck
- unzip
- wget
- yamllint
- zsh
- tailscale
homebrew_package_list:
- ansible
- ansible-lint
- bash
- bash-completion
- bashdb
- bat
- bats-core
- coreutils
- diff-so-fancy
- exa
- ffmpeg
- findutils
- fping
@@ -136,17 +153,12 @@ homebrew_package_list:
- gnutls
- gpg
- grep
- handbrake
- htop
- httpie
- iperf
- jq
- nano
- ncurses
- nmap
- openssl
- pandoc
- prettier
- readline
- shellcheck
- shfmt
@@ -154,11 +166,4 @@ homebrew_package_list:
- sqlite
- ssh-copy-id
- tealdeer
- tree
- wget
- yamllint
- zsh
homebrew_cask_install_dir: /Applications
homebrew_casks_list:
- lingon-x


@@ -53,13 +53,13 @@
- name: Restart nomad (Debian)
become: true
ansible.builtin.systemd:
ansible.builtin.systemd_service:
name: nomad
enabled: true
state: restarted
register: nomad_service
failed_when: nomad_service.rc > 0
changed_when: nomad_service.rc == 0
# failed_when: nomad_service.Result != "success"
# changed_when: nomad_service.Result == "success"
when:
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
@@ -90,9 +90,11 @@
- name: "Ensure nomad is really running"
ansible.builtin.shell:
cmd: "set -o pipefail && sleep 10 && /usr/local/bin/nomad node status -self -short | grep {{ inventory_hostname }}"
args:
executable: /bin/bash
register: node_status_response
failed_when: node_status_response.rc > 0
changed_when: node_status_response.rc == 0
changed_when: false
when: "'nostart' not in ansible_run_tags"
listen: "restart nomad"
# - name: "Ensure sure Nomad service is really running"


@@ -42,7 +42,7 @@ all:
pis:
hosts:
rpi1:
ansible_host: 10.0.30.91
ansible_host: "{{ rpi1_ip_address }}"
ansible_user: "{{ pi_username }}"
ansible_become_pass: "{{ pi_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/rpi1"
@@ -58,7 +58,7 @@ all:
manage_apt_packages_list: true
ansible_ssh_extra_args: "-o IdentitiesOnly=yes"
rpi2:
ansible_host: 10.0.30.92
ansible_host: "{{ rpi2_ip_address }}"
ansible_user: "{{ pi_username }}"
ansible_become_pass: "{{ pi_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/rpi2"
@@ -72,7 +72,7 @@ all:
manage_apt_packages_list: true
ansible_ssh_extra_args: "-o IdentitiesOnly=yes"
rpi3:
ansible_host: 10.0.30.93
ansible_host: "{{ rpi3_ip_address }}"
ansible_user: "{{ pi_username }}"
ansible_become_pass: "{{ pi_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/rpi3"
@@ -86,7 +86,7 @@ all:
manage_apt_packages_list: true
ansible_ssh_extra_args: "-o IdentitiesOnly=yes"
rpi4:
ansible_host: 10.0.30.94
ansible_host: "{{ rpi4_ip_address }}"
ansible_user: "{{ pi_username }}"
ansible_become_pass: "{{ pi_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/rpi4"
@@ -102,11 +102,11 @@ all:
macs:
hosts:
macmini:
ansible_host: 10.0.0.4
ansible_host: "{{ macmini_ip_address }}"
ansible_user: "{{ my_username }}"
ansible_become_pass: "{{ mac_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/macMini"
ansible_python_interpreter: "/usr/local/bin/python3"
ansible_python_interpreter: "/Users/natelandau/.pyenv/shims/python"
ansible_port: 22
mac_intel: true
is_nomad_client: true
@@ -117,7 +117,7 @@ all:
manage_homebrew_package_list: true
ansible_ssh_extra_args: "-o IdentitiesOnly=yes"
imac:
ansible_host: 10.0.0.25
ansible_host: "{{ imac_ip_address }}"
ansible_user: "{{ my_username }}"
ansible_become_pass: "{{ mac_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/imac"
@@ -129,7 +129,7 @@ all:
is_shared_storage_client: true
ansible_ssh_extra_args: "-o IdentitiesOnly=yes"
skimmbook:
ansible_host: 10.0.0.21
ansible_host: "{{ skimmbook_ip_address }}"
ansible_user: "{{ my_username }}"
ansible_become_pass: "{{ mac_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/skimmbook"
@@ -140,22 +140,11 @@ all:
is_tdarr_node: true
is_shared_storage_client: true
ansible_ssh_extra_args: "-o IdentitiesOnly=yes"
vpnmac:
ansible_host: 10.0.90.2
ansible_user: "{{ my_username }}"
ansible_become_pass: "{{ mac_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/skimmbook"
ansible_python_interpreter: "/Users/natelandau/.pyenv/shims/python"
ansible_port: 22
mac_arm: true
manage_homebrew_package_list: true
is_tdarr_node: true
ansible_ssh_extra_args: "-o IdentitiesOnly=yes"
nas:
hosts:
synology:
ansible_host: 10.0.0.6
synology_second_ip: 10.0.30.6
ansible_host: "{{ synology_ip_address_1 }}"
synology_second_ip: "{{ synology_ip_address_2 }}"
ansible_user: "{{ my_username }}"
ansible_become_pass: "{{ synology_become_pass }}"
ansible_ssh_private_key_file: "{{ ssh_key_location }}/synology"
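
The inventory rewrite replaces every hard-coded address with a variable (`rpi1_ip_address`, `synology_ip_address_1`, and so on), so the concrete IPs can live in the encrypted vault.yml rather than the committed inventory. A sketch of what the corresponding vault entries would look like, using the values visible on the removed lines (the real vault file is encrypted and not part of this diff):

```yaml
# Illustrative vault.yml entries only -- reconstructed from the removed
# inventory lines; the actual file is managed with ansible-vault.
rpi1_ip_address: 10.0.30.91
rpi2_ip_address: 10.0.30.92
rpi3_ip_address: 10.0.30.93
rpi4_ip_address: 10.0.30.94
macmini_ip_address: 10.0.0.4
imac_ip_address: 10.0.0.25
skimmbook_ip_address: 10.0.0.21
synology_ip_address_1: 10.0.0.6
synology_ip_address_2: 10.0.30.6
```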


@@ -1,6 +1,6 @@
---
- hosts: all
name: "Running playbook"
- name: "Running playbook"
hosts: all
serial: 1
vars_files:
@@ -76,4 +76,5 @@
when: is_tdarr_server or is_tdarr_node
handlers:
- ansible.builtin.import_tasks: handlers/main.yml
- name: "Run handlers"
ansible.builtin.import_tasks: handlers/main.yml

poetry.lock (generated)

File diff suppressed because it is too large.


@@ -7,31 +7,26 @@
version = "0.2.0"
[tool.poetry.dependencies]
ansible = "^7.5.0"
ansible-lint = { version = "^6.12.1", markers = "platform_system != 'Windows'" }
commitizen = "^2.40.0"
poethepoet = "^0.18.1"
pre-commit = "^3.0.4"
python = "^3.9"
yamllint = "^1.29.0"
[tool.poetry.group.dev.dependencies]
black = "^23.3.0"
sh = "^2.0.4"
ansible = "^9.3.0"
ansible-lint = { version = "^24.2.1", markers = "platform_system != 'Windows'" }
commitizen = "^3.18.3"
jmespath = "^1.0.1"
poethepoet = "^0.25.0"
pre-commit = "^3.6.2"
python = "^3.11"
typos = "^1.19.0"
yamllint = "^1.35.1"
[build-system]
build-backend = "poetry.core.masonry.api"
requires = ["poetry-core"]
[tool.black]
line-length = 100
[tool.commitizen]
bump_message = "bump(release): v$current_version → v$new_version"
tag_format = "v$version"
update_changelog_on_bump = true
version = "0.2.0"
version_files = ["pyproject.toml:version"]
version_provider = "poetry"
[tool.poe.tasks]
pb = """


@@ -1,821 +0,0 @@
#!/usr/bin/env bash
# shellcheck disable=SC2317
_mainScript_() {
_customStopWords_() {
# DESC: Check if any specified stop words are in the commit diff. If found, the pre-commit hook will exit with a non-zero exit code.
# ARGS:
# $1 (Required): Path to file
# OUTS:
# 0: Success
# 1: Failure
# USAGE:
# _customStopWords_ "/path/to/file.sh"
# NOTE:
# Requires a plaintext stopword file located at
# `~/.git_stop_words` containing one stopword per line.
[[ $# == 0 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"
local _gitDiffTmp
local FILE_TO_CHECK="${1}"
_gitDiffTmp="${TMP_DIR}/${RANDOM}.${RANDOM}.${RANDOM}.diff.txt"
if [ -f "${STOP_WORD_FILE}" ]; then
if [[ $(basename "${STOP_WORD_FILE}") == "$(basename "${FILE_TO_CHECK}")" ]]; then
debug "$(basename "${1}"): Don't check stop words file for stop words."
return 0
fi
debug "$(basename "${FILE_TO_CHECK}"): Checking for stop words..."
# remove blank lines from stopwords file
sed '/^$/d' "${STOP_WORD_FILE}" >"${TMP_DIR}/pattern_file.txt"
# Check for stopwords
if git diff --cached -- "${FILE_TO_CHECK}" | grep -i -q "new file mode"; then
if grep -i --file="${TMP_DIR}/pattern_file.txt" "${FILE_TO_CHECK}"; then
return 1
else
return 0
fi
else
# Add diff to a temporary file
git diff --cached -- "${FILE_TO_CHECK}" | grep '^+' >"${_gitDiffTmp}"
if grep -i --file="${TMP_DIR}/pattern_file.txt" "${_gitDiffTmp}"; then
return 1
else
return 0
fi
fi
else
notice "Could not find git stopwords file expected at '${STOP_WORD_FILE}'. Continuing..."
return 0
fi
}
# Don't lint binary files
if [[ ${ARGS[0]} =~ \.(jpg|jpeg|gif|png|exe|zip|gzip|tiff|tar|dmg|ttf|otf|m4a|mp3|mkv|mov|avi|eot|svg|woff2?|aac|wav|flac|pdf|doc|xls|ppt|7z|bin|dmg|dat|sql|ico|mpe?g)$ ]]; then
_safeExit_ 0
fi
if ! _customStopWords_ "${ARGS[0]}"; then
error "Stop words found in ${ARGS[0]}"
_safeExit_ 1
fi
}
# end _mainScript_
# ################################## Flags and defaults
# Required variables
LOGFILE="${HOME}/logs/$(basename "$0").log"
QUIET=false
LOGLEVEL=ERROR
VERBOSE=false
FORCE=false
DRYRUN=false
declare -a ARGS=()
# Script specific
LOGLEVEL=NONE
STOP_WORD_FILE="${HOME}/.git_stop_words"
shopt -s nocasematch
# ################################## Custom utility functions (Pasted from repository)
# ################################## Functions required for this template to work
_setColors_() {
# DESC:
# Sets colors use for alerts.
# ARGS:
# None
# OUTS:
# None
# USAGE:
# printf "%s\n" "${blue}Some text${reset}"
if tput setaf 1 >/dev/null 2>&1; then
bold=$(tput bold)
underline=$(tput smul)
reverse=$(tput rev)
reset=$(tput sgr0)
if [[ $(tput colors) -ge 256 ]] >/dev/null 2>&1; then
white=$(tput setaf 231)
blue=$(tput setaf 38)
yellow=$(tput setaf 11)
green=$(tput setaf 82)
red=$(tput setaf 9)
purple=$(tput setaf 171)
gray=$(tput setaf 250)
else
white=$(tput setaf 7)
blue=$(tput setaf 38)
yellow=$(tput setaf 3)
green=$(tput setaf 2)
red=$(tput setaf 9)
purple=$(tput setaf 13)
gray=$(tput setaf 7)
fi
else
bold="\033[4;37m"
reset="\033[0m"
underline="\033[4;37m"
# shellcheck disable=SC2034
reverse=""
white="\033[0;37m"
blue="\033[0;34m"
yellow="\033[0;33m"
green="\033[1;32m"
red="\033[0;31m"
purple="\033[0;35m"
gray="\033[0;37m"
fi
}
_alert_() {
# DESC:
# Controls all printing of messages to log files and stdout.
# ARGS:
# $1 (required) - The type of alert to print
# (success, header, notice, dryrun, debug, warning, error,
# fatal, info, input)
# $2 (required) - The message to be printed to stdout and/or a log file
# $3 (optional) - Pass '${LINENO}' to print the line number where the _alert_ was triggered
# OUTS:
# stdout: The message is printed to stdout
# log file: The message is printed to a log file
# USAGE:
# [_alertType] "[MESSAGE]" "${LINENO}"
# NOTES:
# - The colors of each alert type are set in this function
# - For specified alert types, the funcstac will be printed
local _color
local _alertType="${1}"
local _message="${2}"
local _line="${3-}" # Optional line number
[[ $# -lt 2 ]] && fatal 'Missing required argument to _alert_'
if [[ -n ${_line} && ${_alertType} =~ ^fatal && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
_message="${_message} ${gray}(line: ${_line}) $(_printFuncStack_)"
elif [[ -n ${_line} && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
_message="${_message} ${gray}(line: ${_line})"
elif [[ -z ${_line} && ${_alertType} =~ ^fatal && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
_message="${_message} ${gray}$(_printFuncStack_)"
fi
if [[ ${_alertType} =~ ^(error|fatal) ]]; then
_color="${bold}${red}"
elif [ "${_alertType}" == "info" ]; then
_color="${gray}"
elif [ "${_alertType}" == "warning" ]; then
_color="${red}"
elif [ "${_alertType}" == "success" ]; then
_color="${green}"
elif [ "${_alertType}" == "debug" ]; then
_color="${purple}"
elif [ "${_alertType}" == "header" ]; then
_color="${bold}${white}${underline}"
elif [ "${_alertType}" == "notice" ]; then
_color="${bold}"
elif [ "${_alertType}" == "input" ]; then
_color="${bold}${underline}"
elif [ "${_alertType}" = "dryrun" ]; then
_color="${blue}"
else
_color=""
fi
_writeToScreen_() {
("${QUIET}") && return 0 # Print to console when script is not 'quiet'
[[ ${VERBOSE} == false && ${_alertType} =~ ^(debug|verbose) ]] && return 0
if ! [[ -t 1 || -z ${TERM-} ]]; then # Don't use colors on non-recognized terminals
_color=""
reset=""
fi
if [[ ${_alertType} == header ]]; then
printf "${_color}%s${reset}\n" "${_message}"
else
printf "${_color}[%7s] %s${reset}\n" "${_alertType}" "${_message}"
fi
}
_writeToScreen_
_writeToLog_() {
[[ ${_alertType} == "input" ]] && return 0
[[ ${LOGLEVEL} =~ (off|OFF|Off) ]] && return 0
if [ -z "${LOGFILE-}" ]; then
LOGFILE="$(pwd)/$(basename "$0").log"
fi
[ ! -d "$(dirname "${LOGFILE}")" ] && mkdir -p "$(dirname "${LOGFILE}")"
[[ ! -f ${LOGFILE} ]] && touch "${LOGFILE}"
# Don't use colors in logs
local _cleanmessage
_cleanmessage="$(printf "%s" "${_message}" | sed -E 's/(\x1b)?\[(([0-9]{1,2})(;[0-9]{1,3}){0,2})?[mGK]//g')"
# Print message to log file
printf "%s [%7s] %s %s\n" "$(date +"%b %d %R:%S")" "${_alertType}" "[$(/bin/hostname)]" "${_cleanmessage}" >>"${LOGFILE}"
}
# Write specified log level data to logfile
case "${LOGLEVEL:-ERROR}" in
ALL | all | All)
_writeToLog_
;;
DEBUG | debug | Debug)
_writeToLog_
;;
INFO | info | Info)
if [[ ${_alertType} =~ ^(error|fatal|warning|info|notice|success) ]]; then
_writeToLog_
fi
;;
NOTICE | notice | Notice)
if [[ ${_alertType} =~ ^(error|fatal|warning|notice|success) ]]; then
_writeToLog_
fi
;;
WARN | warn | Warn)
if [[ ${_alertType} =~ ^(error|fatal|warning) ]]; then
_writeToLog_
fi
;;
ERROR | error | Error)
if [[ ${_alertType} =~ ^(error|fatal) ]]; then
_writeToLog_
fi
;;
FATAL | fatal | Fatal)
if [[ ${_alertType} =~ ^fatal ]]; then
_writeToLog_
fi
;;
OFF | off)
return 0
;;
*)
if [[ ${_alertType} =~ ^(error|fatal) ]]; then
_writeToLog_
fi
;;
esac
} # /_alert_
error() { _alert_ error "${1}" "${2-}"; }
warning() { _alert_ warning "${1}" "${2-}"; }
notice() { _alert_ notice "${1}" "${2-}"; }
info() { _alert_ info "${1}" "${2-}"; }
success() { _alert_ success "${1}" "${2-}"; }
dryrun() { _alert_ dryrun "${1}" "${2-}"; }
input() { _alert_ input "${1}" "${2-}"; }
header() { _alert_ header "${1}" "${2-}"; }
debug() { _alert_ debug "${1}" "${2-}"; }
fatal() {
_alert_ fatal "${1}" "${2-}"
_safeExit_ "1"
}
_printFuncStack_() {
# DESC:
# Prints the function stack in use. Used for debugging, and error reporting.
# ARGS:
# None
# OUTS:
# stdout: Prints [function]:[file]:[line]
# NOTE:
# Does not print functions from the alert class
local _i
declare -a _funcStackResponse=()
for ((_i = 1; _i < ${#BASH_SOURCE[@]}; _i++)); do
case "${FUNCNAME[${_i}]}" in
_alert_ | _trapCleanup_ | fatal | error | warning | notice | info | debug | dryrun | header | success)
continue
;;
*)
_funcStackResponse+=("${FUNCNAME[${_i}]}:$(basename "${BASH_SOURCE[${_i}]}"):${BASH_LINENO[_i - 1]}")
;;
esac
done
printf "( "
printf %s "${_funcStackResponse[0]}"
printf ' < %s' "${_funcStackResponse[@]:1}"
printf ' )\n'
}
_safeExit_() {
# DESC:
# Cleanup and exit from a script
# ARGS:
# $1 (optional) - Exit code (defaults to 0)
# OUTS:
# None
if [[ -d ${SCRIPT_LOCK-} ]]; then
if command rm -rf "${SCRIPT_LOCK}"; then
debug "Removing script lock"
else
warning "Script lock could not be removed. Try manually deleting ${yellow}'${SCRIPT_LOCK}'"
fi
fi
if [[ -n ${TMP_DIR-} && -d ${TMP_DIR-} ]]; then
if [[ ${1-} == 1 && -n "$(ls "${TMP_DIR}")" ]]; then
command rm -r "${TMP_DIR}"
else
command rm -r "${TMP_DIR}"
debug "Removing temp directory"
fi
fi
trap - INT TERM EXIT
exit "${1:-0}"
}
_trapCleanup_() {
# DESC:
# Log errors and cleanup from script when an error is trapped. Called by 'trap'
# ARGS:
# $1: Line number where error was trapped
# $2: Line number in function
# $3: Command executing at the time of the trap
# $4: Names of all shell functions currently in the execution call stack
# $5: Scriptname
# $6: $BASH_SOURCE
# USAGE:
# trap '_trapCleanup_ ${LINENO} ${BASH_LINENO} "${BASH_COMMAND}" "${FUNCNAME[*]}" "${0}" "${BASH_SOURCE[0]}"' EXIT INT TERM SIGINT SIGQUIT SIGTERM ERR
# OUTS:
# Exits script with error code 1
local _line=${1-} # LINENO
local _linecallfunc=${2-}
local _command="${3-}"
local _funcstack="${4-}"
local _script="${5-}"
local _sourced="${6-}"
# Replace the cursor in-case 'tput civis' has been used
tput cnorm
if declare -f "fatal" &>/dev/null && declare -f "_printFuncStack_" &>/dev/null; then
_funcstack="'$(printf "%s" "${_funcstack}" | sed -E 's/ / < /g')'"
if [[ ${_script##*/} == "${_sourced##*/}" ]]; then
fatal "${7-} command: '${_command}' (line: ${_line}) [func: $(_printFuncStack_)]"
else
fatal "${7-} command: '${_command}' (func: ${_funcstack} called at line ${_linecallfunc} of '${_script##*/}') (line: ${_line} of '${_sourced##*/}') "
fi
else
printf "%s\n" "Fatal error trapped. Exiting..."
fi
if declare -f _safeExit_ &>/dev/null; then
_safeExit_ 1
else
exit 1
fi
}
_makeTempDir_() {
# DESC:
# Creates a temp directory to house temporary files
# ARGS:
# $1 (Optional) - First characters/word of directory name
# OUTS:
# Sets $TMP_DIR variable to the path of the temp directory
# USAGE:
# _makeTempDir_ "$(basename "$0")"
[ -d "${TMP_DIR-}" ] && return 0
if [ -n "${1-}" ]; then
TMP_DIR="${TMPDIR:-/tmp/}${1}.${RANDOM}.${RANDOM}.$$"
else
TMP_DIR="${TMPDIR:-/tmp/}$(basename "$0").${RANDOM}.${RANDOM}.${RANDOM}.$$"
fi
(umask 077 && mkdir "${TMP_DIR}") || {
fatal "Could not create temporary directory! Exiting."
}
debug "\$TMP_DIR=${TMP_DIR}"
}
# shellcheck disable=SC2120
_acquireScriptLock_() {
# DESC:
# Acquire script lock to prevent running the same script a second time before the
# first instance exits
# ARGS:
# $1 (optional) - Scope of script execution lock (system or user)
# OUTS:
# exports $SCRIPT_LOCK - Path to the directory indicating we have the script lock
# Exits script if lock cannot be acquired
# NOTE:
# If the lock was acquired it's automatically released in _safeExit_()
local _lockDir
if [[ ${1-} == 'system' ]]; then
_lockDir="${TMPDIR:-/tmp/}$(basename "$0").lock"
else
_lockDir="${TMPDIR:-/tmp/}$(basename "$0").${UID}.lock"
fi
if command mkdir "${_lockDir}" 2>/dev/null; then
readonly SCRIPT_LOCK="${_lockDir}"
debug "Acquired script lock: ${yellow}${SCRIPT_LOCK}${purple}"
else
if declare -f "_safeExit_" &>/dev/null; then
error "Unable to acquire script lock: ${yellow}${_lockDir}${red}"
fatal "If you trust the script isn't running, delete the lock dir"
else
printf "%s\n" "ERROR: Could not acquire script lock. If you trust the script isn't running, delete: ${_lockDir}"
exit 1
fi
fi
}
_setPATH_() {
# DESC:
# Add directories to $PATH so script can find executables
# ARGS:
# $@ - One or more paths
# OPTS:
# -x - Fail if directories are not found
# OUTS:
# 0: Success
# 1: Failure
# Adds items to $PATH
# USAGE:
# _setPATH_ "/usr/local/bin" "${HOME}/bin" "$(npm bin)"
[[ $# == 0 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"
local opt
local OPTIND=1
local _failIfNotFound=false
while getopts ":xX" opt; do
case ${opt} in
x | X) _failIfNotFound=true ;;
*)
{
error "Unrecognized option '${1}' passed to _backupFile_" "${LINENO}"
return 1
}
;;
esac
done
shift $((OPTIND - 1))
local _newPath
for _newPath in "$@"; do
if [ -d "${_newPath}" ]; then
if ! printf "%s" "${PATH}" | grep -Eq "(^|:)${_newPath}($|:)"; then
if PATH="${_newPath}:${PATH}"; then
debug "Added '${_newPath}' to PATH"
else
debug "'${_newPath}' already in PATH"
fi
else
debug "_setPATH_: '${_newPath}' already exists in PATH"
fi
else
debug "_setPATH_: can not find: ${_newPath}"
if [[ ${_failIfNotFound} == true ]]; then
return 1
fi
continue
fi
done
return 0
}
_useGNUutils_() {
# DESC:
# Add GNU utilities to PATH to allow consistent use of sed/grep/tar/etc. on MacOS
# ARGS:
# None
# OUTS:
# 0 if successful
# 1 if unsuccessful
# PATH: Adds GNU utilities to the path
# USAGE:
# # if ! _useGNUUtils_; then exit 1; fi
# NOTES:
# GNU utilities can be added to MacOS using Homebrew
! declare -f "_setPATH_" &>/dev/null && fatal "${FUNCNAME[0]} needs function _setPATH_"
if _setPATH_ \
"/usr/local/opt/gnu-tar/libexec/gnubin" \
"/usr/local/opt/coreutils/libexec/gnubin" \
"/usr/local/opt/gnu-sed/libexec/gnubin" \
"/usr/local/opt/grep/libexec/gnubin" \
"/usr/local/opt/findutils/libexec/gnubin" \
"/opt/homebrew/opt/findutils/libexec/gnubin" \
"/opt/homebrew/opt/gnu-sed/libexec/gnubin" \
"/opt/homebrew/opt/grep/libexec/gnubin" \
"/opt/homebrew/opt/coreutils/libexec/gnubin" \
"/opt/homebrew/opt/gnu-tar/libexec/gnubin"; then
return 0
else
return 1
fi
}
_homebrewPath_() {
# DESC:
# Add homebrew bin dir to PATH
# ARGS:
# None
# OUTS:
# 0 if successful
# 1 if unsuccessful
# PATH: Adds homebrew bin directory to PATH
# USAGE:
# # if ! _homebrewPath_; then exit 1; fi
! declare -f "_setPATH_" &>/dev/null && fatal "${FUNCNAME[0]} needs function _setPATH_"
if _uname=$(command -v uname); then
if "${_uname}" | tr '[:upper:]' '[:lower:]' | grep -q 'darwin'; then
if _setPATH_ "/usr/local/bin" "/opt/homebrew/bin"; then
return 0
else
return 1
fi
fi
else
if _setPATH_ "/usr/local/bin" "/opt/homebrew/bin"; then
return 0
else
return 1
fi
fi
}
_parseOptions_() {
# DESC:
# Iterates through options passed to script and sets variables. Will break -ab into -a -b
# when needed and --foo=bar into --foo bar
# ARGS:
# $@ from command line
# OUTS:
# Sets array 'ARGS' containing all arguments passed to script that were not parsed as options
# USAGE:
# _parseOptions_ "$@"
# Iterate over options
local _optstring=h
declare -a _options
local _c
local i
while (($#)); do
case $1 in
# If option is of type -ab
-[!-]?*)
# Loop over each character starting with the second
for ((i = 1; i < ${#1}; i++)); do
_c=${1:i:1}
_options+=("-${_c}") # Add current char to options
# If option takes a required argument, and it's not the last char make
# the rest of the string its argument
if [[ ${_optstring} == *"${_c}:"* && -n ${1:i+1} ]]; then
_options+=("${1:i+1}")
break
fi
done
;;
# If option is of type --foo=bar
--?*=*) _options+=("${1%%=*}" "${1#*=}") ;;
# add --endopts for --
--) _options+=(--endopts) ;;
# Otherwise, nothing special
*) _options+=("$1") ;;
esac
shift
done
set -- "${_options[@]-}"
unset _options
# Read the options and set stuff
# shellcheck disable=SC2034
while [[ ${1-} == -?* ]]; do
case $1 in
# Custom options
# Common options
-h | --help)
_usage_
_safeExit_
;;
--loglevel)
shift
LOGLEVEL=${1}
;;
--logfile)
shift
LOGFILE="${1}"
;;
-n | --dryrun) DRYRUN=true ;;
-v | --verbose) VERBOSE=true ;;
-q | --quiet) QUIET=true ;;
--force) FORCE=true ;;
--endopts)
shift
break
;;
*)
if declare -f _safeExit_ &>/dev/null; then
fatal "invalid option: $1"
else
printf "%s\n" "ERROR: Invalid option: $1"
exit 1
fi
;;
esac
shift
done
if [[ -z ${*} || ${*} == null ]]; then
ARGS=()
else
ARGS+=("$@") # Store the remaining user input as arguments.
fi
}
_columns_() {
# DESC:
# Prints a two column output from a key/value pair.
# Optionally pass a number of 2 space tabs to indent the output.
# ARGS:
# $1 (required): Key name (Left column text)
# $2 (required): Long value (Right column text. Wraps around if too long)
# $3 (optional): Number of 2 character tabs to indent the command (default 1)
# OPTS:
# -b Bold the left column
# -u Underline the left column
# -r Reverse background and foreground colors
# OUTS:
# stdout: Prints the output in columns
# NOTE:
# Long text or ANSI colors in the first column may create display issues
# USAGE:
# _columns_ "Key" "Long value text" [tab level]
[[ $# -lt 2 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"
local opt
local OPTIND=1
local _style=""
while getopts ":bBuUrR" opt; do
case ${opt} in
b | B) _style="${_style}${bold}" ;;
u | U) _style="${_style}${underline}" ;;
r | R) _style="${_style}${reverse}" ;;
*) fatal "Unrecognized option '${1}' passed to ${FUNCNAME[0]}. Exiting." ;;
esac
done
shift $((OPTIND - 1))
local _key="${1}"
local _value="${2}"
local _tabLevel="${3-}"
local _tabSize=2
local _line
local _rightIndent
local _leftIndent
if [[ -z ${3-} ]]; then
_tabLevel=0
fi
_leftIndent="$((_tabLevel * _tabSize))"
local _leftColumnWidth="$((30 + _leftIndent))"
if [ "$(tput cols)" -gt 180 ]; then
_rightIndent=110
elif [ "$(tput cols)" -gt 160 ]; then
_rightIndent=90
elif [ "$(tput cols)" -gt 130 ]; then
_rightIndent=60
elif [ "$(tput cols)" -gt 120 ]; then
_rightIndent=50
elif [ "$(tput cols)" -gt 110 ]; then
_rightIndent=40
elif [ "$(tput cols)" -gt 100 ]; then
_rightIndent=30
elif [ "$(tput cols)" -gt 90 ]; then
_rightIndent=20
elif [ "$(tput cols)" -gt 80 ]; then
_rightIndent=10
else
_rightIndent=0
fi
local _rightWrapLength=$(($(tput cols) - _leftColumnWidth - _leftIndent - _rightIndent))
local _first_line=0
while read -r _line; do
if [[ ${_first_line} -eq 0 ]]; then
_first_line=1
else
_key=" "
fi
printf "%-${_leftIndent}s${_style}%-${_leftColumnWidth}b${reset} %b\n" "" "${_key}${reset}" "${_line}"
done <<<"$(fold -w${_rightWrapLength} -s <<<"${_value}")"
}
_usage_() {
cat <<USAGE_TEXT
${bold}$(basename "$0") [OPTION]... [FILE]...${reset}
Custom pre-commit hook script. This script is intended to be used as part of the pre-commit pipeline managed within .pre-commit-config.yaml.
${bold}${underline}Options:${reset}
$(_columns_ -b -- '-h, --help' "Display this help and exit" 2)
$(_columns_ -b -- "--loglevel [LEVEL]" "One of: FATAL, ERROR (default), WARN, INFO, NOTICE, DEBUG, ALL, OFF" 2)
$(_columns_ -b -- "--logfile [FILE]" "Full PATH to logfile. (Default is '\${HOME}/logs/$(basename "$0").log')" 2)
$(_columns_ -b -- "-n, --dryrun" "Non-destructive. Makes no permanent changes." 2)
$(_columns_ -b -- "-q, --quiet" "Quiet (no output)" 2)
$(_columns_ -b -- "-v, --verbose" "Output more information. (Items echoed to 'verbose')" 2)
$(_columns_ -b -- "--force" "Skip all user interaction. Implied 'Yes' to all actions." 2)
${bold}${underline}Example Usage:${reset}
${gray}# Run the script and specify log level and log file.${reset}
$(basename "$0") -vn --logfile "/path/to/file.log" --loglevel 'WARN'
USAGE_TEXT
}
# ################################## INITIALIZE AND RUN THE SCRIPT
# (Comment or uncomment the lines below to customize script behavior)
trap '_trapCleanup_ ${LINENO} ${BASH_LINENO} "${BASH_COMMAND}" "${FUNCNAME[*]}" "${0}" "${BASH_SOURCE[0]}"' EXIT INT TERM SIGINT SIGQUIT SIGTERM
# Trap errors in subshells and functions
set -o errtrace
# Exit on error. Append '||true' if you expect an error
set -o errexit
# Use last non-zero exit code in a pipeline
set -o pipefail
# Confirm we have BASH greater than v4
[ "${BASH_VERSINFO:-0}" -ge 4 ] || {
printf "%s\n" "ERROR: BASH_VERSINFO is '${BASH_VERSINFO:-0}'. This script requires BASH v4 or greater."
exit 1
}
# Make `for f in *.txt` work when `*.txt` matches zero files
shopt -s nullglob globstar
# Set IFS to preferred implementation
IFS=$' \n\t'
# Run in debug mode
# set -o xtrace
# Initialize color constants
_setColors_
# Disallow expansion of unset variables
set -o nounset
# Force arguments when invoking the script
# [[ $# -eq 0 ]] && _parseOptions_ "-h"
# Parse arguments passed to script
_parseOptions_ "$@"
# Create a temp directory '$TMP_DIR'
_makeTempDir_ "$(basename "$0")"
# Acquire script lock
# _acquireScriptLock_
# Add Homebrew bin directory to PATH (MacOS)
# _homebrewPath_
# Source GNU utilities from Homebrew (MacOS)
# _useGNUutils_
# Run the main logic script
_mainScript_
# Exit cleanly
_safeExit_


@@ -1,150 +0,0 @@
#!/usr/bin/env python
"""Script to update the pyproject.toml file with the latest versions of the dependencies."""
from pathlib import Path
from textwrap import wrap
try:
import tomllib
except ModuleNotFoundError: # pragma: no cover
import tomli as tomllib # type: ignore [no-redef]
import sh
from rich.console import Console
console = Console()
def dryrun(msg: str) -> None:
"""Print a message if the dry run flag is set.
Args:
msg: Message to print
"""
console.print(f"[cyan]DRYRUN | {msg}[/cyan]")
def success(msg: str) -> None:
"""Print a success message without using logging.
Args:
msg: Message to print
"""
console.print(f"[green]SUCCESS | {msg}[/green]")
def warning(msg: str) -> None:
"""Print a warning message without using logging.
Args:
msg: Message to print
"""
console.print(f"[yellow]WARNING | {msg}[/yellow]")
def error(msg: str) -> None:
"""Print an error message without using logging.
Args:
msg: Message to print
"""
console.print(f"[red]ERROR | {msg}[/red]")
def notice(msg: str) -> None:
"""Print a notice message without using logging.
Args:
msg: Message to print
"""
console.print(f"[bold]NOTICE | {msg}[/bold]")
def info(msg: str) -> None:
"""Print a notice message without using logging.
Args:
msg: Message to print
"""
console.print(f"INFO | {msg}")
def usage(msg: str, width: int = 80) -> None:
"""Print a usage message without using logging.
Args:
msg: Message to print
width (optional): Width of the message
"""
for _n, line in enumerate(wrap(msg, width=width)):
if _n == 0:
console.print(f"[dim]USAGE | {line}")
else:
console.print(f"[dim] | {line}")
def debug(msg: str) -> None:
"""Print a debug message without using logging.
Args:
msg: Message to print
"""
console.print(f"[blue]DEBUG | {msg}[/blue]")
def dim(msg: str) -> None:
"""Print a message in dimmed color.
Args:
msg: Message to print
"""
console.print(f"[dim]{msg}[/dim]")
# Load the pyproject.toml file
pyproject = Path(__file__).parents[1] / "pyproject.toml"
if not pyproject.exists():
console.print("pyproject.toml file not found")
raise SystemExit(1)
with pyproject.open("rb") as f:
try:
data = tomllib.load(f)
except tomllib.TOMLDecodeError as e:
raise SystemExit(1) from e
# Get the latest versions of all dependencies
info("Getting latest versions of dependencies...")
packages: dict = {}
for line in sh.poetry("--no-ansi", "show", "--outdated").splitlines():
package, current, latest = line.split()[:3]
packages[package] = {"current_version": current, "new_version": latest}
if not packages:
success("All dependencies are up to date")
raise SystemExit(0)
dependencies = data["tool"]["poetry"]["dependencies"]
groups = data["tool"]["poetry"]["group"]
for p in dependencies:
if p in packages:
notice(
f"Updating {p} from {packages[p]['current_version']} to {packages[p]['new_version']}"
)
sh.poetry("add", f"{p}@latest", _fg=True)
for group in groups:
for p in groups[group]["dependencies"]:
if p in packages:
notice(
f"Updating {p} from {packages[p]['current_version']} to {packages[p]['new_version']}"
)
sh.poetry("add", f"{p}@latest", "--group", group, _fg=True)
sh.poetry("update", _fg=True)
success("All dependencies are up to date")
raise SystemExit(0)


@@ -24,24 +24,18 @@
when:
- is_nomad_client or is_nomad_server
- name: Ensure nomad user can run sudo with the restore script
- name: "SUDO: Confirm users can run service_backups"
become: true
ansible.builtin.lineinfile:
path: /etc/sudoers
path: "/etc/sudoers.d/010_{{ item }}-backups-nopasswd"
line: "{{ item }} ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
state: present
line: "nomad ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
validate: "/usr/sbin/visudo -cf %s"
when:
- is_nomad_client or is_nomad_server
- "'pis' in group_names"
- name: Ensure my user can run sudo with the restore script
become: true
ansible.builtin.lineinfile:
path: /etc/sudoers
state: present
line: "{{ ansible_user }} ALL=(ALL) NOPASSWD: /usr/local/bin/service_backups, /usr/local/bin/service_restore"
create: true
mode: "0440"
validate: "/usr/sbin/visudo -cf %s"
loop:
- nomad
- "{{ ansible_user }}"
when:
- is_nomad_client or is_nomad_server
- "'pis' in group_names"


@@ -1,3 +1,4 @@
# yamllint disable rule:indentation
---
# TASK DESCRIPTION:
# Downloads, installs, and configures Hashicorp Consul.
@@ -117,7 +118,7 @@
- name: "Create Consul /opt storage and copy certificates"
block:
- name: "Create {{ consul_opt_dir }} directories"
- name: "Create {{ consul_opt_dir }} directories" # noqa: name[template]
become: true
ansible.builtin.file:
path: "{{ item }}"
@@ -130,16 +131,25 @@
- "{{ consul_opt_dir }}/plugins"
- "{{ consul_opt_dir }}/certs"
- name: Copy certs to servers
- name: Copy certs to servers # noqa
become: true
ansible.builtin.copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: "certs/consul/consul-agent-ca.pem", dest: "{{ consul_opt_dir }}/certs/consul-agent-ca.pem" }
- { src: "certs/consul/{{ datacenter_name }}-server-consul-0.pem", dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0.pem" }
- { src: "certs/consul/{{ datacenter_name }}-server-consul-0-key.pem", dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0-key.pem" }
- {
src: "certs/consul/consul-agent-ca.pem",
dest: "{{ consul_opt_dir }}/certs/consul-agent-ca.pem",
}
- {
src: "certs/consul/{{ datacenter_name }}-server-consul-0.pem",
dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0.pem",
}
- {
src: "certs/consul/{{ datacenter_name }}-server-consul-0-key.pem",
dest: "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0-key.pem",
}
when:
- is_consul_server
@@ -163,7 +173,7 @@
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}" # noqa: name[template]
become: true
ansible.builtin.file:
path: "{{ consul_opt_dir }}"
@@ -199,7 +209,7 @@
when:
- ansible_os_family == 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}"
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }}" # noqa: name[template]
become: true
ansible.builtin.file:
path: "{{ interpolated_consul_configuration_dir }}"
@@ -209,7 +219,7 @@
when:
- mac_intel or mac_arm or inventory_hostname == 'synology'
- name: "Set owner of root consul dir to {{ ansible_user_uid }}:{{ ansible_user_gid }} (synology)"
- name: "Set owner of root consul dir to {{ ansible_user_uid }}:{{ ansible_user_gid }} (synology)" # noqa: name[template]
become: true
ansible.builtin.file:
path: /volume1/docker/consul/
@@ -328,7 +338,7 @@
- ansible_os_family == 'Debian'
- "'nostart' not in ansible_run_tags"
- name: Make sure Consul service is really running
- name: Make sure Consul service is really running # noqa: command-instead-of-module
ansible.builtin.command:
cmd: systemctl is-active consul
register: is_consul_really_running


@@ -4,7 +4,7 @@
#
# NOTE: This task exists due to the arillso.logrotate failing completely on macOS
- name: Add service_backups.log to logrotate
- name: Add service_backups.log to logrotate # noqa: ignore-errors
become: true
vars:
logrotate_applications:


@@ -1,3 +1,4 @@
# yamllint disable rule:indentation
---
# TASK DESCRIPTION:
# Downloads, installs, and configures Hashicorp Nomad.
@@ -83,7 +84,7 @@
- name: "Create Nomad /opt storage"
block:
- name: "Create {{ nomad_opt_dir_location }} directories"
- name: "Create {{ nomad_opt_dir_location }} directories" # noqa: name[template]
become: true
ansible.builtin.file:
path: "{{ item }}"
@@ -102,9 +103,18 @@
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: certs/nomad/nomad-ca.pem, dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem" }
- { src: certs/nomad/server.pem, dest: "{{ nomad_opt_dir_location }}/certs/server.pem" }
- { src: certs/nomad/server-key.pem, dest: "{{ nomad_opt_dir_location }}/certs/server-key.pem" }
- {
src: certs/nomad/nomad-ca.pem,
dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem",
}
- {
src: certs/nomad/server.pem,
dest: "{{ nomad_opt_dir_location }}/certs/server.pem",
}
- {
src: certs/nomad/server-key.pem,
dest: "{{ nomad_opt_dir_location }}/certs/server-key.pem",
}
notify: "restart nomad"
when: is_nomad_server
@@ -115,9 +125,18 @@
dest: "{{ item.dest }}"
mode: 0755
loop:
- { src: certs/nomad/nomad-ca.pem, dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem" }
- { src: certs/nomad/client.pem, dest: "{{ nomad_opt_dir_location }}/certs/client.pem" }
- { src: certs/nomad/client-key.pem, dest: "{{ nomad_opt_dir_location }}/certs/client-key.pem" }
- {
src: certs/nomad/nomad-ca.pem,
dest: "{{ nomad_opt_dir_location }}/certs/nomad-ca.pem",
}
- {
src: certs/nomad/client.pem,
dest: "{{ nomad_opt_dir_location }}/certs/client.pem",
}
- {
src: certs/nomad/client-key.pem,
dest: "{{ nomad_opt_dir_location }}/certs/client-key.pem",
}
notify: "restart nomad"
when: is_nomad_client
@@ -130,7 +149,7 @@
recurse: true
when: ansible_os_family == 'Debian'
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }} (MacOSX)"
- name: "Set owner of files to {{ ansible_user_uid }}:{{ ansible_user_gid }} (MacOSX)" # noqa: name[template]
become: true
ansible.builtin.file:
path: "{{ nomad_opt_dir_location }}"


@@ -74,3 +74,15 @@
dest: "{{ docker_compose_file_location }}/{{ item | basename | regex_replace('.j2$', '') }}"
mode: 0644
with_fileglob: "../templates/docker_compose_files/*.j2"
- name: "Prune docker caches"
community.docker.docker_prune:
containers: true
images: true
images_filters:
dangling: false
networks: true
volumes: true
builder_cache: true
when:
- is_docker_compose_client or is_nomad_client or is_nomad_server


@@ -54,14 +54,14 @@
ansible.builtin.debug:
msg: "{{ homebrew_output.unchanged_pkgs }}"
- name: Install homebrew casks
community.general.homebrew_cask:
name: "{{ item }}"
state: present
install_options: "appdir=/Applications"
accept_external_apps: true
upgrade_all: false
update_homebrew: false
greedy: false
loop: "{{ homebrew_casks_list }}"
ignore_errors: true
# - name: Install homebrew casks # noqa: ignore-errors
# community.general.homebrew_cask:
# name: "{{ item }}"
# state: present
# install_options: "appdir=/Applications"
# accept_external_apps: true
# upgrade_all: false
# update_homebrew: false
# greedy: false
# loop: "{{ homebrew_casks_list }}"
# ignore_errors: true


@@ -1,3 +1,4 @@
# yamllint disable rule:indentation
---
# TASK DESCRIPTION:
# Downloads, installs, and configures Telegraf
@@ -206,7 +207,7 @@
- name: "Configure Telegraf"
block:
- name: "Ensure {{ telegraph_config_location }} exists"
- name: "Ensure {{ telegraph_config_location }} exists" # noqa: name[template]
become: true
ansible.builtin.file:
path: "{{ item }}"
@@ -223,10 +224,22 @@
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/base_config.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.conf" }
- { src: "telegraf/custom_metrics.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/custom_metrics.conf" }
- { src: "telegraf/nomad.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/nomad.conf" }
- { src: "telegraf/docker.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/docker.conf" }
- {
src: "telegraf/base_config.conf.j2",
dest: "{{ telegraph_config_location }}/telegraf.conf",
}
- {
src: "telegraf/custom_metrics.conf.j2",
dest: "{{ telegraph_config_location }}/telegraf.d/custom_metrics.conf",
}
- {
src: "telegraf/nomad.conf.j2",
dest: "{{ telegraph_config_location }}/telegraf.d/nomad.conf",
}
- {
src: "telegraf/docker.conf.j2",
dest: "{{ telegraph_config_location }}/telegraf.d/docker.conf",
}
notify: restart_telegraf
- name: Template leader configs (ie, configs that should be placed on a single server)
@@ -236,9 +249,18 @@
dest: "{{ item.dest }}"
mode: "644"
loop:
- { src: "telegraf/leader.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/leader.conf" }
- { src: "telegraf/speedtest.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/speedtest.conf" }
- { src: "telegraf/pingHosts.conf.j2", dest: "{{ telegraph_config_location }}/telegraf.d/pingHosts.conf" }
- {
src: "telegraf/leader.conf.j2",
dest: "{{ telegraph_config_location }}/telegraf.d/leader.conf",
}
- {
src: "telegraf/speedtest.conf.j2",
dest: "{{ telegraph_config_location }}/telegraf.d/speedtest.conf",
}
- {
src: "telegraf/pingHosts.conf.j2",
dest: "{{ telegraph_config_location }}/telegraf.d/pingHosts.conf",
}
when:
- is_cluster_leader
notify: restart_telegraf


@@ -5,11 +5,11 @@
{% if is_consul_server %}
"server" = true
"ui_config" = {
"enabled" = true
"enabled" = true
}
{% else %}
"ui_config" = {
"enabled" = false
"enabled" = false
}
{% endif %}
@@ -28,15 +28,15 @@
# ----------------------------------------- Networking
"addresses" = {
"dns" = "0.0.0.0"
"grpc" = "0.0.0.0"
"http" = "0.0.0.0"
"https" = "0.0.0.0"
"dns" = "0.0.0.0"
"grpc" = "0.0.0.0"
"http" = "0.0.0.0"
"https" = "0.0.0.0"
}
"ports" = {
"dns" = 8600
"http" = 8500
"server" = 8300
"dns" = 8600
"http" = 8500
"server" = 8300
}
{% if 'linode' in group_names %}
@@ -57,7 +57,7 @@
{% if 'linode' in group_names %}
"retry_join" = [{% for h in groups['linode-cluster'] if hostvars[h].is_consul_server == true %}"{{ hostvars[h].linode_private_ip }}"{% if not loop.last %}, {% endif %}{% endfor %}]
{% else %}
"retry_join" = [{% for h in groups['lan'] if hostvars[h].is_consul_server == true %}"{{ hostvars[h].ansible_host }}"{% if not loop.last %}, {% endif %}{% endfor %}]
"retry_join" = ["{{ rpi1_ip_address }}", "{{ rpi2_ip_address }}", "{{ rpi3_ip_address }}"]
{% if is_consul_server %}
{% if 'linode' in group_names %}
"join_wan" = [{% for h in groups['linode-cluster'] if hostvars[h].is_consul_server == true %}"{{ hostvars[h].ansible_host }}"{% if not loop.last %}, {% endif %}{% endfor %}]
@@ -81,7 +81,7 @@
"key_file" = "{{ consul_opt_dir }}/certs/{{ datacenter_name }}-server-consul-0-key.pem"
{% endif %}
"auto_encrypt" = {
"allow_tls" = true
"allow_tls" = true
}
{% else %} {# Consul Clients #}
"verify_incoming" = false
@@ -93,14 +93,14 @@
"ca_file" = "{{ consul_opt_dir }}/certs/consul-agent-ca.pem"
{% endif %}
"auto_encrypt" = {
"tls" = true
"tls" = true
}
{% endif %}
"acl" = {
enabled = false
default_policy = "allow"
enable_token_persistence = true
default_policy = "allow"
enable_token_persistence = true
enabled = false
}
# ----------------------------------------- Cluster Operations


@@ -1,5 +1,5 @@
[Unit]
Description="HashiCorp Consul - A service mesh solution"
Description="hashiCorp Consul - A service mesh solution"
Documentation=https://www.consul.io/
Requires=network-online.target
After=network-online.target


@@ -9,8 +9,7 @@
"traefik.http.routers.sabnzbd.entryPoints=web,websecure",
"traefik.http.routers.sabnzbd.service=sabnzbd",
"traefik.http.routers.sabnzbd.tls=true",
"traefik.http.routers.sabnzbd.tls.certresolver=cloudflare",
"traefik.http.routers.sabnzbd.middlewares=authelia@file"
"traefik.http.routers.sabnzbd.tls.certresolver=cloudflare"
],
"checks": [{
"id": "sabnzbd-http-check",
@@ -21,6 +20,27 @@
"failures_before_critical": 3
}]
},
{
"name": "jellyfin",
"id": "jellyfin",
"tags": [
"traefik.enable=true",
"traefik.http.services.jellyfin.loadbalancer.server.port=8096",
"traefik.http.routers.jellyfin.rule=Host(`jellyfin.{{ homelab_domain_name }}`)",
"traefik.http.routers.jellyfin.entryPoints=web,websecure",
"traefik.http.routers.jellyfin.service=jellyfin",
"traefik.http.routers.jellyfin.tls=true",
"traefik.http.routers.jellyfin.tls.certresolver=cloudflare"
],
"checks": [{
"id": "jellyfin-http-check",
"http": "http://{{ synology_second_ip }}:8096",
"interval": "30s",
"timeout": "5s",
"success_before_passing": 3,
"failures_before_critical": 3
}]
},
{
"name": "synology",
"id": "synology",


@@ -1,11 +0,0 @@
version: '3.9'
services:
asn-to-ip:
image: ddimick/asn-to-ip:latest
hostname: asn-to-ip
container_name: asn-to-ip
network_mode: "bridge"
ports:
- 5151:5000
restart: unless-stopped


@@ -2,7 +2,7 @@ version: '3.9'
services:
consul:
image: consul:{{ consul_version }}
image: hashicorp/consul:{{ consul_version }}
hostname: consul
container_name: consul
network_mode: "host"


@@ -0,0 +1,17 @@
version: '3.9'
services:
jellyfin:
image: lscr.io/linuxserver/jellyfin:latest
hostname: jellyfin
container_name: jellyfin
network_mode: "host"
environment:
- "TZ=America/New_York"
- "PGID=101"
- "PUID={{ ansible_user_uid }}"
volumes:
- /volume1/pi-cluster/jellyfin:/config
- /volume1/media/media/movies:/data/movies
- /volume1/media/media/tv:/data/tv
restart: unless-stopped


@@ -2,7 +2,7 @@ version: '3.9'
services:
sabnzbd:
image: ghcr.io/linuxserver/sabnzbd
image: ghcr.io/linuxserver/sabnzbd:{{ sabnzbd_version }}
hostname: sabnzbd
container_name: sabnzbd
network_mode: "bridge"
@@ -10,13 +10,15 @@ services:
- "TZ=America/New_York"
- "PGID=101"
- "PUID={{ ansible_user_uid }}"
#- "DOCKER_MODS=linuxserver/mods:universal-cron"
volumes:
- /var/services/homes/{{ my_username }}:/{{ my_username }}
- /volume1/nate:/nate
- /volume1/media/downloads/nzb:/nzbd
- /volume1/media/downloads/temp:/incomplete-downloads
- /volume1/media/downloads/complete:/downloads
- /volume1/docker/sabnzbd:/config
- /volume1/pi-cluster/sabnzbd:/config
- /volume1/pi-cluster/sabnzbd/startup-scripts:/custom-cont-init.d
ports:
- 8080:8080
- 9090:9090


@@ -5,28 +5,28 @@ datacenter = "{{ datacenter_name }}"
# ----------------------------------------- Files and Logs
data_dir = "{{ nomad_opt_dir_location }}"
plugin_dir = "{{ nomad_opt_dir_location }}/plugins"
log_level = "warn"
log_file = "{{ nomad_opt_dir_location }}/logs/nomad.log"
log_rotate_max_files = 5
enable_syslog = false
log_file = "{{ nomad_opt_dir_location }}/logs/nomad.log"
log_level = "warn"
log_rotate_max_files = 5
plugin_dir = "{{ nomad_opt_dir_location }}/plugins"
# ----------------------------------------- Networking
bind_addr = "0.0.0.0" # the default
advertise {
{% if 'linode' in group_names %}
http = "{{ linode_private_ip }}:4646"
rpc = "{{ linode_private_ip }}:4647"
serf = "{{ linode_private_ip }}:4648" # non-default ports may be specified
http = "{{ linode_private_ip }}:4646"
rpc = "{{ linode_private_ip }}:4647"
serf = "{{ linode_private_ip }}:4648" # non-default ports may be specified
{% elif 'synology' in group_names %}
http = "{{ synology_second_ip }}:4646"
rpc = "{{ synology_second_ip }}:4647"
serf = "{{ synology_second_ip }}:4648" # non-default ports may be specified
http = "{{ synology_second_ip }}:4646"
rpc = "{{ synology_second_ip }}:4647"
serf = "{{ synology_second_ip }}:4648" # non-default ports may be specified
{% else %}
http = "{{ ansible_host }}:4646"
rpc = "{{ ansible_host }}:4647"
serf = "{{ ansible_host }}:4648" # non-default ports may be specified
http = "{{ ansible_host }}:4646"
rpc = "{{ ansible_host }}:4647"
serf = "{{ ansible_host }}:4648" # non-default ports may be specified
{% endif %}
}
@@ -48,170 +48,171 @@ consul {
{% if is_nomad_server %}
tags = [
"traefik.enable=true",
"traefik.http.routers.nomad-server.entryPoints=web,websecure",
"traefik.http.routers.nomad-server.service=nomad-server",
"traefik.http.routers.nomad-server.rule=Host(`nomad.{{ homelab_domain_name }}`)",
"traefik.http.routers.nomad-server.tls=true",
"traefik.http.routers.nomad-server.middlewares=authelia@file,redirectScheme@file",
"traefik.http.services.nomad-server.loadbalancer.server.port=4646"
"traefik.enable=true",
"traefik.http.routers.nomad-server.entryPoints=web,websecure",
"traefik.http.routers.nomad-server.service=nomad-server",
"traefik.http.routers.nomad-server.rule=Host(`nomad.{{ homelab_domain_name }}`)",
"traefik.http.routers.nomad-server.tls=true",
"traefik.http.routers.nomad-server.middlewares=redirectScheme@file",
"traefik.http.services.nomad-server.loadbalancer.server.port=4646"
]
{% endif %}
}
# ----------------------------------------- CLient Config
# ----------------------------------------- Client Config
client {
enabled = true
enabled = true
{% if 'pis' in group_names %}
node_class = "rpi"
node_class = "rpi"
{% elif 'macs' in group_names %}
node_class = "mac"
node_class = "mac"
{% elif 'synology' in group_names %}
node_class = "synology"
node_class = "synology"
{% endif %}
reserved {
cpu = 250
memory = 100
reserved_ports = "22"
}
reserved {
cpu = 250
memory = 100
reserved_ports = "22"
}
{% if not is_nomad_server %}
{% if 'linode' in group_names %}
server_join {
retry_join = [{% for h in groups['linode'] if hostvars[h].is_nomad_server == true %}"{{ hostvars[h].ansible_host }}"{% if not loop.last %}, {% endif %}{% endfor %}]
retry_max = 3
retry_interval = "15s"
}
server_join {
retry_join = [{% for h in groups['linode'] if hostvars[h].is_nomad_server == true %}"{{ hostvars[h].ansible_host }}"{% if not loop.last %}, {% endif %}{% endfor %}]
retry_max = 3
retry_interval = "15s"
}
{% else %}
server_join {
retry_join = [{% for h in groups['lan'] if hostvars[h].is_nomad_server == true %}"{{ hostvars[h].ansible_host }}"{% if not loop.last %}, {% endif %}{% endfor %}]
retry_max = 3
retry_interval = "15s"
}
servers = ["{{ rpi1_ip_address }}", "{{ rpi2_ip_address }}", "{{ rpi3_ip_address }}"]
server_join {
retry_join = ["{{ rpi1_ip_address }}", "{{ rpi2_ip_address }}", "{{ rpi3_ip_address }}"]
retry_max = 3
retry_interval = "15s"
}
{% endif %}
{% endif %}
meta {
# These are variables that can be used in Nomad job files
PUID = "{{ ansible_user_uid }}"
PGID = "{{ ansible_user_gid }}"
nfsStorageRoot = "{{ interpolated_nfs_service_storage }}"
localStorageRoot = "{{ interpolated_localfs_service_storage }}"
{% if 'macs' in group_names %}
restoreCommand = "/usr/local/bin/service_restore"
restoreCommand1 = "--verbose"
restoreCommand2 = "--job"
restoreCommand3 = ""
backupCommand = "/usr/local/bin/service_backups"
backupCommandArg1 = "--verbose"
backupCommandArg2 = "--loglevel=INFO"
backupCommandArg3 = ""
backupAllocArg1 = "--verbose"
backupAllocArg2 = "--loglevel=INFO"
backupAllocArg3 = "--allocation"
backupAllocArg4 = "--delete"
backupAllocArg5 = "--job"
backupAllocArg6 = ""
{% else %}
restoreCommand = "sudo"
restoreCommand1 = "/usr/local/bin/service_restore"
restoreCommand2 = "--job"
restoreCommand3 = "--verbose"
backupCommand = "sudo"
backupCommandArg1 = "/usr/local/bin/service_backups"
backupCommandArg2 = "--verbose"
backupCommandArg3 = "--loglevel=INFO"
backupAllocArg1 = "/usr/local/bin/service_backups"
backupAllocArg2 = "--verbose"
backupAllocArg3 = "--loglevel=INFO"
backupAllocArg4 = "--allocation"
backupAllocArg5 = "--job"
backupAllocArg6 = "--delete"
{% endif %}
}
meta {
# These are variables that can be used in Nomad job files
PUID = "{{ ansible_user_uid }}"
PGID = "{{ ansible_user_gid }}"
nfsStorageRoot = "{{ interpolated_nfs_service_storage }}"
localStorageRoot = "{{ interpolated_localfs_service_storage }}"
{% if 'macs' in group_names %}
restoreCommand = "/usr/local/bin/service_restore"
restoreCommand1 = "--verbose"
restoreCommand2 = "--job"
restoreCommand3 = ""
backupCommand = "/usr/local/bin/service_backups"
backupCommandArg1 = "--verbose"
backupCommandArg2 = "--loglevel=INFO"
backupCommandArg3 = ""
backupAllocArg1 = "--verbose"
backupAllocArg2 = "--loglevel=INFO"
backupAllocArg3 = "--allocation"
backupAllocArg4 = "--delete"
backupAllocArg5 = "--job"
backupAllocArg6 = ""
{% else %}
restoreCommand = "sudo"
restoreCommand1 = "/usr/local/bin/service_restore"
restoreCommand2 = "--job"
restoreCommand3 = "--verbose"
backupCommand = "sudo"
backupCommandArg1 = "/usr/local/bin/service_backups"
backupCommandArg2 = "--verbose"
backupCommandArg3 = "--loglevel=INFO"
backupAllocArg1 = "/usr/local/bin/service_backups"
backupAllocArg2 = "--verbose"
backupAllocArg3 = "--loglevel=INFO"
backupAllocArg4 = "--allocation"
backupAllocArg5 = "--job"
backupAllocArg6 = "--delete"
{% endif %}
}
} # /client
{% if is_nomad_server %}
# ----------------------------------------- Server Config
server {
enabled = true
encrypt = "{{ nomad_encryption_key }}"
enabled = true
encrypt = "{{ nomad_encryption_key }}"
{% if 'linode' in group_names %}
bootstrap_expect = 1
bootstrap_expect = 1
{% else %}
bootstrap_expect = 3
bootstrap_expect = 3
{% endif %}
node_gc_threshold = "15m"
job_gc_interval = "15m"
job_gc_threshold = "6h"
heartbeat_grace = "60s"
min_heartbeat_ttl = "20s"
raft_protocol = "3"
node_gc_threshold = "15m"
job_gc_interval = "15m"
job_gc_threshold = "6h"
heartbeat_grace = "60s"
min_heartbeat_ttl = "20s"
raft_protocol = "3"
server_join {
retry_join = [{% for h in groups['lan'] if hostvars[h].is_nomad_server == true %}"{{ hostvars[h].ansible_host }}"{% if not loop.last %}, {% endif %}{% endfor %}]
retry_max = 3
retry_interval = "15s"
}
server_join {
retry_join = ["{{ rpi1_ip_address }}", "{{ rpi2_ip_address }}", "{{ rpi3_ip_address }}"]
retry_max = 3
retry_interval = "15s"
}
}
autopilot {
cleanup_dead_servers = true
last_contact_threshold = "200ms"
max_trailing_logs = 250
server_stabilization_time = "10s"
enable_redundancy_zones = false
disable_upgrade_migration = false
enable_custom_upgrades = false
cleanup_dead_servers = true
disable_upgrade_migration = false
enable_custom_upgrades = false
enable_redundancy_zones = false
last_contact_threshold = "200ms"
max_trailing_logs = 250
server_stabilization_time = "10s"
}
{% endif %}
{% if is_nomad_server and is_nomad_client %}
client {
enabled = true
enabled = true
}
{% endif %}
# ----------------------------------------- Telemetry
telemetry = {
publish_allocation_metrics = true
publish_node_metrics = true
collection_interval = "10s"
filter_default = false
datadog_address = "localhost:8125"
prefix_filter = [
"+nomad.client.allocations.running",
"+nomad.client.allocations.terminal",
"+nomad.client.allocs.cpu.allocated",
"+nomad.client.allocs.cpu.total_percent",
"+nomad.client.allocs.memory.allocated",
"+nomad.client.allocs.memory.swap",
"+nomad.client.allocs.memory.usage",
"+nomad.nomad.job_status.dead",
"+nomad.nomad.job_status.running",
"+nomad.nomad.job_status.pending",
"+nomad.nomad.job_summary.running",
"+nomad.nomad.job_summary.complete",
"+nomad.nomad.job_summary.lost",
"+nomad.nomad.job_summary.failed"]
collection_interval = "10s"
datadog_address = "localhost:8125"
filter_default = false
publish_allocation_metrics = true
publish_node_metrics = true
prefix_filter = [
"+nomad.client.allocations.running",
"+nomad.client.allocations.terminal",
"+nomad.client.allocs.cpu.allocated",
"+nomad.client.allocs.cpu.total_percent",
"+nomad.client.allocs.memory.allocated",
"+nomad.client.allocs.memory.swap",
"+nomad.client.allocs.memory.usage",
"+nomad.nomad.job_status.dead",
"+nomad.nomad.job_status.running",
"+nomad.nomad.job_status.pending",
"+nomad.nomad.job_summary.running",
"+nomad.nomad.job_summary.complete",
"+nomad.nomad.job_summary.lost",
"+nomad.nomad.job_summary.failed"
]
}
# ----------------------------------------- Plugins
plugin "raw_exec" {
config {
enabled = true
}
config {
enabled = true
}
}
plugin "docker" {
config {
allow_caps = ["all"]
allow_privileged = true
extra_labels = ["job_name"]
volumes {
enabled = true
}
allow_caps = ["all"]
allow_privileged = true
extra_labels = ["job_name", "job_id", "task_group_name", "task_name", "namespace", "node_name", "node_id"]
volumes {
enabled = true
}
}
}
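
The `meta` block above is what makes host-specific paths and IDs available to job specs via `${meta.*}` interpolation. A minimal sketch of a task consuming them (job name, image, and volume path are illustrative only):

job "example" {
  datacenters = ["{{ datacenter_name }}"]

  group "example" {
    task "example" {
      driver = "docker"

      env {
        PUID = "${meta.PUID}"
        PGID = "${meta.PGID}"
      }

      config {
        image   = "alpine:latest"
        volumes = ["${meta.nfsStorageRoot}/pi-cluster/example:/config"]
      }
    }
  }
}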

View File

@@ -1,21 +1,21 @@
job "backup_local_filesystems" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "sysbatch"
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "sysbatch"
periodic {
cron = "0 */8 * * * *"
prohibit_overlap = true
time_zone = "America/New_York"
}
task "do_backups" {
driver = "raw_exec"
config {
# When running a binary that exists on the host, the path must be absolute
command = "${meta.backupCommand}"
args = ["${meta.backupCommandArg1}", "${meta.backupCommandArg2}", "${meta.backupCommandArg3}"]
periodic {
cron = "0 */8 * * * *"
prohibit_overlap = true
time_zone = "America/New_York"
}
} // /task do_backups
task "do_backups" {
driver = "raw_exec"
config {
# When running a binary that exists on the host, the path must be absolute
command = "${meta.backupCommand}"
args = ["${meta.backupCommandArg1}", "${meta.backupCommandArg2}", "${meta.backupCommandArg3}"]
}
} // /task do_backups
} //job
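
A `sysbatch` job runs its task on every eligible client rather than as a single allocation, which is what lets one job file back up the local filesystems of the whole cluster. The periodic stanza gates launches; a commented sketch of the same stanza (semantics as documented by Nomad):

periodic {
  cron             = "0 */8 * * * *"    # schedule, evaluated in time_zone below
  prohibit_overlap = true               # do not launch while a previous run is still active
  time_zone        = "America/New_York"
}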

View File

@@ -57,6 +57,7 @@ job "changedetection" {
service {
port = "webUI"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`changes.{{ homelab_domain_name }}`)",
@@ -75,7 +76,6 @@ job "changedetection" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service
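
The `provider = "nomad"` line added here (and to the other jobs below) registers the service with Nomad's built-in service catalog instead of Consul, in line with the removal of Consul elsewhere in this changeset. Templates that used to call `service` must then use `nomadService`, as the promtail-syslogs change further down does. A minimal sketch of consuming such a registration from another job's template (file name and rendered field are illustrative):

template {
  destination = "local/upstream.conf"
  data        = <<-EOH
  {% raw -%}
  {{ range nomadService "changedetection" }}
  upstream = "{{ .Address }}:{{ .Port }}"
  {{ end }}
  {% endraw -%}
  EOH
}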

View File

@@ -73,6 +73,7 @@ job "chronograf" {
service {
port = "chronografPort"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -91,7 +92,6 @@ job "chronograf" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -63,6 +63,7 @@ job "code" {
service {
port = "port1"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -82,7 +83,6 @@ job "code" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -1,109 +1,110 @@
job "diagnostics" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
constraint {
attribute = "${node.unique.name}"
operator = "regexp"
value = "rpi1"
}
group "diagnostics" {
count = 1
restart {
attempts = 0
delay = "30s"
constraint {
attribute = "${node.unique.name}"
operator = "regexp"
value = "macmini"
}
network {
port "whoami" {
to = 80
}
}
group "diagnostics" {
task "diagnostics" {
count = 1
// env {
// KEY = "VALUE"
// }
driver = "docker"
config {
image = "alpine:latest"
hostname = "${NOMAD_JOB_NAME}"
args = [
"/bin/sh",
"-c",
"chmod 755 /local/bootstrap.sh && /local/bootstrap.sh"
]
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/backups/config_backups:/backups",
"${meta.localStorageRoot}:/docker"
]
} // docker config
template {
destination = "local/bootstrap.sh"
data = <<EOH
#!/bin/sh
apk update
apk add --no-cache bash
apk add --no-cache bind-tools
apk add --no-cache curl
apk add --no-cache git
apk add --no-cache jq
apk add --no-cache openssl
apk add --no-cache iperf3
apk add --no-cache nano
apk add --no-cache wget
tail -f /dev/null # Keep container running
EOH
}
} // task diagnostics
task "whoami" {
driver = "docker"
config {
image = "containous/whoami:latest"
hostname = "${NOMAD_TASK_NAME}"
ports = ["whoami"]
} // /docker config
service {
port = "whoami"
name = "${NOMAD_JOB_NAME}"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_JOB_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_JOB_NAME}.service=${NOMAD_JOB_NAME}",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls.certresolver=cloudflare"
]
check {
type = "http"
path = "/"
interval = "90s"
timeout = "15s"
}
check_restart {
limit = 2
grace = "1m"
ignore_warnings = true
}
}
resources {
cpu = 25 # MHz
memory = 10 # MB
restart {
attempts = 0
delay = "30s"
}
} // /task whoami
network {
port "whoami" {
to = 80
}
}
} // group
task "diagnostics" {
// env {
// KEY = "VALUE"
// }
driver = "docker"
config {
image = "alpine:latest"
hostname = "${NOMAD_JOB_NAME}"
args = [
"/bin/sh",
"-c",
"chmod 755 /local/bootstrap.sh && /local/bootstrap.sh"
]
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/tmp:/diagnostics",
"${meta.localStorageRoot}:/docker"
]
} // docker config
template {
destination = "local/bootstrap.sh"
data = <<EOH
#!/bin/sh
apk update
apk add --no-cache bash
apk add --no-cache bind-tools
apk add --no-cache curl
apk add --no-cache git
apk add --no-cache jq
apk add --no-cache openssl
apk add --no-cache iperf3
apk add --no-cache nano
apk add --no-cache wget
tail -f /dev/null # Keep container running
EOH
}
} // task diagnostics
// task "whoami" {
// driver = "docker"
// config {
// image = "containous/whoami:latest"
// hostname = "${NOMAD_TASK_NAME}"
// ports = ["whoami"]
// } // /docker config
// service {
// port = "whoami"
// name = "${NOMAD_JOB_NAME}"
// provider = "nomad"
// tags = [
// "traefik.enable=true",
// "traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
// "traefik.http.routers.${NOMAD_JOB_NAME}.entryPoints=web,websecure",
// "traefik.http.routers.${NOMAD_JOB_NAME}.service=${NOMAD_JOB_NAME}",
// "traefik.http.routers.${NOMAD_JOB_NAME}.tls=true",
// "traefik.http.routers.${NOMAD_JOB_NAME}.tls.certresolver=cloudflare"
// ]
// check {
// type = "http"
// path = "/"
// interval = "90s"
// timeout = "15s"
// }
// check_restart {
// limit = 2
// grace = "1m"
// }
// }
// resources {
// cpu = 25 # MHz
// memory = 10 # MB
// }
// } // /task whoami
} // group
} // job

View File

@@ -54,6 +54,7 @@ job "freshrss" {
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`rss.{{ homelab_domain_name }}`)",
@@ -73,7 +74,6 @@ job "freshrss" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -0,0 +1,404 @@
job "gitea" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
constraint {
distinct_hosts = true
}
group "gitea" {
// constraint {
// attribute = "${node.unique.name}"
// operator = "regexp"
// value = "rpi"
// }
count = 1
restart {
attempts = 0
delay = "30s"
}
network {
port "webui" {
to = "3000"
}
port "ssh" {
to = "22"
}
}
task "create_filesystem" {
// Copy the most recent backup into place on the local computer. Gitea will not work with
// its database on an NFS share.
driver = "raw_exec"
config {
# When running a binary that exists on the host, the path must be absolute
command = "${meta.restoreCommand}"
args = [
"${meta.restoreCommand1}",
"${meta.restoreCommand2}",
"${NOMAD_JOB_NAME}",
"${meta.restoreCommand3}"
]
}
lifecycle {
hook = "prestart"
sidecar = false
}
} // /task create_filesystem
task "gitea" {
env {
GITEA__mailer__ENABLED = true
GITEA__mailer__FROM = "gitea@{{ homelab_domain_name }}"
GITEA__mailer__PASSWD = "{{ gitea_smtp_password }}"
GITEA__mailer__PROTOCOL = "smtp+starttls"
GITEA__mailer__SMTP_ADDR = "{{ email_smtp_host }}"
GITEA__mailer__SMTP_PORT = "{{ email_smtp_port_starttls }}"
GITEA__mailer__SUBJECT_PREFIX = "[Gitea]"
GITEA__mailer__USER = "{{ email_smtp_account }}"
GITEA__repository__DEFAULT_REPO_UNITS = "repo.code,repo.releases,repo.issues,repo.pulls,repo.wiki,repo.projects,repo.packages" # add `repo.actions` to the list if enabling actions
GITEA__server__DOMAIN = "{{ homelab_domain_name }}"
GITEA__server__ROOT_URL = "https://${NOMAD_JOB_NAME}.{{ homelab_domain_name }}"
GITEA__server__SSH_DOMAIN = "${NOMAD_JOB_NAME}.{{ homelab_domain_name }}"
GITEA__server__SSH_PORT = "2222" # Traefik gitea-ssh entrypoint
GITEA__server__START_SSH_SERVER = false
GITEA__service__ENABLE_NOTIFY_MAIL = true
GITEA__time__DEFAULT_UI_LOCATION = "America/New_York"
TZ = "America/New_York"
USER_GID = "${meta.PGID}"
USER_UID = "${meta.PUID}"
}
driver = "docker"
config {
image = "gitea/gitea:{{ gitea_version }}"
image_pull_timeout = "10m"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.localStorageRoot}/${NOMAD_JOB_NAME}:/data",
"/etc/timezone:/etc/timezone:ro",
"/etc/localtime:/etc/localtime:ro"
]
ports = ["webui", "ssh"]
} // docker config
service {
port = "webui"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_JOB_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_JOB_NAME}.service=${NOMAD_JOB_NAME}",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls.certresolver=cloudflare"
]
check {
type = "tcp"
port = "webui"
interval = "30s"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
}
} // service
service {
port = "ssh"
name = "gitea-ssh-svc"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.tcp.routers.gitea-ssh.rule=HostSNI(`*`)",
"traefik.tcp.routers.gitea-ssh.entrypoints=gitea-ssh",
"traefik.tcp.routers.gitea-ssh.service=gitea-ssh-svc"
]
} // service
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
} // task gitea
task "save_configuration" {
driver = "raw_exec"
config {
# When running a binary that exists on the host, the path must be absolute
command = "${meta.backupCommand}"
args = [
"${meta.backupAllocArg1}",
"${meta.backupAllocArg2}",
"${meta.backupAllocArg3}",
"${meta.backupAllocArg4}",
"${meta.backupAllocArg5}",
"${NOMAD_JOB_NAME}",
"${meta.backupAllocArg6}"
]
}
lifecycle {
hook = "poststop"
sidecar = false
}
} // /task save_configuration
} // group
// group "action-runners" {
// constraint {
// attribute = "${node.unique.name}"
// operator = "regexp"
// value = "macmini"
// }
// constraint {
// distinct_hosts = true
// }
// count = 1
// restart {
// attempts = 0
// delay = "30s"
// }
// network {
// port "cache" {
// to = "8088"
// }
// }
// task "await-gitea" {
// lifecycle {
// hook = "prestart"
// sidecar = false
// }
// driver = "docker"
// config {
// image = "busybox:latest"
// command = "/bin/sh"
// args = [
// "-c",
// "chmod 755 /local/ping.sh && /local/ping.sh"
// ]
// network_mode = "host"
// }
// template {
// destination = "local/ping.sh"
// change_mode = "restart"
// data = <<-EOH
// #!/bin/sh
// {% raw -%}
// {{ range nomadService "gitea" }}
// IP="{{ .Address }}"
// PORT="{{ .Port }}"
// {{ end }}
// {% endraw -%}
// until [ -n "${IP}" ] && [ -n "${PORT}" ]; do
// echo "Waiting for Nomad to populate the service information..."
// sleep 1
// done
// echo "Waiting for Gitea to start..."
// until nc -z "${IP}" "${PORT}"; do
// echo "'nc -z ${IP} ${PORT}' is unavailable..."
// sleep 1
// done
// echo "Gitea is up! Found at ${IP}:${PORT}"
// EOH
// }
// }
// task "gitea-action-runner" {
// env {
// CONFIG_FILE = "/local/config.yml"
// GITEA_INSTANCE_URL = "https://${NOMAD_JOB_NAME}.{{ homelab_domain_name }}"
// GITEA_RUNNER_NAME = "${node.unique.name}-action-runner"
// GITEA_RUNNER_REGISTRATION_TOKEN = "{{ gitea_runner_registration_token }}"
// PGID = "${meta.PGID}"
// PUID = "${meta.PUID}"
// TZ = "America/New_York"
// }
// driver = "docker"
// config {
// image = "gitea/act_runner:latest"
// image_pull_timeout = "10m"
// hostname = "${NOMAD_TASK_NAME}"
// volumes = [
// "${meta.nfsStorageRoot}/pi-cluster/gitea-action-runners:/data",
// "/var/run/docker.sock:/var/run/docker.sock"
// ]
// ports = ["cache"]
// } // docker config
// template {
// destination = "local/config.yml"
// env = false
// change_mode = "noop"
// data = <<-EOH
// log:
// # The level of logging, can be trace, debug, info, warn, error, fatal
// level: info
// runner:
// # Where to store the registration result.
// {% raw %}file: .runner-{{ env "node.unique.name" }}{% endraw +%}
// # How many tasks to execute concurrently.
// capacity: 1
// # Extra environment variables to run jobs.
// envs:
// A_TEST_ENV_NAME_1: a_test_env_value_1
// A_TEST_ENV_NAME_2: a_test_env_value_2
// # Extra environment variables to run jobs from a file.
// # It will be ignored if it's empty or the file doesn't exist.
// env_file: .env
// # The timeout for a job to be finished.
// # Please note that the Gitea instance also has a timeout (3h by default) for the job.
// # So the job could be stopped by the Gitea instance if it's timeout is shorter than this.
// timeout: 3h
// # Whether skip verifying the TLS certificate of the Gitea instance.
// insecure: false
// # The timeout for fetching the job from the Gitea instance.
// fetch_timeout: 5s
// # The interval for fetching the job from the Gitea instance.
// fetch_interval: 2s
// # The labels of a runner are used to determine which jobs the runner can run, and how to run them.
// # Like: ["macos-arm64:host", "ubuntu-latest:docker://node:16-bullseye", "ubuntu-22.04:docker://node:16-bullseye"]
// # If it's empty when registering, it will ask for inputting labels.
// # If it's empty when execute `daemon`, will use labels in `.runner` file.
// labels: []
// cache:
// # Enable cache server to use actions/cache.
// enabled: false
// # The directory to store the cache data.
// # If it's empty, the cache data will be stored in $HOME/.cache/actcache.
// dir: ""
// # The host of the cache server.
// # This is not the address the server listens on, but the address job containers connect to.
// # So 0.0.0.0 is a bad choice; leave it empty to detect automatically.
// {% raw %}host: "{{ env "NOMAD_IP_cache" }}"{% endraw +%}
// # The port of the cache server.
// {% raw %}port: {{ env "NOMAD_HOST_PORT_cache" }}{% endraw +%}
// # The external cache server URL. Valid only when enable is true.
// # If it's specified, act_runner will use this URL as the ACTIONS_CACHE_URL rather than start a server by itself.
// # The URL should generally end with "/".
// external_server: ""
// container:
// # Specifies the network to which the container will connect.
// # Could be host, bridge or the name of a custom network.
// # If it's empty, act_runner will create a network automatically.
// network: ""
// # Whether to use privileged mode or not when launching task containers (privileged mode is required for Docker-in-Docker).
// privileged: false
// # And other options to be used when the container is started (eg, --add-host=my.gitea.url:host-gateway).
// options:
// # The parent directory of a job's working directory.
// # If it's empty, /workspace will be used.
// workdir_parent:
// # Volumes (including bind mounts) can be mounted to containers. Glob syntax is supported, see https://github.com/gobwas/glob
// # You can specify multiple volumes. If the sequence is empty, no volumes can be mounted.
// # For example, if you only allow containers to mount the `data` volume and all the json files in `/src`, you should change the config to:
// # valid_volumes:
// # - data
// # - /src/*.json
// # If you want to allow any volume, please use the following configuration:
// # valid_volumes:
// # - '**'
// valid_volumes:
// - '**'
// # overrides the docker client host with the specified one.
// # If it's empty, act_runner will find an available docker host automatically.
// # If it's "-", act_runner will find an available docker host automatically, but the docker host won't be mounted to the job containers and service containers.
// # If it's not empty or "-", the specified docker host will be used. An error will be returned if it doesn't work.
// docker_host: ""
// # Pull docker image(s) even if already present
// force_pull: false
// host:
// # The parent directory of a job's working directory.
// # If it's empty, $HOME/.cache/act/ will be used.
// workdir_parent:
// EOH
// }
// // service {
// // port = "cache"
// // name = "${NOMAD_TASK_NAME}"
// // provider = "nomad"
// // tags = [
// // "traefik.enable=true",
// // "traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
// // "traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
// // "traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
// // "traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
// // "traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
// // "traefik.http.routers.${NOMAD_TASK_NAME}.middlewares=authelia@file"
// // ]
// // check {
// // type = "tcp"
// // port = "cache"
// // interval = "30s"
// // timeout = "4s"
// // }
// // check_restart {
// // limit = 0
// // grace = "1m"
// // }
// // } // service
// resources {
// cpu = 400 # MHz
// memory = 600 # MB
// } // resources
// } // task gitea-action-runner
// } // group action-runners
} // job
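
The `GITEA__section__KEY` variables above rely on Gitea's environment-to-ini mechanism: on container start, each variable is written into the matching section of `app.ini`. A minimal sketch of the mapping (values illustrative):

env {
  GITEA__server__DOMAIN  = "example.com"  # becomes DOMAIN under [server] in app.ini
  GITEA__mailer__ENABLED = true           # becomes ENABLED under [mailer] in app.ini
}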

View File

@@ -87,6 +87,7 @@ job "grafana" {
service {
port = "http"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -109,7 +110,6 @@ job "grafana" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -52,6 +52,7 @@ job "headless-chrome" {
service {
port = "port1"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`chrome.{{ homelab_domain_name }}`)",
@@ -70,7 +71,6 @@ job "headless-chrome" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -0,0 +1,101 @@
job "hishtory" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
# README
# https://github.com/linuxserver/docker-hishtory-server
# https://github.com/ddworken/hishtory/blob/master/README.md
// constraint {
// attribute = "${node.unique.name}"
// operator = "regexp"
// value = "rpi(1|2|3)"
// }
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "hishtory" {
count = 1
restart {
attempts = 0
delay = "30s"
}
network {
port "port1" {
to = "8080"
}
}
task "hishtory" {
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
HISHTORY_SQLITE_DB = "/config/hishtory.db"
}
driver = "docker"
config {
image = "lscr.io/linuxserver/hishtory-server:latest"
image_pull_timeout = "10m"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/config"
]
ports = ["port1"]
} // docker config
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare"
]
check {
type = "tcp"
port = "port1"
interval = "30s"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
}
} // service
resources {
cpu = 1800 # MHz
memory = 800 # MB
} // resources
} // task
} // group
} // job

View File

@@ -3,143 +3,149 @@ job "icloud_backup" {
datacenters = ["{{ datacenter_name }}"]
type = "service"
// Need to authenticate within the container by running
// icloud --username=<icloud-username> --session-directory=/app/session_data
// and then entering the 2FA code that is sent to the user associated with the iCloud account.
// constraint {
// attribute = "${node.unique.name}"
// operator = "regexp"
// value = "rpi(1|2|3)"
// }
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "icloud_backup" {
count = 1
restart {
attempts = 0
delay = "30s"
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
task "icloud_backup" {
group "icloud_backup" {
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
// ENV_ICLOUD_PASSWORD = "[icloud password]" # 2FA renders this env var useless at the moment.
}
count = 1
driver = "docker"
config {
image = "mandarons/icloud-drive"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/nate/icloud_backup:/app/icloud",
"${meta.nfsStorageRoot}/pi-cluster/icloud_backup/session_data:/app/session_data",
"local/icloud_backup.yaml:/app/config.yaml",
"/etc/timezone:/etc/timezone:ro",
"/etc/localtime:/etc/localtime:ro"
]
} // docker config
restart {
attempts = 0
delay = "30s"
}
template {
destination = "local/icloud_backup.yaml"
env = false
change_mode = "restart"
perms = "644"
data = <<-EOH
app:
logger:
# level - debug, info (default), warning, or error
level: "info"
# log filename icloud.log (default)
filename: "icloud.log"
credentials:
# iCloud drive username
username: "{{ icloud_backup_username }}"
# Retry login interval
retry_login_interval: 3600 # 1 hour
# Drive destination
root: "icloud"
smtp:
# If you want to recieve email notifications about expired/missing 2FA credentials then uncomment
email: "{{ email_smtp_account }}"
# optional, to email address. Default is sender email.
#to: "receiver@test.com"
password: "{{ icloud_backup_smtp_password }}"
host: "{{ email_smtp_host }}"
port: {{ email_smtp_port_starttls }}
# If your email provider doesn't handle TLS
no_tls: false
drive:
destination: "drive"
remove_obsolete: true
sync_interval: 172800 # 2 days
filters:
# File filters to be included in syncing iCloud drive content
folders:
- "Scanner By Readdle"
- "Documents by Readdle"
# - "folder3"
file_extensions:
# File extensions to be included
- "pdf"
- "png"
- "jpg"
- "jpeg"
- "xls"
- "xlsx"
- "docx"
- "pptx"
- "txt"
- "md"
- "html"
- "htm"
- "css"
- "js"
- "json"
- "xml"
- "yaml"
- "yml"
- "csv"
- "mp3"
- "mp4"
- "mov"
- "wav"
- "mkv"
- "m4a"
photos:
destination: "photos"
remove_obsolete: true
sync_inteval: 172800 # 2 days
filters:
albums:
# - "album1"
file_sizes: # valid values are original, medium and/or thumb
- "original"
# - "medium"
# - "thumb"
EOH
} // template data
task "icloud_backup" {
resources {
cpu = 900 # MHz
memory = 100 # MB
} // resources
env {
ENV_CONFIG_FILE_PATH = "/local/icloud_backup.yaml"
PGID = "${meta.PGID}"
PUID = "${meta.PUID}"
TZ = "America/New_York"
// ENV_ICLOUD_PASSWORD = "[icloud password]" # 2FA renders this env var useless at the moment.
}
} // task
driver = "docker"
config {
image = "mandarons/icloud-drive"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/nate/icloud_backup:/app/icloud",
"${meta.nfsStorageRoot}/pi-cluster/icloud_backup/session_data:/app/session_data",
"/etc/timezone:/etc/timezone:ro",
"/etc/localtime:/etc/localtime:ro"
]
} // docker config
template {
destination = "local/icloud_backup.yaml"
env = false
change_mode = "restart"
perms = "644"
data = <<-EOH
---
app:
logger:
# level - debug, info (default), warning, or error
level: "info"
# log filename icloud.log (default)
filename: "icloud.log"
credentials:
# iCloud drive username
username: "{{ icloud_backup_username }}"
# Retry login interval
retry_login_interval: 3600 # 1 hour
root: "icloud"
smtp:
# If you want to receive email notifications about expired/missing 2FA credentials then uncomment
email: "{{ email_smtp_account }}"
# optional, to email address. Default is sender email.
#to: "receiver@test.com"
password: "{{ icloud_backup_smtp_password }}"
host: "{{ email_smtp_host }}"
port: {{ email_smtp_port_starttls }}
# If your email provider doesn't handle TLS
no_tls: false
drive:
destination: "drive"
remove_obsolete: true
sync_interval: 172800 # 2 days
filters:
# File filters to be included in syncing iCloud drive content
folders:
- "Scanner By Readdle"
- "Documents by Readdle"
# - "folder3"
file_extensions:
# File extensions to be included
- "pdf"
- "png"
- "jpg"
- "jpeg"
- "xls"
- "xlsx"
- "docx"
- "pptx"
- "txt"
- "md"
- "html"
- "htm"
- "css"
- "js"
- "json"
- "xml"
- "yaml"
- "yml"
- "csv"
- "mp3"
- "mp4"
- "mov"
- "wav"
- "mkv"
- "m4a"
photos:
destination: "photos"
remove_obsolete: true
sync_interval: 172800 # 2 days
all_albums: false # Optional, default false. If true preserve album structure. If same photo is in multiple albums creates duplicates on filesystem
folder_format: "%Y-%m" # optional, if set put photos in subfolders according to format. Cheatsheet - https://strftime.org
filters:
albums:
# - "album1"
file_sizes: # valid values are original, medium and/or thumb
- "original"
# - "medium"
# - "thumb"
EOH
} // template data
resources {
cpu = 900 # MHz
memory = 100 # MB
} // resources
} // task
} // group
} // group
} // job

View File

@@ -78,6 +78,7 @@ job "influxdb" {
service {
port = "httpAPI"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
check {
type = "tcp"
@@ -89,7 +90,6 @@ job "influxdb" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}

View File

@@ -0,0 +1,98 @@
job "jellyfin" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
constraint {
attribute = "${node.unique.name}"
operator = "regexp"
value = "macmini"
}
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "jellyfin" {
count = 1
restart {
attempts = 0
delay = "30s"
}
network {
port "webui" {
static = "8096"
to = "8096"
}
port "udp1" {
static = "7359"
to = "7359"
}
}
task "jellyfin" {
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
}
driver = "docker"
config {
image = "lscr.io/linuxserver/jellyfin:latest"
image_pull_timeout = "10m"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/config",
"${meta.nfsStorageRoot}/media/media/movies:/data/movies",
"${meta.nfsStorageRoot}/media/media/tv:/data/tv"
]
ports = ["webui", "udp1"]
} // docker config
service {
port = "webui"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare"
]
check {
type = "tcp"
port = "webui"
interval = "30s"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
}
} // service
resources {
cpu = 2500 # MHz
memory = 750 # MB
} // resources
} // task
} // group
} // job
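
Unlike most jobs in this diff, jellyfin pins static host ports, so clients (and Jellyfin's UDP auto-discovery on 7359) can always reach it at the same address on the macmini. A sketch of the difference between the two port stanza forms (labels illustrative):

network {
  port "dynamic" {
    to = "8096"       # Nomad maps an ephemeral host port to container port 8096
  }
  port "fixed" {
    static = "8096"   # host port is always 8096
    to     = "8096"
  }
}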

View File

@@ -0,0 +1,94 @@
job "ladder" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
// constraint {
// attribute = "${node.unique.name}"
// operator = "regexp"
// value = "rpi(1|2|3)"
// }
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "ladder" {
count = 1
restart {
attempts = 0
delay = "30s"
}
network {
port "port1" {
to = "8080"
}
}
task "ladder" {
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
PORT = "8080"
}
driver = "docker"
config {
image = "ghcr.io/kubero-dev/ladder:latest"
hostname = "${NOMAD_TASK_NAME}"
ports = ["port1"]
image_pull_timeout = "10m"
// volumes = [
// "${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/etc/TEMPLATE/"
// ]
} // docker config
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
]
check {
type = "tcp"
port = "port1"
interval = "30s"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
}
} // service
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
} // task
} // group
} // job

View File

@@ -82,6 +82,7 @@ job "lidarr" {
service {
port = "lidarr"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -100,7 +101,6 @@ job "lidarr" {
check_restart {
limit = 0
grace = "10m"
ignore_warnings = true
}
} // service

View File

@@ -47,6 +47,7 @@ job "loki" {
service {
port = "loki_port"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -66,7 +67,6 @@ job "loki" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -67,6 +67,7 @@ job "mealie" {
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -85,7 +86,6 @@ job "mealie" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -57,6 +57,7 @@ job "nginx" {
service {
port = "web"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -75,7 +76,6 @@ job "nginx" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -55,6 +55,7 @@ job "nzbhydra" {
service {
port = "hydra_port"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`hydra.{{ homelab_domain_name }}`)",
@@ -73,7 +74,6 @@ job "nzbhydra" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -1,92 +1,92 @@
job "overseerr" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
// constraint {
// attribute = "${node.unique.name}"
// operator = "regexp"
// value = "rpi"
// }
// constraint {
// attribute = "${node.unique.name}"
// operator = "regexp"
// value = "rpi"
// }
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "overseerr" {
count = 1
restart {
attempts = 0
delay = "30s"
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
network {
port "overseerr" {
to = "5055"
}
}
group "overseerr" {
task "overseerr" {
count = 1
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
}
driver = "docker"
config {
image = "ghcr.io/linuxserver/overseerr"
hostname = "${NOMAD_JOB_NAME}"
ports = ["overseerr"]
volumes = [ "${meta.nfsStorageRoot}/pi-cluster/overseerr:/config" ]
} // docker config
service {
port = "overseerr"
name = "${NOMAD_JOB_NAME}"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_JOB_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_JOB_NAME}.service=overseerr",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls.certresolver=cloudflare",
"traefik.http.routers.${NOMAD_JOB_NAME}.middlewares=authelia@file"
]
check {
type = "tcp"
port = "overseerr"
interval = "30s"
timeout = "4s"
restart {
attempts = 0
delay = "30s"
}
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
network {
port "overseerr" {
to = "5055"
}
}
} // service
resources {
cpu = 1600 # MHz
memory = 300 # MB
} // resources
task "overseerr" {
} // task
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
}
driver = "docker"
config {
image = "lscr.io/linuxserver/overseerr:latest"
hostname = "${NOMAD_JOB_NAME}"
ports = ["overseerr"]
image_pull_timeout = "10m"
volumes = [ "${meta.nfsStorageRoot}/pi-cluster/overseerr:/config" ]
} // docker config
service {
port = "overseerr"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_JOB_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_JOB_NAME}.service=overseerr",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls.certresolver=cloudflare"
]
check {
type = "tcp"
port = "overseerr"
interval = "30s"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
}
} // service
resources {
cpu = 1600 # MHz
memory = 300 # MB
} // resources
} // task
} // group
} // group
} // job

View File

@@ -37,7 +37,7 @@ job "pihole" {
// }
}
task "await_filesytem" {
task "await_filesystem" {
driver = "docker"
config {
@@ -109,6 +109,7 @@ job "pihole" {
service {
name = "${NOMAD_JOB_NAME}"
port = "web"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`p.{{ homelab_domain_name }}`)",
@@ -118,7 +119,7 @@ job "pihole" {
"traefik.http.routers.${NOMAD_JOB_NAME}.tls.certresolver=cloudflare",
"traefik.http.middlewares.piholeRedirect.redirectregex.regex=^(https?://p\\.{{ homelab_domain_name }})/?$",
"traefik.http.middlewares.piholeRedirect.redirectregex.replacement=$${1}/admin/",
"traefik.http.routers.${NOMAD_JOB_NAME}.middlewares=authelia@file,piholeRedirect"
"traefik.http.routers.${NOMAD_JOB_NAME}.middlewares=piholeRedirect"
]
check {
type = "http"
@@ -130,13 +131,13 @@ job "pihole" {
check_restart {
limit = 3
grace = "10m"
ignore_warnings = false
}
}
service {
name = "piholeDNStcp"
port = "dns"
provider = "nomad"
check {
type = "tcp"
port = "dns"

View File

@@ -51,7 +51,7 @@ job "promtail-syslogs" {
{% raw -%}
clients:
- url: http://{{ range service "loki" }}{{ .Address }}:{{ .Port }}{{ end }}/loki/api/v1/push
- url: http://{{ range nomadService "loki" }}{{ .Address }}:{{ .Port }}{{ end }}/loki/api/v1/push
{% endraw %}
scrape_configs:

View File

@@ -84,6 +84,7 @@ job "prowlarr" {
service {
port = "prowlarr"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -103,7 +104,6 @@ job "prowlarr" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -1,136 +1,136 @@
job "radarr" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
// constraint {
// attribute = "${node.unique.name}"
// operator = "regexp"
// value = "rpi3"
// }
// constraint {
// attribute = "${node.unique.name}"
// operator = "regexp"
// value = "rpi3"
// }
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "radarrGroup" {
restart {
attempts = 0
delay = "10m"
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
network {
port "radarr" {
to = "7878"
}
}
group "radarrGroup" {
task "create_filesystem" {
// Copy the most recent backup into place on the local computer. Radarr will not work with
// its database on an NFS share.
driver = "raw_exec"
config {
# When running a binary that exists on the host, the path must be absolute
command = "${meta.restoreCommand}"
args = [
"${meta.restoreCommand1}",
"${meta.restoreCommand2}",
"${NOMAD_JOB_NAME}",
"${meta.restoreCommand3}"
]
}
lifecycle {
hook = "prestart"
sidecar = false
}
} // /task create_filesystem
task "radarr" {
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
//DOCKER_MODS = "linuxserver/mods:universal-cron|linuxserver/mods:universal-mod2"
//UMASK_SET = 022 #optional
}
driver = "docker"
config {
image = "ghcr.io/linuxserver/radarr:develop"
hostname = "${NOMAD_JOB_NAME}"
force_pull = true
ports = ["radarr"]
volumes = [
"${meta.localStorageRoot}/${NOMAD_JOB_NAME}:/config",
"${meta.nfsStorageRoot}/media:/media"
]
} // docker config
service {
port = "radarr"
name = "${NOMAD_JOB_NAME}"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_JOB_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_JOB_NAME}.service=${NOMAD_JOB_NAME}",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls.certresolver=cloudflare"
]
check {
type = "tcp"
port = "radarr"
interval = "30s"
timeout = "4s"
restart {
attempts = 0
delay = "10m"
}
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
network {
port "radarr" {
to = "7878"
}
}
} // service
resources {
cpu = 2000 # MHz
memory = 400 # MB
} // resources
task "create_filesystem" {
// Copy the most recent backup into place on the local computer. Radarr will not work with
// its database on an NFS share.
} // /task radarr
driver = "raw_exec"
config {
# When running a binary that exists on the host, the path must be absolute
command = "${meta.restoreCommand}"
args = [
"${meta.restoreCommand1}",
"${meta.restoreCommand2}",
"${NOMAD_JOB_NAME}",
"${meta.restoreCommand3}"
]
}
task "save_configuration" {
driver = "raw_exec"
config {
# When running a binary that exists on the host, the path must be absolute
command = "${meta.backupCommand}"
args = [
"${meta.backupAllocArg1}",
"${meta.backupAllocArg2}",
"${meta.backupAllocArg3}",
"${meta.backupAllocArg4}",
"${meta.backupAllocArg5}",
"${NOMAD_JOB_NAME}",
"${meta.backupAllocArg6}"
]
}
lifecycle {
hook = "poststop"
sidecar = false
}
} // /task save_configuration
lifecycle {
hook = "prestart"
sidecar = false
}
} // group
} // /task create_filesystem
task "radarr" {
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
//DOCKER_MODS = "linuxserver/mods:universal-cron|linuxserver/mods:universal-mod2"
//UMASK_SET = 022 #optional
}
driver = "docker"
config {
image = "ghcr.io/linuxserver/radarr:develop"
hostname = "${NOMAD_JOB_NAME}"
force_pull = true
ports = ["radarr"]
volumes = [
"${meta.localStorageRoot}/${NOMAD_JOB_NAME}:/config",
"${meta.nfsStorageRoot}/media:/media"
]
} // docker config
service {
port = "radarr"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_JOB_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_JOB_NAME}.service=${NOMAD_JOB_NAME}",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls.certresolver=cloudflare"
]
check {
type = "tcp"
port = "radarr"
interval = "30s"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
}
} // service
resources {
cpu = 2000 # MHz
memory = 400 # MB
} // resources
} // /task radarr
task "save_configuration" {
driver = "raw_exec"
config {
# When running a binary that exists on the host, the path must be absolute
command = "${meta.backupCommand}"
args = [
"${meta.backupAllocArg1}",
"${meta.backupAllocArg2}",
"${meta.backupAllocArg3}",
"${meta.backupAllocArg4}",
"${meta.backupAllocArg5}",
"${NOMAD_JOB_NAME}",
"${meta.backupAllocArg6}"
]
}
lifecycle {
hook = "poststop"
sidecar = false
}
} // /task save_configuration
} // group
} // job
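
radarr follows the same bracket pattern as gitea above: a prestart raw_exec task restores the most recent local backup before the container comes up, and a poststop task saves configuration after it stops. The ordering comes entirely from the lifecycle stanzas; a distilled sketch (task bodies reduced to the relevant hooks):

task "restore" {
  driver = "raw_exec"
  config {
    command = "${meta.restoreCommand}"
    args    = ["${meta.restoreCommand1}", "${meta.restoreCommand2}", "${NOMAD_JOB_NAME}", "${meta.restoreCommand3}"]
  }
  lifecycle {
    hook    = "prestart"  # must finish before the main task starts
    sidecar = false       # run once to completion, not as a long-lived sidecar
  }
}

task "backup" {
  driver = "raw_exec"
  config {
    command = "${meta.backupCommand}"
    args    = ["${meta.backupAllocArg1}", "${meta.backupAllocArg5}", "${NOMAD_JOB_NAME}", "${meta.backupAllocArg6}"]
  }
  lifecycle {
    hook    = "poststop"  # runs after the main task has stopped
    sidecar = false
  }
}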

View File

@@ -81,6 +81,7 @@ job "readarr" {
service {
port = "readarr"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -100,7 +101,6 @@ job "readarr" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -39,7 +39,7 @@ job "recyclarr" {
// user = "${meta.PUID}:${meta.PGID}"
driver = "docker"
config {
image = "ghcr.io/recyclarr/recyclarr:4"
image = "ghcr.io/recyclarr/recyclarr:{{ recyclarr_version }}"
hostname = "${NOMAD_TASK_NAME}"
init = true
} // docker config
@@ -70,53 +70,152 @@ job "recyclarr" {
sonarr:
series:
base_url: https://sonarr.{{ homelab_domain_name }}/
api_key: a6cedb325b5645eeb08acb06a42b7746
api_key: "{{ sonarr_api_key }}"
delete_old_custom_formats: true
replace_existing_custom_formats: true
# Quality definitions from the guide to sync to Sonarr. Choices: series, anime
quality_definition:
type: series
# Release profiles from the guide to sync to Sonarr v3 (Sonarr v4 does not use this!)
# Use `recyclarr list release-profiles` for values you can put here.
# https://trash-guides.info/Sonarr/Sonarr-Release-Profile-RegEx/
release_profiles:
quality_profiles:
- name: "HD - 720p/1080p"
reset_unmatched_scores:
enabled: true
upgrade:
allowed: true
until_quality: WEB-1080p
qualities:
- name: Bluray-2160p Remux
enabled: false
- name: Bluray-2160p
enabled: false
- name: WEB-2160p
enabled: false
qualities:
- WEBRip-2160p
- WEBDL-2160p
- name: HDTV-2160p
enabled: false
- name: Bluray-1080p Remux
enabled: false
- name: Bluray-1080p
- name: WEB-1080p
qualities:
- WEBRip-1080p
- WEBDL-1080p
- name: HDTV-1080p
- name: Bluray-720p
enabled: false
- name: WEB-720p
qualities:
- WEBRip-720p
- WEBDL-720p
- name: HDTV-720p
custom_formats:
- trash_ids:
- EBC725268D687D588A20CBC5F97E538B # Low Quality Groups
- 1B018E0C53EC825085DD911102E2CA36 # Release Sources (Streaming Service)
- 71899E6C303A07AF0E4746EFF9873532 # P2P Groups + Repack/Proper
strict_negative_scores: false
- 85c61753df5da1fb2aab6f2a47426b09 # BR-DISK
- 9c11cd3f07101cdba90a2d81cf0e56b4 # LQ
- e2315f990da2e2cbfc9fa5b7a6fcfe48 # LQ Release Title
# - 47435ece6b99a0b477caf360e79ba0bb # X265
- fbcb31d8dabd2a319072b84fc0b7249c # Extras
- 32b367365729d530ca1c124a0b180c64 # Bad dual lingual groups
- 82d40da2bc6923f41e14394075dd4b03 # No-RlsGroup
quality_profiles:
- name: "HD - 720p/1080p"
score: -1000
- trash_ids:
- 76e060895c5b8a765c310933da0a5357 # Optionals
filter:
include:
- cec8880b847dd5d31d29167ee0112b57 # Golden rule
- 436f5a7d08fbf02ba25cb5e5dfe98e55 # Ignore Dolby Vision without HDR10 fallback.
# - f3f0f3691c6a1988d4a02963e69d11f2 # Ignore The Group -SCENE
# - 5bc23c3a055a1a5d8bbe4fb49d80e0cb # Ignore so called scene releases
- 538bad00ee6f8aced8e0db5218b8484c # Ignore Bad Dual Audio Groups
- 4861d8238f9234606df6721df6e27deb # Ignore AV1
- bc7a6383cbe88c3ee2d6396e1aacc0b3 # Prefer HDR
- 6f2aefa61342a63387f2a90489e90790 # Dislike retags: rartv, rarbg, eztv, TGx
- 19cd5ecc0a24bf493a75e80a51974cdd # Dislike retagged groups
- 6a7b462c6caee4a991a9d8aa38ce2405 # Dislike release ending: en
- 236a3626a07cacf5692c73cc947bc280 # Dislike release containing: 1-
# - fa47da3377076d82d07c4e95b3f13d07 # Prefer Dolby Vision
- ec8fa7296b64e8cd390a1600981f3923 # Repack
quality_profiles:
- name: "HD - 720p/1080p"
score: 5
- trash_ids:
- eb3d5cc0a2be0db205fb823640db6a3c # Repack2
quality_profiles:
- name: "HD - 720p/1080p"
score: 6
- trash_ids:
- 44e7c4de10ae50265753082e5dc76047 # Repack3
quality_profiles:
- name: "HD - 720p/1080p"
score: 7
- trash_ids: # Streaming services, Low Tier
- bbcaf03147de0f73be2be4a9078dfa03 # 40D
- fcc09418f67ccaddcf3b641a22c5cfd7 # ALL4
- 77a7b25585c18af08f60b1547bb9b4fb # CC
- f27d46a831e6b16fa3fee2c4e5d10984 # CANALPlus
- 4e9a630db98d5391aec1368a0256e2fe # CRAV
- 36b72f59f4ea20aad9316f475f2d9fbb # DCU
- 7be9c0572d8cd4f81785dacf7e85985e # FOD
- 7a235133c87f7da4c8cccceca7e3c7a6 # HBO
- f6cce30f1733d5c8194222a7507909bb # HULU
- dc503e2425126fa1d0a9ad6168c83b3f # IP
- 0ac24a2a68a9700bcb7eeca8e5cd644c # iT
- b2b980877494b560443631eb1f473867 # NLZ
- fb1a91cdc0f26f7ca0696e0e95274645 # OViD
- c30d2958827d1867c73318a5a2957eb1 # Red
- ae58039e1319178e6be73caab5c42166 # Sho
- d100ea972d1af2150b65b1cffb80f6b5 # TVer
- 0e99e7cc719a8a73b2668c3a0c3fe10c # U-next
- 5d2317d99af813b6529c7ebf01c83533 # VDL
quality_profiles:
- name: "HD - 720p/1080p"
score: 50
- trash_ids: # Streaming services, second tier
- d660701077794679fd59e8bdf4ce3a29 # AMZN
- a880d6abc21e7c16884f3ae393f84179 # HMAX
- d34870697c9db575f17700212167be23 # NF
- 1656adc6d7bb2c8cca6acfb6592db421 # PCOK
- c67a75ae4a1715f2bb4d492755ba4195 # PMTP
- 3ac5d84fce98bab1b531393e9c82f467 # QIBI
- 1efe8da11bfd74fbbcd4d8117ddb9213 # STAN
quality_profiles:
- name: "HD - 720p/1080p"
score: 80
- trash_ids: # Streaming services, Top tier
- f67c9ca88f463a48346062e8ad07713f # ATVP
- 89358767a60cc28783cdc3d0be9388a4 # DSNP
- 81d1fbf600e2540cee87f3a23f9d3c1c # MAX
quality_profiles:
- name: "HD - 720p/1080p"
score: 100
- trash_ids: # HQ Source Groups: Tier 1
- e6258996055b9fbab7e9cb2f75819294
quality_profiles:
- name: "HD - 720p/1080p"
score: 1700
- trash_ids: # HQ Source Groups: Tier 2
- 58790d4e2fdcd9733aa7ae68ba2bb503
quality_profiles:
- name: "HD - 720p/1080p"
score: 1650
- trash_ids: # HQ Source Groups: Tier 3
- d84935abd3f8556dcd51d4f27e22d0a6
quality_profiles:
- name: "HD - 720p/1080p"
score: 1600
# Configuration specific to Radarr.
radarr:
movies:
# Set the URL/API Key to your actual instance
base_url: https://radarr.{{ homelab_domain_name }}/
api_key: 53060417cccf4978bf7384c7869616f1
api_key: "{{ radarr_api_key }}"
delete_old_custom_formats: true
replace_existing_custom_formats: true
# Which quality definition in the guide to sync to Radarr. Only choice right now is 'movie'
quality_definition:
type: movie
preferred_ratio: 0.5
quality_profiles:
- name: "720p/1080p"
reset_unmatched_scores:
enabled: true
- name: "720p/1080p Remux"
reset_unmatched_scores:
enabled: true
custom_formats:
# Use `recyclarr list custom-formats radarr` for values you can put here.
# https://trash-guides.info/Radarr/Radarr-collection-of-custom-formats/
@@ -154,9 +253,7 @@ radarr:
- af94e0fe497124d1f9ce732069ec8c3b # WEB Tier 03
quality_profiles:
- name: "720p/1080p"
reset_unmatched_scores: true
- name: "720p/1080p Remux"
reset_unmatched_scores: true
# HDR FORMATS
# ########################
@@ -178,9 +275,9 @@ radarr:
- f2aacebe2c932337fe352fa6e42c1611 # 9.1 Surround
quality_profiles:
- name: "720p/1080p"
score: -50
score: -100
- name: "720p/1080p Remux"
score: -50
score: -100
- trash_ids:
- 89dac1be53d5268a7e10a19d3c896826 # 2.0 Stereo
@@ -195,6 +292,7 @@ radarr:
score: 80
- name: "720p/1080p Remux"
score: 80
EOH
}

View File

@@ -0,0 +1,27 @@
job "remove_nzbs" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "batch"
constraint {
attribute = "${node.unique.name}"
operator = "regexp"
value = "rpi"
}
periodic {
cron = "*/15 * * * * *"
prohibit_overlap = true
time_zone = "America/New_York"
}
task "remove_nzbs" {
driver = "raw_exec"
config {
command = "/home/pi/.pyenv/shims/python"
args = ["/home/pi/repos/bin/bin-sabnzbd/removeNZBs.py"]
}
} // /task do_backups
} //job

View File

@@ -1,496 +1,529 @@
job "reverse-proxy" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
constraint {
attribute = "${node.unique.name}"
value = "rpi1"
}
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "authelia-group" {
group "reverse-proxy-group" {
restart {
attempts = 0
delay = "30s"
}
constraint {
attribute = "${node.unique.name}"
operator = "regexp"
value = "rpi"
}
network {
port "authelia-port" {
static = {{ authelia_port }}
to = 9091
}
port "whoami" {
to = 80
}
port "dashboard" {
static = 8080
to = 8080
}
port "web" {
static = 80
to = 80
}
port "websecure" {
static = 443
to = 443
}
port "externalwebsecure" {
static = 4430
to = 4430
}
}
restart {
attempts = 0
delay = "30s"
}
task "authelia" {
network {
port "authelia-port" {
to = 9091
}
}
env {
TZ = "America/New_York"
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
}
task "authelia" {
driver = "docker"
config {
image = "authelia/authelia:{{ authelia_version }}"
hostname = "authelia"
ports = ["authelia-port"]
volumes = [ "${meta.nfsStorageRoot}/pi-cluster/authelia:/config" ]
args = [
"--config",
"/local/authelia/config.yml"
]
} // docker config
env {
TZ = "America/New_York"
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
}
template {
destination = "local/authelia/users.yml"
env = false
change_mode = "restart"
perms = "644"
data = <<-EOH
---
###############################################################
# Users Database #
###############################################################
driver = "docker"
config {
image = "authelia/authelia:{{ authelia_version }}"
hostname = "authelia"
ports = ["authelia-port"]
image_pull_timeout = "10m"
volumes = [ "${meta.nfsStorageRoot}/pi-cluster/authelia:/config" ]
args = [
"--config",
"/local/authelia/config.yml"
]
} // docker config
# This file can be used if you do not have an LDAP set up.
users:
{{ authelia_user1_name }}:
displayname: "{{ authelia_user1_name }}"
password: "$argon2id$v=19$m=65536,t=1,p={{ authelia_user1_password }}"
email: {{ authelia_user1_email }}
groups:
- admins
- dev
EOH
}
template {
destination = "local/authelia/users.yml"
env = false
change_mode = "restart"
perms = "644"
data = <<-EOH
---
###############################################################
# Users Database #
###############################################################
template {
destination = "local/authelia/config.yml"
env = false
change_mode = "restart"
perms = "644"
data = <<-EOH
---
## The theme to display: light, dark, grey, auto.
theme: auto
# This file can be used if you do not have an LDAP set up.
users:
{{ authelia_user1_name }}:
displayname: "{{ authelia_user1_name }}"
password: "$argon2id$v=19$m=65536,t=1,p={{ authelia_user1_password }}"
email: {{ authelia_user1_email }}
groups:
- admins
- dev
EOH
}
jwt_secret: {{ authelia_jwt_secret}}
default_redirection_url: https://authelia.{{ homelab_domain_name}}
template {
destination = "local/authelia/config.yml"
env = false
change_mode = "restart"
perms = "644"
data = <<-EOH
---
## The theme to display: light, dark, grey, auto.
theme: auto
server:
host: 0.0.0.0
port: 9091
path: ""
read_buffer_size: 4096
write_buffer_size: 4096
enable_pprof: false
enable_expvars: false
disable_healthcheck: false
jwt_secret: {{ authelia_jwt_secret}}
default_redirection_url: https://authelia.{{ homelab_domain_name}}
log:
level: info
format: text
# file_path: "/config/log.txt"
keep_stdout: false
server:
host: 0.0.0.0
port: 9091
path: ""
buffers:
read: 4096
write: 4096
timeouts:
read: 15s
write: 15s
idle: 30s
enable_pprof: false
enable_expvars: false
disable_healthcheck: false
totp:
issuer: authelia.com
log:
level: info
format: text
# file_path: "/config/log.txt"
keep_stdout: false
authentication_backend:
disable_reset_password: false
file:
path: /local/authelia/users.yml
password:
algorithm: argon2id
iterations: 1
salt_length: 16
parallelism: 8
memory: 64
totp:
issuer: authelia.com
access_control:
default_policy: deny
networks:
- name: internal
networks:
- 10.0.0.0/8
#- 172.16.0.0/12
#- 192.168.0.0/18
rules:
# Rules applied to everyone
- domain: "*.{{ homelab_domain_name }}"
policy: two_factor
authentication_backend:
password_reset:
disable: false
file:
path: /local/authelia/users.yml
password:
algorithm: argon2id
iterations: 1
salt_length: 16
parallelism: 8
memory: 64
access_control:
default_policy: deny
networks:
- internal
- name: internal
networks:
- 10.0.0.0/8
#- 172.16.0.0/12
#- 192.168.0.0/18
rules:
# Rules applied to everyone
- domain: "*.{{ homelab_domain_name }}"
policy: two_factor
networks:
- internal
session:
name: authelia_session
domain: {{ homelab_domain_name }}
same_site: lax
secret: {{ authelia_session_secret }}
expiration: 1h
inactivity: 15m
remember_me_duration: 1w
session:
name: authelia_session
domain: {{ homelab_domain_name }}
same_site: lax
secret: {{ authelia_session_secret }}
expiration: 1h
inactivity: 15m
remember_me_duration: 1w
regulation:
max_retries: 5
find_time: 10m
ban_time: 15m
regulation:
max_retries: 5
find_time: 10m
ban_time: 15m
storage:
encryption_key: {{ authelia_sqlite_encryption_key}}
local:
path: /config/db.sqlite3
storage:
encryption_key: {{ authelia_sqlite_encryption_key}}
local:
path: /config/db.sqlite3
notifier:
smtp:
username: {{ email_smtp_account }}
password: {{ authelia_smtp_password }}
host: {{ email_smtp_host }}
port: {{ email_smtp_port }}
sender: "Authelia <{{ my_email_address }}>"
subject: "[Authelia] {title}"
startup_check_address: {{ my_email_address }}
notifier:
smtp:
username: {{ email_smtp_account }}
password: {{ authelia_smtp_password }}
host: {{ email_smtp_host }}
port: {{ email_smtp_port }}
sender: "Authelia <{{ my_email_address }}>"
subject: "[Authelia] {title}"
startup_check_address: {{ my_email_address }}
ntp:
address: "time.cloudflare.com:123"
version: 3
max_desync: 3s
disable_startup_check: true
disable_failure: true
EOH
}
ntp:
address: "time.cloudflare.com:123"
version: 3
max_desync: 3s
disable_startup_check: true
disable_failure: true
EOH
}
service {
port = "authelia-port"
name = "${NOMAD_TASK_NAME}"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`authelia.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
"traefik.http.middlewares.authelia-headers.headers.customResponseHeaders.Cache-Control=no-store",
"traefik.http.middlewares.authelia-headers.headers.customResponseHeaders.Pragma=no-cache",
"traefik.http.routers.authelia.middlewares=authelia-headers"
service {
port = "authelia-port"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`authelia.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
"traefik.http.middlewares.authelia-headers.headers.customResponseHeaders.Cache-Control=no-store",
"traefik.http.middlewares.authelia-headers.headers.customResponseHeaders.Pragma=no-cache",
"traefik.http.routers.authelia.middlewares=authelia-headers"
]
check {
type = "tcp"
port = "authelia-port"
interval = "30s"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
}
} // service
resources {
cpu = 200 # MHz
memory = 1000 # MB
}
} // task authelia
} // authelia-group
group "reverse-proxy-group" {
constraint {
attribute = "${node.unique.name}"
value = "rpi1"
}
restart {
attempts = 0
delay = "30s"
}
network {
port "whoami" {
to = 80
}
port "dashboard" {
static = 8080
to = 8080
}
port "web" {
static = 80
to = 80
}
port "websecure" {
static = 443
to = 443
}
port "externalwebsecure" {
static = 4430
to = 4430
}
port "ssh" { # Used for gitea
static = 2222
to = 2222
}
}
task "whoami" {
driver = "docker"
config {
image = "containous/whoami:latest"
hostname = "${NOMAD_TASK_NAME}"
image_pull_timeout = "10m"
ports = ["whoami"]
} // /docker config
service {
port = "whoami"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_TASK_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
"traefik.http.routers.${NOMAD_TASK_NAME}.middlewares=authelia@file"
]
check {
type = "http"
path = "/"
interval = "90s"
timeout = "15s"
}
check_restart {
limit = 2
grace = "1m"
}
}
resources {
cpu = 25 # MHz
memory = 10 # MB
}
} // /task whoami
task "traefik" {
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
CF_API_EMAIL = "{{ my_email_address }}"
CF_DNS_API_TOKEN = "{{ traefik_cf_api_token }}"
}
driver = "docker"
config {
image = "traefik:v{{ traefik_version }}"
hostname = "traefik"
ports = ["dashboard", "web", "websecure","externalwebsecure", "ssh"]
volumes = [ "${meta.nfsStorageRoot}/pi-cluster/traefik/acme:/acme" ]
image_pull_timeout = "10m"
args = [
"--global.sendAnonymousUsage=false",
"--global.checkNewVersion=false",
"--entryPoints.gitea-ssh.address=:2222",
"--entryPoints.web.address=:80",
"--entryPoints.websecure.address=:443",
"--entryPoints.externalwebsecure.address=:4430",
"--entrypoints.web.http.redirections.entryPoint.to=websecure",
"--entrypoints.web.http.redirections.entryPoint.scheme=https",
"--entrypoints.web.http.redirections.entryPoint.permanent=true",
"--providers.file.filename=/local/traefik/siteconfigs.toml",
"--providers.file.watch=true",
"--providers.consulcatalog=true",
"--providers.consulcatalog.endpoint.address=http://${NOMAD_IP_web}:8500",
"--providers.consulcatalog.prefix=traefik",
"--providers.consulcatalog.exposedbydefault=false",
"--providers.nomad=true",
"--providers.nomad.endpoint.address=http://${NOMAD_IP_web}:4646",
// "--metrics=true",
// "--metrics.influxdb=true",
// "--metrics.influxdb.address=influxdb.service.consul:{{ influxdb_port }}",
// "--metrics.influxdb.protocol=http",
// "--metrics.influxdb.pushinterval=10s",
// "--metrics.influxdb.database=homelab",
// "--metrics.influxdb.retentionpolicy=2day",
// "--metrics.influxdb.addentrypointslabels=true",
// "--metrics.influxdb.addserviceslabels=true",
"--accesslog=true",
"--log=true",
"--log.level=ERROR",
"--api=true",
"--api.dashboard=true",
"--api.insecure=true",
"--certificatesresolvers.cloudflare.acme.email={{ my_email_address }}",
"--certificatesresolvers.cloudflare.acme.storage=/acme/acme-${node.unique.name}.json",
"--certificatesresolvers.cloudflare.acme.dnschallenge=true",
"--certificatesresolvers.cloudflare.acme.dnschallenge.provider=cloudflare",
"--certificatesresolvers.cloudflare.acme.dnschallenge.delaybeforecheck=10",
"--certificatesresolvers.cloudflare.acme.dnschallenge.resolvers=1.1.1.1:53,8.8.8.8:53"
]
} // docker config
template {
destination = "local/traefik/httpasswd"
env = false
change_mode = "noop"
data = <<-EOH
{{ my_username }}:{{ traefik_http_pass_me }}
family:{{ traefik_http_pass_family }}
EOH
}
template {
destination = "local/traefik/httpasswdFamily"
env = false
change_mode = "noop"
data = <<-EOH
{{ my_username }}:{{ traefik_http_pass_me }}
family:{{ traefik_http_pass_family }}
EOH
}
template {
destination = "local/traefik/siteconfigs.toml"
env = false
change_mode = "noop"
data = <<-EOH
[http]
[http.middlewares]
[http.middlewares.compress.compress]
[http.middlewares.localIPOnly.ipWhiteList]
sourceRange = ["10.0.0.0/8"]
[http.middlewares.redirectScheme.redirectScheme]
scheme = "https"
permanent = true
[http.middlewares.authelia.forwardAuth]
address = {% raw %}"http://{{ range nomadService "authelia" }}{{ .Address }}:{{ .Port }}{{ end }}{% endraw %}/api/verify?rd=https://authelia.{{ homelab_domain_name }}"
trustForwardHeader = true
authResponseHeaders = ["Remote-User", "Remote-Groups", "Remote-Name", "Remote-Email"]
[http.middlewares.basicauth.basicauth]
usersfile = "/local/traefik/httpasswd"
removeHeader = true
[http.middlewares.basicauth-family.basicauth]
usersfile = "/local/traefik/httpasswdFamily"
removeHeader = true
[http.middlewares.allowFrame.headers]
customFrameOptionsValue = "allow-from https://home.{{ homelab_domain_name }}"
[http.routers]
[http.routers.consul]
rule = "Host(`consul.{{ homelab_domain_name }}`)"
service = "consul"
entrypoints = ["web","websecure"]
[http.routers.consul.tls]
certResolver = "cloudflare" # From static configuration
[http.services]
[http.services.consul]
[http.services.consul.loadBalancer]
passHostHeader = true
[[http.services.consul.loadBalancer.servers]]
url = "http://consul.service.consul:8500"
EOH
}
service {
port = "dashboard"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_TASK_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
"traefik.http.routers.${NOMAD_TASK_NAME}.middlewares=authelia@file,redirectScheme@file"
]
check {
type = "tcp"
port = "authelia-port"
interval = "30s"
timeout = "4s"
}
check {
type = "tcp"
port = "dashboard"
interval = "30s"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service
check_restart {
limit = 0
grace = "1m"
}
} // service
resources {
cpu = 200 # MHz
memory = 1000 # MB
}
resources {
cpu = 140 # MHz
memory = 100 # MB
} // resources
} // task authelia
} // task traefik
task "whoami" {
driver = "docker"
config {
image = "containous/whoami:latest"
hostname = "${NOMAD_TASK_NAME}"
ports = ["whoami"]
// task "promtail-traefik" {
} // /docker config
// driver = "docker"
// config {
// image = "grafana/promtail"
// hostname = "promtail-traefik"
// volumes = [
// "/mnt/pi-cluster/logs:/traefik"
// ]
// args = [
// "-config.file",
// "/local/promtail-config.yaml",
// "-print-config-stderr",
// ]
// } // docker config
service {
port = "whoami"
name = "${NOMAD_TASK_NAME}"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_TASK_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
"traefik.http.routers.${NOMAD_TASK_NAME}.middlewares=authelia@file"
]
check {
type = "http"
path = "/"
interval = "90s"
timeout = "15s"
}
check_restart {
limit = 2
grace = "1m"
ignore_warnings = true
}
}
resources {
cpu = 25 # MHz
memory = 10 # MB
}
// template {
// destination = "local/promtail-config.yaml"
// env = false
// data = <<-EOH
// server:
// http_listen_port: 9080
// grpc_listen_port: 0
} // /task whoami
// positions:
// filename: /alloc/positions.yaml
task "traefik" {
// {% raw -%}
// clients:
// - url: http://{{ range nomadService "loki" }}{{ .Address }}:{{ .Port }}{{ end }}/loki/api/v1/push
// {% endraw %}
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
CF_API_EMAIL = "{{ my_email_address }}"
CF_DNS_API_TOKEN = "{{ traefik_cf_api_token }}"
}
// scrape_configs:
// - job_name: traefik
// static_configs:
// - targets:
// - localhost
// labels:
// job: traefik_access
// {% raw %}host: {{ env "node.unique.name" }}{% endraw +%}
// __path__: "/alloc/logs/traefik.std*.0"
// pipeline_stages:
// - regex:
// expression: '^(?P<remote_addr>[\w\.]+) - (?P<remote_user>[^ ]*) \[(?P<time_local>.*)\] "(?P<method>[^ ]*) (?P<request>[^ ]*) (?P<protocol>[^ ]*)" (?P<status>[\d]+) (?P<body_bytes_sent>[\d]+) "(?P<http_referer>[^"]*)" "(?P<http_user_agent>[^"]*)" (?P<request_number>[^ ]+) "(?P<router>[^ ]+)" "(?P<server_URL>[^ ]+)" (?P<response_time_ms>[^ ]+)ms$'
// - labels:
// method:
// status:
// router:
// response_time_ms:
driver = "docker"
config {
image = "traefik:{{ traefik_version }}"
hostname = "traefik"
ports = ["dashboard", "web", "websecure","externalwebsecure"]
volumes = [ "${meta.nfsStorageRoot}/pi-cluster/traefik/acme:/acme" ]
args = [
"--global.sendAnonymousUsage=false",
"--global.checkNewVersion=false",
"--entryPoints.web.address=:80",
"--entryPoints.websecure.address=:443",
"--entryPoints.externalwebsecure.address=:4430",
"--entrypoints.web.http.redirections.entryPoint.to=websecure",
"--entrypoints.web.http.redirections.entryPoint.scheme=https",
"--entrypoints.web.http.redirections.entryPoint.permanent=true",
"--providers.file.filename=/local/traefik/siteconfigs.toml",
"--providers.file.watch=true",
"--providers.consulcatalog=true",
"--providers.consulcatalog.endpoint.address=http://consul.service.consul:8500",
"--providers.consulcatalog.prefix=traefik",
"--providers.consulcatalog.exposedbydefault=false",
"--metrics=true",
"--metrics.influxdb=true",
"--metrics.influxdb.address=influxdb.service.consul:{{ influxdb_port }}",
"--metrics.influxdb.protocol=http",
"--metrics.influxdb.pushinterval=10s",
"--metrics.influxdb.database=homelab",
"--metrics.influxdb.retentionpolicy=2day",
"--metrics.influxdb.addentrypointslabels=true",
"--metrics.influxdb.addserviceslabels=true",
"--accesslog=true",
"--log=true",
"--log.level=ERROR",
"--api=true",
"--api.dashboard=true",
"--api.insecure=true",
"--certificatesresolvers.cloudflare.acme.email={{ my_email_address }}",
"--certificatesresolvers.cloudflare.acme.storage=/acme/acme-${node.unique.name}.json",
"--certificatesresolvers.cloudflare.acme.dnschallenge=true",
"--certificatesresolvers.cloudflare.acme.dnschallenge.provider=cloudflare",
"--certificatesresolvers.cloudflare.acme.dnschallenge.delaybeforecheck=10",
"--certificatesresolvers.cloudflare.acme.dnschallenge.resolvers=1.1.1.1:53,8.8.8.8:53"
]
} // docker config
// EOH
// } // template
template {
destination = "local/traefik/httpasswd"
env = false
change_mode = "noop"
data = <<-EOH
{{ my_username }}:{{ traefik_http_pass_me }}
family:{{ traefik_http_pass_family }}
EOH
}
// lifecycle {
// hook = "poststart"
// sidecar = true
// }
template {
destination = "local/traefik/httpasswdFamily"
env = false
change_mode = "noop"
data = <<-EOH
{{ my_username }}:{{ traefik_http_pass_me }}
family:{{ traefik_http_pass_family }}
EOH
}
// resources {
// cpu = 30 # MHz
// memory = 30 # MB
// } // resources
template {
destination = "local/traefik/siteconfigs.toml"
env = false
change_mode = "noop"
data = <<-EOH
[http]
[http.middlewares]
[http.middlewares.compress.compress]
// } // promtail sidecar task
[http.middlewares.localIPOnly.ipWhiteList]
sourceRange = ["10.0.0.0/8"]
[http.middlewares.redirectScheme.redirectScheme]
scheme = "https"
permanent = true
[http.middlewares.authelia.forwardAuth]
address = "http://authelia.service.consul:{{ authelia_port }}/api/verify?rd=https://authelia.{{ homelab_domain_name }}"
trustForwardHeader = true
authResponseHeaders = ["Remote-User", "Remote-Groups", "Remote-Name", "Remote-Email"]
[http.middlewares.basicauth.basicauth]
usersfile = "/local/traefik/httpasswd"
removeHeader = true
[http.middlewares.basicauth-family.basicauth]
usersfile = "/local/traefik/httpasswdFamily"
removeHeader = true
[http.middlewares.allowFrame.headers]
customFrameOptionsValue = "allow-from https://home.{{ homelab_domain_name }}"
[http.routers]
[http.routers.consul]
rule = "Host(`consul.{{ homelab_domain_name }}`)"
service = "consul"
entrypoints = ["web","websecure"]
[http.routers.consul.tls]
certResolver = "cloudflare" # From static configuration
[http.services]
[http.services.consul]
[http.services.consul.loadBalancer]
passHostHeader = true
[[http.services.consul.loadBalancer.servers]]
url = "http://consul.service.consul:8500"
EOH
}
service {
port = "dashboard"
name = "${NOMAD_TASK_NAME}"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_TASK_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
"traefik.http.routers.${NOMAD_TASK_NAME}.middlewares=authelia@file,redirectScheme@file"
]
check {
type = "tcp"
port = "dashboard"
interval = "30s"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service
resources {
//cpu = 40 # MHz
memory = 64 # MB
} // resources
} // task traefik
// task "promtail-traefik" {
// driver = "docker"
// config {
// image = "grafana/promtail"
// hostname = "promtail-traefik"
// volumes = [
// "/mnt/pi-cluster/logs:/traefik"
// ]
// args = [
// "-config.file",
// "/local/promtail-config.yaml",
// "-print-config-stderr",
// ]
// } // docker config
// template {
// destination = "local/promtail-config.yaml"
// env = false
// data = <<-EOH
// server:
// http_listen_port: 9080
// grpc_listen_port: 0
// positions:
// filename: /alloc/positions.yaml
// {% raw -%}
// clients:
// - url: http://{{ range service "loki" }}{{ .Address }}:{{ .Port }}{{ end }}/loki/api/v1/push
// {% endraw %}
// scrape_configs:
// - job_name: traefik
// static_configs:
// - targets:
// - localhost
// labels:
// job: traefik_access
// {% raw %}host: {{ env "node.unique.name" }}{% endraw +%}
// __path__: "/alloc/logs/traefik.std*.0"
// pipeline_stages:
// - regex:
// expression: '^(?P<remote_addr>[\w\.]+) - (?P<remote_user>[^ ]*) \[(?P<time_local>.*)\] "(?P<method>[^ ]*) (?P<request>[^ ]*) (?P<protocol>[^ ]*)" (?P<status>[\d]+) (?P<body_bytes_sent>[\d]+) "(?P<http_referer>[^"]*)" "(?P<http_user_agent>[^"]*)" (?P<request_number>[^ ]+) "(?P<router>[^ ]+)" "(?P<server_URL>[^ ]+)" (?P<response_time_ms>[^ ]+)ms$'
// - labels:
// method:
// status:
// router:
// response_time_ms:
// EOH
// } // template
// lifecycle {
// hook = "poststart"
// sidecar = true
// }
// resources {
// cpu = 30 # MHz
// memory = 30 # MB
// } // resources
// } // promtail sidecar task
} // reverse-proxy-group
}
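The substantive change in this file is the move from Consul to Nomad-native service discovery: services gain provider = "nomad", Traefik swaps --providers.consulcatalog for --providers.nomad, and the Authelia forwardAuth address is resolved at render time with a nomadService template lookup instead of a static authelia.service.consul name. A minimal sketch of that lookup, with the destination and variable name assumed for illustration:

    template {
      destination = "local/upstream.env"
      env         = true      // load the rendered KEY=value pairs as env vars
      change_mode = "restart"
      // The {% raw %}...{% endraw %} guards keep Ansible's Jinja2 from
      // consuming the runtime template; Nomad evaluates the inner expression
      // against its service catalog when the allocation starts.
      data = <<-EOH
      {% raw %}AUTH_UPSTREAM=http://{{ range nomadService "authelia" }}{{ .Address }}:{{ .Port }}{{ end }}{% endraw %}
      EOH
    }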

View File

@@ -0,0 +1,101 @@
job "sabnzbd" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
constraint {
attribute = "${node.unique.name}"
operator = "regexp"
value = "macmini"
}
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "sabnzbd" {
count = 1
restart {
attempts = 0
delay = "30s"
}
network {
port "http" {
static = "8080"
to = "8080"
}
}
task "sabnzbd" {
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
DOCKER_MODS = "linuxserver/mods:universal-cron"
}
driver = "docker"
config {
image = "ghcr.io/linuxserver/sabnzbd"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/config",
"${meta.nfsStorageRoot}/media/downloads/nzb:/nzbd",
"${meta.nfsStorageRoot}/media/downloads/temp:/incomplete-downloads",
"${meta.nfsStorageRoot}/media/downloads/complete:/downloads",
"${meta.nfsStorageRoot}/nate:/nate",
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}/startup-scripts:/custom-cont-init.d"
]
ports = ["http"]
} // docker config
service {
port = "http"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`sab.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare"
// "traefik.http.routers.${NOMAD_TASK_NAME}.middlewares=authelia@file"
]
check {
type = "tcp"
port = "http"
interval = "30s"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
}
} // service
resources {
cpu = 5000 # MHz
memory = 1000 # MB
} // resources
} // task
} // group
} // job
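The group above pins SABnzbd to host port 8080 with a static mapping, so only one allocation per node can hold the port and Nomad must schedule around it; most other jobs in this compare take a dynamic host port and let the Traefik tags handle routing. Both forms, sketched:

    network {
      // Static: host port 8080 is reserved and forwarded to container port 8080.
      port "http" {
        static = 8080
        to     = 8080
      }
      // Dynamic: Nomad picks a free host port and maps it to container port 80;
      // consumers find it through service discovery rather than a fixed number.
      port "web" {
        to = 80
      }
    }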

View File

@@ -82,6 +82,7 @@ job "sonarr" {
service {
port = "sonarr"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -100,7 +101,6 @@ job "sonarr" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service
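This two-line change repeats across nearly every job in the compare: provider = "nomad" registers the service with Nomad's built-in catalog rather than Consul, and ignore_warnings is dropped from check_restart because Nomad-native checks only pass or fail; the warning status it refers to is a Consul concept. The resulting service shape, sketched with illustrative names:

    service {
      name     = "myapp" // illustrative
      port     = "http"
      provider = "nomad" // register with Nomad instead of Consul
      check {
        type     = "tcp"
        port     = "http"
        interval = "30s"
        timeout  = "4s"
      }
      check_restart {
        limit = 0
        grace = "1m"
        // no ignore_warnings: Nomad-native checks have no warning state
      }
    }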

View File

@@ -0,0 +1,97 @@
job "speedtest" {
region = "global"
datacenters = ["{{ datacenter_name }}"]
type = "service"
constraint {
attribute = "${node.unique.name}"
operator = "regexp"
value = "macmini"
}
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "speedtest" {
count = 1
restart {
attempts = 0
delay = "30s"
}
network {
port "port1" {
to = "80"
}
}
task "speedtest" {
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
DB_CONNECTION = "sqlite"
APP_KEY = "{{ speedtest_app_key }}"
}
driver = "docker"
config {
image = "lscr.io/linuxserver/speedtest-tracker:latest"
image_pull_timeout = "10m"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/config"
]
ports = ["port1"]
} // docker config
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare"
]
check {
type = "tcp"
port = "port1"
interval = "30s"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
}
} // service
resources {
cpu = 1000 # MHz
memory = 200 # MB
} // resources
} // task
} // group
} // job

View File

@@ -40,6 +40,7 @@ job "stash" {
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
STASH_STASH = "/data/"
STASH_GENERATED = "/generated/"
STASH_METADATA = "/metadata/"
@@ -58,6 +59,7 @@ job "stash" {
"${meta.nfsStorageRoot}/nate/.stash/generated:/generated",
"${meta.nfsStorageRoot}/nate/.stash/media:/data",
"${meta.nfsStorageRoot}/nate/.stash/metadata:/metadata",
"${meta.nfsStorageRoot}/nate/.stash/blobs:/blobs",
"/etc/timezone:/etc/timezone:ro"
]
ports = ["port1"]
@@ -66,6 +68,7 @@ job "stash" {
service {
port = "port1"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -73,7 +76,6 @@ job "stash" {
"traefik.http.routers.${NOMAD_JOB_NAME}.service=${NOMAD_JOB_NAME}",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.tls.certresolver=cloudflare",
"traefik.http.routers.${NOMAD_JOB_NAME}.middlewares=authelia@file"
]
check {
@@ -85,12 +87,11 @@ job "stash" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service
resources {
cpu = 4500 # MHz
cpu = 3000 # MHz
memory = 400 # MB
} // resources

View File

@@ -70,6 +70,7 @@ job "syncthing" {
service {
port = "webGUI"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -89,7 +90,6 @@ job "syncthing" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -108,7 +108,7 @@ job "TEMPLATE" {
}
}
task "await-TEMPLATEdb" {
task "await-TEMPLATEEdb" {
driver = "docker"
config {
@@ -158,6 +158,7 @@ job "TEMPLATE" {
service {
name = "${NOMAD_TASK_NAME}"
port = "port2"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_TASK_NAME}.{{ homelab_domain_name }}`)",
@@ -178,7 +179,6 @@ job "TEMPLATE" {
check_restart {
limit = 3
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -9,87 +9,89 @@ job "TEMPLATE" {
// value = "rpi(1|2|3)"
// }
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "TEMPLATE" {
count = 1
restart {
attempts = 0
delay = "30s"
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
network {
port "port1" {
static = "80"
to = "80"
group "TEMPLATE" {
count = 1
restart {
attempts = 0
delay = "30s"
}
}
task "TEMPLATE" {
network {
port "port1" {
static = "80"
to = "80"
}
}
// env {
// PUID = "${meta.PUID}"
// PGID = "${meta.PGID}"
// }
task "TEMPLATE" {
driver = "docker"
config {
image = ""
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/etc/TEMPLATE/",
"/etc/timezone:/etc/timezone:ro",
"/etc/localtime:/etc/localtime:ro"
]
ports = ["port1"]
} // docker config
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
}
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
"traefik.http.routers.${NOMAD_TASK_NAME}.middlewares=authelia@file"
]
driver = "docker"
config {
image = ""
image_pull_timeout = "10m"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/etc/TEMPLATE/"
]
ports = ["port1"]
} // docker config
check {
type = "tcp"
port = "port1"
interval = "30s"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
"traefik.http.routers.${NOMAD_TASK_NAME}.middlewares=authelia@file"
]
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
check {
type = "tcp"
port = "port1"
interval = "30s"
timeout = "4s"
}
} // task
check_restart {
limit = 0
grace = "1m"
}
} // service
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
} // task
} // group
} // job
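The commented-out resources block in this template matches Nomad's scheduler defaults, which is what an allocation receives when the stanza is omitted entirely; uncommenting it only matters once different numbers are needed:

    resources {
      cpu    = 100 // MHz; Nomad's default when the stanza is omitted
      memory = 300 // MB; Nomad's default when the stanza is omitted
    }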

View File

@@ -82,6 +82,7 @@ job "TEMPLATE" {
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -102,7 +103,6 @@ job "TEMPLATE" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -70,6 +70,7 @@ job "uptimekuma" {
service {
port = "web"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`uptime.{{ homelab_domain_name }}`)",
@@ -88,7 +89,6 @@ job "uptimekuma" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -9,56 +9,146 @@ job "valentina" {
// value = "rpi(1|2|3)"
// }
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
group "valentina" {
count = 1
restart {
attempts = 0
delay = "30s"
update {
max_parallel = 1
health_check = "checks"
min_healthy_time = "10s"
healthy_deadline = "5m"
progress_deadline = "10m"
auto_revert = true
canary = 0
stagger = "30s"
}
task "valentina" {
group "valentina" {
env {
PGID = "${meta.PGID}"
PUID = "${meta.PUID}"
TZ = "America/New_York"
VALENTINA_DISCORD_TOKEN = "{{ valentina_discord_token }}"
VALENTINA_GUILDS = "{{ valentina_guids }}"
VALENTINA_LOG_LEVEL = "INFO"
VALENTINA_OWNER_IDS = "{{ valentina_owner_ids }}"
}
count = 1
restart {
attempts = 0
delay = "30s"
}
driver = "docker"
config {
image = "ghcr.io/natelandau/valentina:v{{valentina_version}}"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/valentina",
]
} // docker config
task "valentina" {
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
env {
PGID = "${meta.PGID}"
PUID = "${meta.PUID}"
TZ = "America/New_York"
VALENTINA_AWS_ACCESS_KEY_ID = "{{ valentina_aws_access_key_id }}"
VALENTINA_AWS_SECRET_ACCESS_KEY = "{{ valentina_aws_secret_access_key }}"
VALENTINA_DISCORD_TOKEN = "{{ valentina_discord_token }}"
VALENTINA_GUILDS = "{{ valentina_guids }}"
VALENTINA_LOG_LEVEL = "INFO"
VALENTINA_LOG_LEVEL_AWS = "INFO"
VALENTINA_LOG_LEVEL_HTTP = "ERROR"
VALENTINA_MONGO_DATABASE_NAME = "{{ valentina_mongo_database_name }}"
VALENTINA_MONGO_URI = "{{ valentina_mongo_uri }}"
VALENTINA_OWNER_CHANNELS = "{{ valentina_owner_channels }}"
VALENTINA_OWNER_IDS = "{{ valentina_owner_ids }}"
VALENTINA_S3_BUCKET_NAME = "{{ valentina_s3_bucket_name}}"
VALENTINA_GITHUB_TOKEN = "{{ valentina_github_token }}"
VALENTINA_GITHUB_REPO = "{{ valentina_github_repo }}"
}
driver = "docker"
config {
image = "ghcr.io/natelandau/valentina:v{{ valentina_version }}"
image_pull_timeout = "10m"
hostname = "${NOMAD_TASK_NAME}"
volumes = [
"${meta.nfsStorageRoot}/pi-cluster/${NOMAD_TASK_NAME}:/valentina",
]
} // docker config
} // task
// resources {
// cpu = 100 # MHz
// memory = 300 # MB
// } // resources
} // task
} // group
group "mongobackup" {
count = 1
restart {
attempts = 0
delay = "30s"
}
network {
port "port1" {
to = "80"
}
}
constraint {
attribute = "${attr.cpu.arch}"
value = "amd64"
}
task "mongobackup" {
env {
PUID = "${meta.PUID}"
PGID = "${meta.PGID}"
TZ = "America/New_York"
AWS_ACCESS_KEY_ID = "{{ valentina_aws_access_key_id }}"
AWS_S3_BUCKET_NAME = "{{ valentina_s3_bucket_name }}"
AWS_S3_BUCKET_PATH = "db_backups"
AWS_SECRET_ACCESS_KEY = "{{ valentina_aws_secret_access_key }}"
BACKUP_DIR = "/data/db_backups"
CRON_SCHEDULE = "0 2 * * *" # 2am daily
// CRON_SCHEDULE = "*/1 * * * *" # Every minute
DAILY_RETENTION = "7"
DB_NAME = "{{ backup_mongo_db_name }}"
LOG_FILE = "/data/backup_mongodb.log"
LOG_LEVEL = "INFO"
MONGODB_URI = "{{ backup_mongo_mongodb_uri }}"
MONTHLY_RETENTION = "12"
PORT = "80"
STORAGE_LOCATION = "BOTH"
WEEKLY_RETENTION = "4"
YEARLY_RETENTION = "2"
}
driver = "docker"
config {
image = "ghcr.io/natelandau/backup-mongodb:v{{ backup_mongodb_version }}"
image_pull_timeout = "10m"
hostname = "${NOMAD_TASK_NAME}"
ports = ["port1"]
volumes = ["${meta.nfsStorageRoot}/pi-cluster/valentina:/data"]
} // docker config
service {
port = "port1"
name = "${NOMAD_TASK_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.rule=Host(`${NOMAD_TASK_NAME}.{{ homelab_domain_name }}`)",
"traefik.http.routers.${NOMAD_TASK_NAME}.entryPoints=web,websecure",
"traefik.http.routers.${NOMAD_TASK_NAME}.service=${NOMAD_TASK_NAME}",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls=true",
"traefik.http.routers.${NOMAD_TASK_NAME}.tls.certresolver=cloudflare",
]
check {
type = "tcp"
port = "port1"
interval = "1m"
timeout = "4s"
}
check_restart {
limit = 0
grace = "1m"
}
} // service
} // task
} // group
} // job
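The new mongobackup group is pinned to amd64 with an attribute constraint, presumably because the backup image is not published for the Pis' ARM architecture. Constraints stack: a group-level rule is evaluated together with any job-level constraint, and both forms used throughout this compare are sketched below.

    constraint {
      attribute = "${attr.cpu.arch}"
      value     = "amd64" // exact match on CPU architecture
    }

    constraint {
      attribute = "${node.unique.name}"
      operator  = "regexp"
      value     = "rpi" // any node whose unique name matches rpi
    }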

View File

@@ -58,6 +58,7 @@ job "whoogle" {
service {
port = "whoogle"
name = "${NOMAD_JOB_NAME}"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.${NOMAD_JOB_NAME}.rule=Host(`${NOMAD_JOB_NAME}.{{ homelab_domain_name }}`)",
@@ -77,7 +78,6 @@ job "whoogle" {
check_restart {
limit = 0
grace = "1m"
ignore_warnings = true
}
} // service

View File

@@ -28,7 +28,7 @@ job "wikijs" {
}
}
task "await_db_filesytem" {
task "await_db_filesystem" {
constraint {
attribute = "${node.unique.name}"
@@ -56,7 +56,7 @@ job "wikijs" {
}
} // /task
task "await_backup_filesytem" {
task "await_backup_filesystem" {
constraint {
attribute = "${node.unique.name}"
@@ -122,6 +122,7 @@ job "wikijs" {
service {
port = "db"
name = "wikijsdb"
provider = "nomad"
check {
type = "tcp"
port = "db"
@@ -131,7 +132,6 @@ job "wikijs" {
check_restart {
limit = 2
grace = "1m"
ignore_warnings = true
}
}
@@ -180,7 +180,7 @@ group "wikijs_app_group" {
}
} // /task
task "await_filesytem" {
task "await_filesystem" {
driver = "docker"
config {
@@ -225,6 +225,7 @@ group "wikijs_app_group" {
service {
port = "http"
name = "wikijs"
provider = "nomad"
tags = [
"traefik.enable=true",
"traefik.http.routers.wikijs.rule=Host(`wiki.{{ homelab_domain_name }}`)",
@@ -241,7 +242,6 @@ group "wikijs_app_group" {
check_restart {
limit = 3
grace = "30s"
ignore_warnings = true
}
} // /service
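The await_* renames above only fix spelling; the tasks themselves (outside this hunk) follow a common gating pattern: a prestart task that blocks until a dependency such as an NFS mount is reachable before the main task starts. A minimal sketch of such a gate, assuming a busybox image and an illustrative mount path:

    task "await_filesystem" {
      driver = "docker"
      lifecycle {
        hook    = "prestart" // run before the main task
        sidecar = false      // exit once the condition is met
      }
      config {
        image   = "busybox:latest"
        command = "sh"
        args    = ["-c", "until [ -d /data ]; do echo 'waiting for mount'; sleep 2; done"]
        volumes = ["${meta.nfsStorageRoot}/pi-cluster:/data"] // illustrative mount
      }
      resources {
        cpu    = 50 // MHz
        memory = 32 // MB
      }
    }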

View File

@@ -20,6 +20,7 @@ _mainScript_() {
readarr
sonarr
uptimekuma
gitea
)
fi
@@ -131,7 +132,7 @@ _mainScript_() {
BACKUP_RETENTION_HOURLY=2
BACKUP_RETENTION_DAILY=6
BACKUP_RETENTION_WEEKLY=3
BACKUP_RETENTION_MONTHLY=2
BACKUP_RETENTION_MONTHLY=4
;;
esac

vault.yml
View File

@@ -1,130 +1,222 @@
$ANSIBLE_VAULT;1.1;AES256
30663632373933656431386165366239613265616163656263356462626365636661333339646462
3063316332666465626234393330323737393635343564310a303036653366653333636663643765
32666661356637353863633934646536333037393735313139303433336639343466643538646236
6466633565363633620a343131356337336366636433666230306237633963616266363833343636
37623938303161323963356338313166643566636166333639303163653464353266306465336231
33373736663765663734326234653931366237336666336461363734363065356664653763363437
63383266376136653033373263393261353836643932353737626136356135373264616532323434
64386533326164346662636630373863336530396236613736626239656238363738643839613934
33393930353166643961393665326262313463656137343537393363336538313935306364616136
32643961633436303262326239393063373162343562383436336639383066343431383639366363
64363736393535336531326531353939393435356535643163356536356364396464633565616366
36303231613839316538383637386136356638663334316530366534343533393462396361376132
33366434306563316263643864656332303361346632613336313135393937346338373730336664
39623239616234373530373739336465616462653663303331616232616632316161646434366664
62653165323163323533653939353565613337653136646232333462613562326335363337656666
30396335323030336132373564643233313631653735393236383830353130393163376165313963
38383933383034303739363461306464626461613661666466346534326135376534366636303738
38613936633232343366623433376230313633616465636537393030306461613734633437663962
66636264306139323763323436636661306339653063333363386438313365656537306663376630
66346264353532366135616131613164363438663335386565316336303036316363313830366162
64326563613038616238626463356136396230633639316135323466393939653237373837636432
63363430656563373163666134633031636336626530636633663831306363336239383139653366
61653138653563633532616164663735663564353038623661393136353164373330323565633766
62646164353633343863643438633863666464643534643936643337393865306265646364653038
66386362656436313935336432643833656436373130613031353434633766376132366637616334
31633761396264366361346335346130313435636265666339346436633831643932336665396366
63653963353839383736306330376262386333643366353836623235353538353066656535663464
63623365346133646339656433353332656632363737336336626462386362386164383266316463
63366431613133313232666335666430643462363064316133373630333266626666623631326163
64646264323264393164323732353035646261633033633335643631343964643134353032653062
39303163626564326562626630376132666563316534623662366165333466396662616535653339
35623962333434303762333861613838643763636136383030303966353162373239323634313064
31306565613164346364653830373134343635316639633434656163313763646165396664356430
37306633633432326233316338306237343864623335623039336533633865356662663231653732
30616461303133623732393463313334373263613737646431653036653464316530643431353931
31626131376638383265636632616134666561653063623035623034633737356465343065643039
33623338646261396366356638316533306436633365336231386532663136363862336234303036
30326335646465366266326230363731616263643530326234623832356233303965373536626430
30646232653830633536353861626230326134353735333038313337323366376634313763313139
32316466393330376462663539323464366636316637643035333535656632303364323531303137
38393962353761623638306539653238306536386337303239613539393530646332313038343962
34613232616632326138343166613635343336346538633465323364323332313833636533643531
63633763373533376566633331383663626664386263656631333137613236303964353830613831
63363534633336376336333566343765316235343962663664626461656134323365646566643930
31336630666436353833633838643335393863623539323966313162323766356136646263646233
32353862613136643061316434333133303363316337356637623535343939386439306664306162
61626136633565383066636536373231343833343566306336303865363937646339633961313639
33653330313331373361363936373861313531613762636166623539303434303765656338313839
39376134656332323730633631373639363036626261653832366234326561386264386533336564
34313838313836653639316237663733396135623438313236306166363435333266613431653966
31663964663030316633373631626139333663646234633464313536373030313735373236613762
34373232303530313765623263356232346538646232303030333665633038666438333032636231
37653031666663393562333833663763356434353962346438346130633064393466613736343063
39336433616665323236366335363065396362313433396633396238656232313936373737333265
61663263303564643262353337393865313561306461353832303264336635613662383265316233
32633232353365386362303764313236653862633335333831636236363336636235376635643639
63373732633033626263366465393135306538643863343534386665663365336461663862633437
61383263356161383434316465666365333932323064616164376633363332353433623635646230
36336335353365323863636238346264343765333034333332383130313462356432616531346163
63663665366563663462616464653332636538336565363534303363343633333065393363316530
62363761343230656239303564373733383332613537383633306230313230393732316462383662
36663439316339356139383664336433333636323233643765353934343163336530333338346562
32383666333536656334663936653635623462303232636365363939393665313531636163316362
32613738663730336565333063363436666635323834633266656264326166656365666161616565
34383461616433656437613738633862303935363434393532643062393639383535303835346235
33393031653133353662313739386638626332373732653531313239636132643162343035646336
33396236656435333032656134613233383237313138306464653137626333346631303136343765
66313263353633306432386463316162636363333561623762653763373233343739663836633831
65663031346238346539666437653436393866646361383061383934653439653432323263613866
31356137303137306530633835663334333934333764343533346666663934313439303765306230
64396635383835393562363732366665383965663035666266313334326333653661643638316266
65623162393764623365623632633637366263346466336262353932656430313539616535626661
63366165353263353135666532386165366432343237353963343666333333643539323065663332
33613166633533663331343935313839303738653331353532633334353239363133303266613432
65646239366361643739643934633365326331313339666566393365363935666465333030616432
66363034653131656462376461306132616137633330333565313839396265666635316439343762
38646231336634356432316663376363323734366562643761396161386532306161343734343632
38663136393637626235663937373734363230373763356337636463623333386435323037396432
61643031383365356631616532316332316139396135356535353431336330336438373761376534
33333837343562383564303462616133653836323463343836613531616434316130343137373238
33313238633466383866616631346561396332356335396235393937376463353832643935313830
66386130613661373337623935336266643834336234313833626636346265323264363737633530
39613664373233333935656134363265616332613131303363363662633837333436383033366635
35383037643335373061376434616338623239353437366637366535663962616337626165653534
36383666613339396465663066366433663664343431373133643861653866336130393633323937
65333837383762653062346438363366393836326365353532653464396436303833323737323661
62393262313232366461373737613030306566386566373331353633323762626638626262393332
65376439366364326337323130636330633938323732643566643562373766653230333439343663
64323666306635653935646366323662653066653136393535346632333039333435313832633866
61656539343536313665663365626238323836626635363437366663316261353065373931666461
35376466643532656563386162643533653562646434333935343665316366376338366364366138
37373936333261303237333061626366633732353033643636336131643562633964663738646661
64363537636439353838616336363061623663386237656266616132623862633834343932663333
33323061366135316233353730326230613463633838363866306463616561316135303063656164
62333439636632636432356536376461363438303431393732386538646531326633376437663633
66646531313835393831376461396435623561613663383533366637626163303933303162323739
34653339643933343963636435623833663437653162333436633166353662366139623533376162
30306233643836613838373738336263623633393334616664613636393361313465373932343436
30326166313230383732613432663162346432313862613133643463636436313336363161366335
63653437386332336266373937663234393338393236343033336637343464306339393330663735
61313739343432343338306334306165623038346336646666653139303662366235383363313532
32386336343730363866373131303366613838623837326333303736383561613239623466326332
61376461363861616461653961336363643865636233653965353036373662393964313765623139
64393364333535316165636439613734643530666663663965343734613831636233303636636633
62323834633137346161666463393938356231633135383765363363613138373638303835363161
37646432373565653236303933316164623636626664633039363065373231343931626337653163
37663631623437613564613765333466306265653461353032626437346533373163653161613464
35346638303364386461336565646663363138613065303533376662653064383763663563383135
36363966323464346135326234393035343566623762356237623631333838633964633364626661
35323665613238356136623837346636643962336166646635376238363930373863313637346130
30643634343531346230393438363437346462646634336338363932633365646461383732393631
65623332356561366562353732376136386531343061306364333063373963353433663336393663
63303465343861353837636536356135306234643131343032623364663164306534633261613938
34303462363364386163363237623563656664386436623030313935633938663363623163316363
63363866383262353233363464326238393833323763333839633530323136613363326131626639
31333131313765633636633335386533393862363534313064616236663531643762316134393261
65343739316161666630393662353064636536323262643434616166366531383661396466356633
37663539363331356332616664316164373764633631386134363738346135623262626230316238
37323663396162653465306136653262616137366163346330623834616634353635643236326664
31333537373264623064643933373364653830666630343063646339373733316364643762366664
63646661633230626462636438323664656234323237626439626138643835313131393534346230
35623362616438393863656339316238613766316234343466633437396137643434383739633932
62653935613864363230323638666462386461343665336433353934396537336330343937363038
66356362346638386363373632643363666132343461383763653434616233663239656366633565
36386366643262633532646631356631313866303438383939373133393139353236363862633633
34373866316335376361303934613639613837353235663534316136313431333837326666616637
39383537353664336466306132396266623835346239353339373461343031636166616535363639
63623939313261336639386564336633663465306537346436386336633531653633366136633337
3839
39376437663661656434636637396539303863633934376535353938656434386533323063626439
3665306133383637383739626233663565383562393534640a396436386366303730653366363530
39383533663836613934303665613031636535646233316364313766303964666266646134343833
3365623733383236630a616635383739636635663964323361663439336235336536626433333438
63356539353363663930363639396666386236653337313366313362326132346563663663633165
36396635616138313539653966373462363666316462313765623665303439383233353733656237
38643466393336333031326438316662386137613936626563393461613236613738306537666166
34313964346132623131623561363331323764373336613238313764653739623437656136333133
37663238653665313430653031393239636562376533346335363263373836613331333131623164
36323535633831656136633661326362646366623037386233656238616163623564653137303834
39643965363039643537303031643032333836316534336439346665316436306465663062323338
35393132346334653539343430373934323233623432386665633532366335333031356263376266
36386231613533383265346130383238316666663230643532383938663562386639326330646462
38633930343332353664363463303438643232633862646535626131633633396266353337396231
65353132653034653661316630663462633565363966326137373835316366313734646230376365
33666536333134323532363935326234393031393261393935656131366364616165643662333137
33316562663566353338346538313933313562613532383733326433653262366135353830336361
65343437346234343032323763353030663731336236623635633437393332323164666539313965
37366563383463613032333437366133343361643131353963346531313037643862363762383735
65383765386462653263343335633065623836613736663532313433396433376139633333653663
37336565656463356166636330306333636561366335366638383861316633623661396565633462
32623530326334383065346466643766383531353962666135313466633532643664356430386133
64653737326430623666313666643334313866616630313162666436373031363630613065393938
34626562346333346362643461346163396565393064356663356132663731353364363565316466
64343938613864646438326630323463323435393730643631626237303466346538376566333235
35393939626463376633306566336564663765313333616539376237323735656335646635366662
65363761616439656162363834623035346230383434653662366235343334636630383331623637
63633666646262646134303930363431343964373963393035343932393739633664373362343563
30643766643935643731613737646134303133646632313630353530376635616439666566636634
64336161336535323362616637306231376534336437633364393962666662336564643030373030
37356136336235313737316232613036366632663266306131366634386462336535303636353663
34313538373965636366356435613664313934663264666533316231623331336533396634333761
66363562623235626235346232306664636239386131353966366134393932653465636137646333
37376534313361666534383433613331663131626461373931343338643433616561336566626361
62623661323338353265356361366537653164316465316631653263636165636166366466326230
37363632653031616563323737616438666166363936386531313537653239656163353731366662
66616438303538366338326533653630636338323435636263616263306636616632616533623130
32316139356430656262343133653932356231386364393734393964343537306561383138316536
64663132393239306665353636643561336334393938643239336637303766393533323436386665
61373036623566613931366365663961396432346535633837323531323934373939383832636637
64653131333432666366393431303036353165643362383934623730323965306163303333323236
63626130373234366531386465636336663065633339386337366361316235333563653534313064
38393437383438323762616330393535326333366364666330646135636339363232333336326162
39633964373439346237363734316633376234383933666137616533313034333932353330623333
37373963363234326634663634333766616232626262663764326164333464663066636339646463
37633937666532613762313330323365323033613039316463363633316164363663653865653363
64343536333634663864333035623238316536386131613762616161636361326632353234313236
61353831666664316361613065323038326439636262363762653066396434353137373439303662
35333437616432326334333536666633616334313530316262623634386531313066666662633633
33343635666433316437386661633361336466306161653262323734623961363736643532623132
39323364656339386561333463323866356438663965393661353638333530663934346563393565
33383862633632613366313738383337636366666565353464623131316630316237653335376239
66623436663366653530323864366264666362306461613939393463353437636535656539653131
63623433346431393165346235396565373038633430366138316163613238386231363736626630
65336533663233386331663534636236393531643139386434373032666234343736336264396630
32353131356461343938633164306338366338303736353665613137316664653337313135363031
61306636613833336538663764616138633532396164666461636362336533383330343434326237
32663833313066396263636634316566383966326334396465396633363961616365383434653137
33373163363362643765313033646537386533396230333232343163333432323631386662363765
66643232323239303766643033326362373162333237383863336365336530386563663264626365
38383337646138643762623937643333383834633735356434613561626433633035386236376135
36656234666363356433356565666161303664623236303961623332353431646565613733393031
38643661376636393335643839326133653464343637393661623464336230343761353738616438
31306335396538643366643734633164393037616363623435623330396432323231313665393864
64336662386639326637373866386234343638366630366434356537666437643364613466613331
63363338333962356261346664643635373964633339373935356339666661343532386534366264
30323538656539353433613765356232386366633065366235316434626438396630363862653433
63323537616630666339653732623838666132613333386231333934393065663930653436336133
35633432363738623366376537303065306330613664626535333361383431663331623836353361
64393937316639383934383963313738313539316334356232316265623830346337343933336465
[ansible-vault encrypted payload: 151 lines of hex ciphertext, not human-readable; omitted]