Mirror of https://github.com/natelandau/obsidian-metadata.git (synced 2025-11-16 08:53:48 -05:00)

Compare commits: 9 commits, v0.12.0 ... dependabot
| Author | SHA1 | Date |
|---|---|---|
|  | de37c7f12f |  |
|  | 64ac9a3ea3 |  |
|  | 8f7d4bb015 |  |
|  | ba5693cf61 |  |
|  | 28c721f6d9 |  |
|  | 10449b3e6a |  |
|  | 22e9719402 |  |
|  | 461a067115 |  |
|  | 34aa78c103 |  |
2  .github/workflows/automated-tests.yml  (vendored)

@@ -56,7 +56,7 @@ jobs:
            uploader.codecov.io:443

      - name: Checkout repository
-       uses: actions/checkout@v3
+       uses: actions/checkout@v4

      - name: Setup Python and Poetry
        uses: ./.github/actions/setup-poetry
2  .github/workflows/commit-linter.yml  (vendored)

@@ -31,7 +31,7 @@ jobs:
            github.com:443

      - name: Checkout repository
-       uses: actions/checkout@v3
+       uses: actions/checkout@v4
        with:
          fetch-depth: 0
2  .github/workflows/create-release.yml  (vendored)

@@ -37,7 +37,7 @@ jobs:
            uploads.github.com:443

      - name: Checkout repository
-       uses: actions/checkout@v3
+       uses: actions/checkout@v4
        with:
          fetch-depth: 0
2  .github/workflows/devcontainer-checker.yml  (vendored)

@@ -53,7 +53,7 @@ jobs:
            webi.sh:443

      - name: Checkout
-       uses: actions/checkout@v3
+       uses: actions/checkout@v4

      - name: Build and run dev container task
        uses: devcontainers/ci@v0.3
2  .github/workflows/labeler.yml  (vendored)

@@ -18,6 +18,6 @@ jobs:
            api.github.com:443
            github.com:443

-     - uses: actions/labeler@v4
+     - uses: actions/labeler@v5
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
2  .github/workflows/pypi-release.yml  (vendored)

@@ -33,7 +33,7 @@ jobs:
            upload.pypi.org:443

      - name: Checkout repository
-       uses: actions/checkout@v3
+       uses: actions/checkout@v4

      - name: Setup Python and Poetry
        uses: ./.github/actions/setup-poetry
.pre-commit-config.yaml

@@ -5,7 +5,7 @@ default_stages: [commit, manual]
fail_fast: true
repos:
  - repo: "https://github.com/commitizen-tools/commitizen"
-   rev: 3.2.2
+   rev: v3.13.0
    hooks:
      - id: commitizen
      - id: commitizen-branch

@@ -26,7 +26,7 @@ repos:
      - id: text-unicode-replacement-char

  - repo: "https://github.com/pre-commit/pre-commit-hooks"
-   rev: v4.4.0
+   rev: v4.5.0
    hooks:
      - id: check-added-large-files
      - id: check-ast

@@ -54,30 +54,39 @@ repos:
        types: [python]

  - repo: "https://github.com/adrienverge/yamllint.git"
-   rev: v1.31.0
+   rev: v1.33.0
    hooks:
      - id: yamllint
        files: ^.*\.(yaml|yml)$
        entry: yamllint --strict --config-file .yamllint.yml

  - repo: "https://github.com/charliermarsh/ruff-pre-commit"
-   rev: "v0.0.267"
+   rev: "v0.1.8"
    hooks:
      - id: ruff
        args: ["--extend-ignore", "I001,D301,D401"]
        exclude: tests/

  - repo: "https://github.com/jendrikseipp/vulture"
-   rev: "v2.7"
+   rev: "v2.10"
    hooks:
      - id: vulture

+ - repo: "https://github.com/crate-ci/typos"
+   rev: "v1.16.25"
+   hooks:
+     - id: typos

  - repo: local
    hooks:
      - id: custom
        name: custom pre-commit script
        entry: scripts/pre-commit-hook.sh
        # This calls a custom pre-commit script.
        # Disable if you don't have it.
      - id: stopwords
        name: stopwords
        entry: bash -c '~/bin/git-stopwords ${PWD}/"$@"'
        language: system
        pass_filenames: true
        types: [text]

      - id: black
        name: black
7  .typos.toml  (new file)

@@ -0,0 +1,7 @@
[default]
default.locale = "en_us"

[default.extend-words]
nd = "nd" # In the context of 2nd
[files]
extend-exclude = ["*_cache", ".venv", "src/jdfile/utils/strings.py", "tests/fixtures/"]
CHANGELOG.md

@@ -1,3 +1,9 @@
+ ## v0.12.1 (2023-09-02)
+
+ ### Fix
+
+ - **notes**: preserve file encoding when writing to filesystem (#59)
+
## v0.12.0 (2023-05-17)

### Feat

@@ -95,7 +101,7 @@

### Fix

- - **ui**: add seperator to top of select lists
+ - **ui**: add separator to top of select lists
- allow adding inline tags with same key different values (#17)
- remove unnecessary question when viewing diffs
1337  poetry.lock  (generated)

File diff suppressed because it is too large.
pyproject.toml

@@ -11,45 +11,45 @@
name = "obsidian-metadata"
readme = "README.md"
repository = "https://github.com/natelandau/obsidian-metadata"
- version = "0.12.0"
+ version = "0.12.1"

[tool.poetry.scripts] # https://python-poetry.org/docs/pyproject/#scripts
obsidian-metadata = "obsidian_metadata.cli:app"

[tool.poetry.dependencies]
- charset-normalizer = "^3.1.0"
- emoji = "^2.2.0"
+ charset-normalizer = "^3.2.0"
+ emoji = "^2.8.0"
loguru = "^0.7.0"
python = "^3.10"
questionary = "^1.10.0"
- regex = "^2023.5.5"
- rich = "^13.3.5"
- ruamel-yaml = "^0.17.26"
- shellingham = "^1.5.0.post1"
- tomlkit = "^0.11.8"
+ regex = "^2023.8.8"
+ rich = "^13.5.2"
+ ruamel-yaml = "^0.17.32"
+ shellingham = "^1.5.3"
+ tomlkit = "^0.12.1"
typer = "^0.9.0"

[tool.poetry.group.test.dependencies]
- pytest = "^7.3.1"
+ pytest = "^7.4.0"
pytest-clarity = "^1.0.1"
- pytest-mock = "^3.10.0"
+ pytest-mock = "^3.11.1"
pytest-pretty-terminal = "^1.1.0"
- pytest-xdist = "^3.3.0"
+ pytest-xdist = "^3.3.1"

[tool.poetry.group.dev.dependencies]
- black = "^23.3.0"
- commitizen = "^3.2.2"
- coverage = "^7.2.5"
+ black = "^23.7.0"
+ commitizen = "^3.7.0"
+ coverage = "^7.3.0"
interrogate = "^1.5.0"
- mypy = "^1.3.0"
- pdoc = "^13.1.1"
- poethepoet = "^0.20.0"
- pre-commit = "^3.3.1"
- ruff = "^0.0.267"
- sh = "^2.0.4"
- typeguard = "^4.0.0"
- types-python-dateutil = "^2.8.19.13"
- vulture = "^2.7"
+ mypy = "^1.5.1"
+ pdoc = "^14.0.0"
+ poethepoet = "^0.22.0"
+ pre-commit = "^3.3.3"
+ ruff = "^0.0.286"
+ sh = "^2.0.6"
+ types-python-dateutil = "^2.8.19.14"
+ typos = "^1.16.9"
+ vulture = "^2.9.1"

[tool.black]
line-length = 100

@@ -59,7 +59,7 @@
changelog_incremental = true
tag_format = "v$version"
update_changelog_on_bump = true
- version = "0.12.0"
+ version = "0.12.1"
version_files = ["pyproject.toml:version", "src/obsidian_metadata/__version__.py:__version__"]

[tool.coverage.report] # https://coverage.readthedocs.io/en/latest/config.html#report

@@ -270,6 +270,9 @@
[[tool.poe.tasks.lint.sequence]]
shell = "yamllint ."

+ [[tool.poe.tasks.lint.sequence]]
+ shell = "typos"
+
[[tool.poe.tasks.lint.sequence]]
shell = "interrogate -c pyproject.toml ."
@@ -1,821 +0,0 @@
#!/usr/bin/env bash
# shellcheck disable=SC2317

_mainScript_() {

    _customStopWords_() {
        # DESC: Check if any specified stop words are in the commit diff. If found, the pre-commit hook will exit with a non-zero exit code.
        # ARGS:
        #   $1 (Required): Path to file
        # OUTS:
        #   0: Success
        #   1: Failure
        # USAGE:
        #   _customStopWords_ "/path/to/file.sh"
        # NOTE:
        #   Requires a plaintext stopword file located at
        #   `~/.git_stop_words` containing one stopword per line.

        [[ $# == 0 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"

        local _gitDiffTmp
        local FILE_TO_CHECK="${1}"

        _gitDiffTmp="${TMP_DIR}/${RANDOM}.${RANDOM}.${RANDOM}.diff.txt"

        if [ -f "${STOP_WORD_FILE}" ]; then

            if [[ $(basename "${STOP_WORD_FILE}") == "$(basename "${FILE_TO_CHECK}")" ]]; then
                debug "$(basename "${1}"): Don't check stop words file for stop words."
                return 0
            fi
            debug "$(basename "${FILE_TO_CHECK}"): Checking for stop words..."

            # remove blank lines from stopwords file
            sed '/^$/d' "${STOP_WORD_FILE}" >"${TMP_DIR}/pattern_file.txt"

            # Check for stopwords
            if git diff --cached -- "${FILE_TO_CHECK}" | grep -i -q "new file mode"; then
                if grep -i --file="${TMP_DIR}/pattern_file.txt" "${FILE_TO_CHECK}"; then
                    return 1
                else
                    return 0
                fi
            else
                # Add diff to a temporary file
                git diff --cached -- "${FILE_TO_CHECK}" | grep '^+' >"${_gitDiffTmp}"
                if grep -i --file="${TMP_DIR}/pattern_file.txt" "${_gitDiffTmp}"; then
                    return 1
                else
                    return 0
                fi
            fi

        else

            notice "Could not find git stopwords file expected at '${STOP_WORD_FILE}'. Continuing..."
            return 0
        fi
    }

    # Don't lint binary files
    if [[ ${ARGS[0]} =~ \.(jpg|jpeg|gif|png|exe|zip|gzip|tiff|tar|dmg|ttf|otf|m4a|mp3|mkv|mov|avi|eot|svg|woff2?|aac|wav|flac|pdf|doc|xls|ppt|7z|bin|dmg|dat|sql|ico|mpe?g)$ ]]; then
        _safeExit_ 0
    fi

    if ! _customStopWords_ "${ARGS[0]}"; then
        error "Stop words found in ${ARGS[0]}"
        _safeExit_ 1
    fi
}
# end _mainScript_
# ################################## Flags and defaults
# Required variables
LOGFILE="${HOME}/logs/$(basename "$0").log"
QUIET=false
LOGLEVEL=ERROR
VERBOSE=false
FORCE=false
DRYRUN=false
declare -a ARGS=()

# Script specific
LOGLEVEL=NONE
STOP_WORD_FILE="${HOME}/.git_stop_words"
shopt -s nocasematch
# ################################## Custom utility functions (Pasted from repository)

# ################################## Functions required for this template to work

_setColors_() {
    # DESC:
    #   Sets colors use for alerts.
    # ARGS:
    #   None
    # OUTS:
    #   None
    # USAGE:
    #   printf "%s\n" "${blue}Some text${reset}"

    if tput setaf 1 >/dev/null 2>&1; then
        bold=$(tput bold)
        underline=$(tput smul)
        reverse=$(tput rev)
        reset=$(tput sgr0)

        if [[ $(tput colors) -ge 256 ]] >/dev/null 2>&1; then
            white=$(tput setaf 231)
            blue=$(tput setaf 38)
            yellow=$(tput setaf 11)
            green=$(tput setaf 82)
            red=$(tput setaf 9)
            purple=$(tput setaf 171)
            gray=$(tput setaf 250)
        else
            white=$(tput setaf 7)
            blue=$(tput setaf 38)
            yellow=$(tput setaf 3)
            green=$(tput setaf 2)
            red=$(tput setaf 9)
            purple=$(tput setaf 13)
            gray=$(tput setaf 7)
        fi
    else
        bold="\033[4;37m"
        reset="\033[0m"
        underline="\033[4;37m"
        # shellcheck disable=SC2034
        reverse=""
        white="\033[0;37m"
        blue="\033[0;34m"
        yellow="\033[0;33m"
        green="\033[1;32m"
        red="\033[0;31m"
        purple="\033[0;35m"
        gray="\033[0;37m"
    fi
}

_alert_() {
    # DESC:
    #   Controls all printing of messages to log files and stdout.
    # ARGS:
    #   $1 (required) - The type of alert to print
    #                   (success, header, notice, dryrun, debug, warning, error,
    #                   fatal, info, input)
    #   $2 (required) - The message to be printed to stdout and/or a log file
    #   $3 (optional) - Pass '${LINENO}' to print the line number where the _alert_ was triggered
    # OUTS:
    #   stdout: The message is printed to stdout
    #   log file: The message is printed to a log file
    # USAGE:
    #   [_alertType] "[MESSAGE]" "${LINENO}"
    # NOTES:
    #   - The colors of each alert type are set in this function
    #   - For specified alert types, the funcstac will be printed

    local _color
    local _alertType="${1}"
    local _message="${2}"
    local _line="${3-}" # Optional line number

    [[ $# -lt 2 ]] && fatal 'Missing required argument to _alert_'

    if [[ -n ${_line} && ${_alertType} =~ ^fatal && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
        _message="${_message} ${gray}(line: ${_line}) $(_printFuncStack_)"
    elif [[ -n ${_line} && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
        _message="${_message} ${gray}(line: ${_line})"
    elif [[ -z ${_line} && ${_alertType} =~ ^fatal && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
        _message="${_message} ${gray}$(_printFuncStack_)"
    fi

    if [[ ${_alertType} =~ ^(error|fatal) ]]; then
        _color="${bold}${red}"
    elif [ "${_alertType}" == "info" ]; then
        _color="${gray}"
    elif [ "${_alertType}" == "warning" ]; then
        _color="${red}"
    elif [ "${_alertType}" == "success" ]; then
        _color="${green}"
    elif [ "${_alertType}" == "debug" ]; then
        _color="${purple}"
    elif [ "${_alertType}" == "header" ]; then
        _color="${bold}${white}${underline}"
    elif [ "${_alertType}" == "notice" ]; then
        _color="${bold}"
    elif [ "${_alertType}" == "input" ]; then
        _color="${bold}${underline}"
    elif [ "${_alertType}" = "dryrun" ]; then
        _color="${blue}"
    else
        _color=""
    fi

    _writeToScreen_() {
        ("${QUIET}") && return 0 # Print to console when script is not 'quiet'
        [[ ${VERBOSE} == false && ${_alertType} =~ ^(debug|verbose) ]] && return 0

        if ! [[ -t 1 || -z ${TERM-} ]]; then # Don't use colors on non-recognized terminals
            _color=""
            reset=""
        fi

        if [[ ${_alertType} == header ]]; then
            printf "${_color}%s${reset}\n" "${_message}"
        else
            printf "${_color}[%7s] %s${reset}\n" "${_alertType}" "${_message}"
        fi
    }
    _writeToScreen_

    _writeToLog_() {
        [[ ${_alertType} == "input" ]] && return 0
        [[ ${LOGLEVEL} =~ (off|OFF|Off) ]] && return 0
        if [ -z "${LOGFILE-}" ]; then
            LOGFILE="$(pwd)/$(basename "$0").log"
        fi
        [ ! -d "$(dirname "${LOGFILE}")" ] && mkdir -p "$(dirname "${LOGFILE}")"
        [[ ! -f ${LOGFILE} ]] && touch "${LOGFILE}"

        # Don't use colors in logs
        local _cleanmessage
        _cleanmessage="$(printf "%s" "${_message}" | sed -E 's/(\x1b)?\[(([0-9]{1,2})(;[0-9]{1,3}){0,2})?[mGK]//g')"
        # Print message to log file
        printf "%s [%7s] %s %s\n" "$(date +"%b %d %R:%S")" "${_alertType}" "[$(/bin/hostname)]" "${_cleanmessage}" >>"${LOGFILE}"
    }

    # Write specified log level data to logfile
    case "${LOGLEVEL:-ERROR}" in
        ALL | all | All)
            _writeToLog_
            ;;
        DEBUG | debug | Debug)
            _writeToLog_
            ;;
        INFO | info | Info)
            if [[ ${_alertType} =~ ^(error|fatal|warning|info|notice|success) ]]; then
                _writeToLog_
            fi
            ;;
        NOTICE | notice | Notice)
            if [[ ${_alertType} =~ ^(error|fatal|warning|notice|success) ]]; then
                _writeToLog_
            fi
            ;;
        WARN | warn | Warn)
            if [[ ${_alertType} =~ ^(error|fatal|warning) ]]; then
                _writeToLog_
            fi
            ;;
        ERROR | error | Error)
            if [[ ${_alertType} =~ ^(error|fatal) ]]; then
                _writeToLog_
            fi
            ;;
        FATAL | fatal | Fatal)
            if [[ ${_alertType} =~ ^fatal ]]; then
                _writeToLog_
            fi
            ;;
        OFF | off)
            return 0
            ;;
        *)
            if [[ ${_alertType} =~ ^(error|fatal) ]]; then
                _writeToLog_
            fi
            ;;
    esac

} # /_alert_

error() { _alert_ error "${1}" "${2-}"; }
warning() { _alert_ warning "${1}" "${2-}"; }
notice() { _alert_ notice "${1}" "${2-}"; }
info() { _alert_ info "${1}" "${2-}"; }
success() { _alert_ success "${1}" "${2-}"; }
dryrun() { _alert_ dryrun "${1}" "${2-}"; }
input() { _alert_ input "${1}" "${2-}"; }
header() { _alert_ header "${1}" "${2-}"; }
debug() { _alert_ debug "${1}" "${2-}"; }
fatal() {
    _alert_ fatal "${1}" "${2-}"
    _safeExit_ "1"
}

_printFuncStack_() {
    # DESC:
    #   Prints the function stack in use. Used for debugging, and error reporting.
    # ARGS:
    #   None
    # OUTS:
    #   stdout: Prints [function]:[file]:[line]
    # NOTE:
    #   Does not print functions from the alert class
    local _i
    declare -a _funcStackResponse=()
    for ((_i = 1; _i < ${#BASH_SOURCE[@]}; _i++)); do
        case "${FUNCNAME[${_i}]}" in
            _alert_ | _trapCleanup_ | fatal | error | warning | notice | info | debug | dryrun | header | success)
                continue
                ;;
            *)
                _funcStackResponse+=("${FUNCNAME[${_i}]}:$(basename "${BASH_SOURCE[${_i}]}"):${BASH_LINENO[_i - 1]}")
                ;;
        esac

    done
    printf "( "
    printf %s "${_funcStackResponse[0]}"
    printf ' < %s' "${_funcStackResponse[@]:1}"
    printf ' )\n'
}
_safeExit_() {
    # DESC:
    #   Cleanup and exit from a script
    # ARGS:
    #   $1 (optional) - Exit code (defaults to 0)
    # OUTS:
    #   None

    if [[ -d ${SCRIPT_LOCK-} ]]; then
        if command rm -rf "${SCRIPT_LOCK}"; then
            debug "Removing script lock"
        else
            warning "Script lock could not be removed. Try manually deleting ${yellow}'${SCRIPT_LOCK}'"
        fi
    fi

    if [[ -n ${TMP_DIR-} && -d ${TMP_DIR-} ]]; then
        if [[ ${1-} == 1 && -n "$(ls "${TMP_DIR}")" ]]; then
            command rm -r "${TMP_DIR}"
        else
            command rm -r "${TMP_DIR}"
            debug "Removing temp directory"
        fi
    fi

    trap - INT TERM EXIT
    exit "${1:-0}"
}

_trapCleanup_() {
    # DESC:
    #   Log errors and cleanup from script when an error is trapped. Called by 'trap'
    # ARGS:
    #   $1: Line number where error was trapped
    #   $2: Line number in function
    #   $3: Command executing at the time of the trap
    #   $4: Names of all shell functions currently in the execution call stack
    #   $5: Scriptname
    #   $6: $BASH_SOURCE
    # USAGE:
    #   trap '_trapCleanup_ ${LINENO} ${BASH_LINENO} "${BASH_COMMAND}" "${FUNCNAME[*]}" "${0}" "${BASH_SOURCE[0]}"' EXIT INT TERM SIGINT SIGQUIT SIGTERM ERR
    # OUTS:
    #   Exits script with error code 1

    local _line=${1-} # LINENO
    local _linecallfunc=${2-}
    local _command="${3-}"
    local _funcstack="${4-}"
    local _script="${5-}"
    local _sourced="${6-}"

    # Replace the cursor in-case 'tput civis' has been used
    tput cnorm

    if declare -f "fatal" &>/dev/null && declare -f "_printFuncStack_" &>/dev/null; then

        _funcstack="'$(printf "%s" "${_funcstack}" | sed -E 's/ / < /g')'"

        if [[ ${_script##*/} == "${_sourced##*/}" ]]; then
            fatal "${7-} command: '${_command}' (line: ${_line}) [func: $(_printFuncStack_)]"
        else
            fatal "${7-} command: '${_command}' (func: ${_funcstack} called at line ${_linecallfunc} of '${_script##*/}') (line: ${_line} of '${_sourced##*/}') "
        fi
    else
        printf "%s\n" "Fatal error trapped. Exiting..."
    fi

    if declare -f _safeExit_ &>/dev/null; then
        _safeExit_ 1
    else
        exit 1
    fi
}

_makeTempDir_() {
    # DESC:
    #   Creates a temp directory to house temporary files
    # ARGS:
    #   $1 (Optional) - First characters/word of directory name
    # OUTS:
    #   Sets $TMP_DIR variable to the path of the temp directory
    # USAGE:
    #   _makeTempDir_ "$(basename "$0")"

    [ -d "${TMP_DIR-}" ] && return 0

    if [ -n "${1-}" ]; then
        TMP_DIR="${TMPDIR:-/tmp/}${1}.${RANDOM}.${RANDOM}.$$"
    else
        TMP_DIR="${TMPDIR:-/tmp/}$(basename "$0").${RANDOM}.${RANDOM}.${RANDOM}.$$"
    fi
    (umask 077 && mkdir "${TMP_DIR}") || {
        fatal "Could not create temporary directory! Exiting."
    }
    debug "\$TMP_DIR=${TMP_DIR}"
}

# shellcheck disable=SC2120
_acquireScriptLock_() {
    # DESC:
    #   Acquire script lock to prevent running the same script a second time before the
    #   first instance exits
    # ARGS:
    #   $1 (optional) - Scope of script execution lock (system or user)
    # OUTS:
    #   exports $SCRIPT_LOCK - Path to the directory indicating we have the script lock
    #   Exits script if lock cannot be acquired
    # NOTE:
    #   If the lock was acquired it's automatically released in _safeExit_()

    local _lockDir
    if [[ ${1-} == 'system' ]]; then
        _lockDir="${TMPDIR:-/tmp/}$(basename "$0").lock"
    else
        _lockDir="${TMPDIR:-/tmp/}$(basename "$0").${UID}.lock"
    fi

    if command mkdir "${_lockDir}" 2>/dev/null; then
        readonly SCRIPT_LOCK="${_lockDir}"
        debug "Acquired script lock: ${yellow}${SCRIPT_LOCK}${purple}"
    else
        if declare -f "_safeExit_" &>/dev/null; then
            error "Unable to acquire script lock: ${yellow}${_lockDir}${red}"
            fatal "If you trust the script isn't running, delete the lock dir"
        else
            printf "%s\n" "ERROR: Could not acquire script lock. If you trust the script isn't running, delete: ${_lockDir}"
            exit 1
        fi

    fi
}

_setPATH_() {
    # DESC:
    #   Add directories to $PATH so script can find executables
    # ARGS:
    #   $@ - One or more paths
    # OPTS:
    #   -x - Fail if directories are not found
    # OUTS:
    #   0: Success
    #   1: Failure
    #   Adds items to $PATH
    # USAGE:
    #   _setPATH_ "/usr/local/bin" "${HOME}/bin" "$(npm bin)"

    [[ $# == 0 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"

    local opt
    local OPTIND=1
    local _failIfNotFound=false

    while getopts ":xX" opt; do
        case ${opt} in
            x | X) _failIfNotFound=true ;;
            *)
                {
                    error "Unrecognized option '${1}' passed to _backupFile_" "${LINENO}"
                    return 1
                }
                ;;
        esac
    done
    shift $((OPTIND - 1))

    local _newPath

    for _newPath in "$@"; do
        if [ -d "${_newPath}" ]; then
            if ! printf "%s" "${PATH}" | grep -Eq "(^|:)${_newPath}($|:)"; then
                if PATH="${_newPath}:${PATH}"; then
                    debug "Added '${_newPath}' to PATH"
                else
                    debug "'${_newPath}' already in PATH"
                fi
            else
                debug "_setPATH_: '${_newPath}' already exists in PATH"
            fi
        else
            debug "_setPATH_: can not find: ${_newPath}"
            if [[ ${_failIfNotFound} == true ]]; then
                return 1
            fi
            continue
        fi
    done
    return 0
}

_useGNUutils_() {
    # DESC:
    #   Add GNU utilities to PATH to allow consistent use of sed/grep/tar/etc. on MacOS
    # ARGS:
    #   None
    # OUTS:
    #   0 if successful
    #   1 if unsuccessful
    #   PATH: Adds GNU utilities to the path
    # USAGE:
    #   # if ! _useGNUUtils_; then exit 1; fi
    # NOTES:
    #   GNU utilities can be added to MacOS using Homebrew

    ! declare -f "_setPATH_" &>/dev/null && fatal "${FUNCNAME[0]} needs function _setPATH_"

    if _setPATH_ \
        "/usr/local/opt/gnu-tar/libexec/gnubin" \
        "/usr/local/opt/coreutils/libexec/gnubin" \
        "/usr/local/opt/gnu-sed/libexec/gnubin" \
        "/usr/local/opt/grep/libexec/gnubin" \
        "/usr/local/opt/findutils/libexec/gnubin" \
        "/opt/homebrew/opt/findutils/libexec/gnubin" \
        "/opt/homebrew/opt/gnu-sed/libexec/gnubin" \
        "/opt/homebrew/opt/grep/libexec/gnubin" \
        "/opt/homebrew/opt/coreutils/libexec/gnubin" \
        "/opt/homebrew/opt/gnu-tar/libexec/gnubin"; then
        return 0
    else
        return 1
    fi

}

_homebrewPath_() {
    # DESC:
    #   Add homebrew bin dir to PATH
    # ARGS:
    #   None
    # OUTS:
    #   0 if successful
    #   1 if unsuccessful
    #   PATH: Adds homebrew bin directory to PATH
    # USAGE:
    #   # if ! _homebrewPath_; then exit 1; fi

    ! declare -f "_setPATH_" &>/dev/null && fatal "${FUNCNAME[0]} needs function _setPATH_"

    if _uname=$(command -v uname); then
        if "${_uname}" | tr '[:upper:]' '[:lower:]' | grep -q 'darwin'; then
            if _setPATH_ "/usr/local/bin" "/opt/homebrew/bin"; then
                return 0
            else
                return 1
            fi
        fi
    else
        if _setPATH_ "/usr/local/bin" "/opt/homebrew/bin"; then
            return 0
        else
            return 1
        fi
    fi
}
_parseOptions_() {
    # DESC:
    #   Iterates through options passed to script and sets variables. Will break -ab into -a -b
    #   when needed and --foo=bar into --foo bar
    # ARGS:
    #   $@ from command line
    # OUTS:
    #   Sets array 'ARGS' containing all arguments passed to script that were not parsed as options
    # USAGE:
    #   _parseOptions_ "$@"

    # Iterate over options
    local _optstring=h
    declare -a _options
    local _c
    local i
    while (($#)); do
        case $1 in
            # If option is of type -ab
            -[!-]?*)
                # Loop over each character starting with the second
                for ((i = 1; i < ${#1}; i++)); do
                    _c=${1:i:1}
                    _options+=("-${_c}") # Add current char to options
                    # If option takes a required argument, and it's not the last char make
                    # the rest of the string its argument
                    if [[ ${_optstring} == *"${_c}:"* && -n ${1:i+1} ]]; then
                        _options+=("${1:i+1}")
                        break
                    fi
                done
                ;;
            # If option is of type --foo=bar
            --?*=*) _options+=("${1%%=*}" "${1#*=}") ;;
            # add --endopts for --
            --) _options+=(--endopts) ;;
            # Otherwise, nothing special
            *) _options+=("$1") ;;
        esac
        shift
    done
    set -- "${_options[@]-}"
    unset _options

    # Read the options and set stuff
    # shellcheck disable=SC2034
    while [[ ${1-} == -?* ]]; do
        case $1 in
            # Custom options

            # Common options
            -h | --help)
                _usage_
                _safeExit_
                ;;
            --loglevel)
                shift
                LOGLEVEL=${1}
                ;;
            --logfile)
                shift
                LOGFILE="${1}"
                ;;
            -n | --dryrun) DRYRUN=true ;;
            -v | --verbose) VERBOSE=true ;;
            -q | --quiet) QUIET=true ;;
            --force) FORCE=true ;;
            --endopts)
                shift
                break
                ;;
            *)
                if declare -f _safeExit_ &>/dev/null; then
                    fatal "invalid option: $1"
                else
                    printf "%s\n" "ERROR: Invalid option: $1"
                    exit 1
                fi
                ;;
        esac
        shift
    done

    if [[ -z ${*} || ${*} == null ]]; then
        ARGS=()
    else
        ARGS+=("$@") # Store the remaining user input as arguments.
    fi
}

_columns_() {
    # DESC:
    #   Prints a two column output from a key/value pair.
    #   Optionally pass a number of 2 space tabs to indent the output.
    # ARGS:
    #   $1 (required): Key name (Left column text)
    #   $2 (required): Long value (Right column text. Wraps around if too long)
    #   $3 (optional): Number of 2 character tabs to indent the command (default 1)
    # OPTS:
    #   -b    Bold the left column
    #   -u    Underline the left column
    #   -r    Reverse background and foreground colors
    # OUTS:
    #   stdout: Prints the output in columns
    # NOTE:
    #   Long text or ANSI colors in the first column may create display issues
    # USAGE:
    #   _columns_ "Key" "Long value text" [tab level]

    [[ $# -lt 2 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"

    local opt
    local OPTIND=1
    local _style=""
    while getopts ":bBuUrR" opt; do
        case ${opt} in
            b | B) _style="${_style}${bold}" ;;
            u | U) _style="${_style}${underline}" ;;
            r | R) _style="${_style}${reverse}" ;;
            *) fatal "Unrecognized option '${1}' passed to ${FUNCNAME[0]}. Exiting." ;;
        esac
    done
    shift $((OPTIND - 1))

    local _key="${1}"
    local _value="${2}"
    local _tabLevel="${3-}"
    local _tabSize=2
    local _line
    local _rightIndent
    local _leftIndent
    if [[ -z ${3-} ]]; then
        _tabLevel=0
    fi

    _leftIndent="$((_tabLevel * _tabSize))"

    local _leftColumnWidth="$((30 + _leftIndent))"

    if [ "$(tput cols)" -gt 180 ]; then
        _rightIndent=110
    elif [ "$(tput cols)" -gt 160 ]; then
        _rightIndent=90
    elif [ "$(tput cols)" -gt 130 ]; then
        _rightIndent=60
    elif [ "$(tput cols)" -gt 120 ]; then
        _rightIndent=50
    elif [ "$(tput cols)" -gt 110 ]; then
        _rightIndent=40
    elif [ "$(tput cols)" -gt 100 ]; then
        _rightIndent=30
    elif [ "$(tput cols)" -gt 90 ]; then
        _rightIndent=20
    elif [ "$(tput cols)" -gt 80 ]; then
        _rightIndent=10
    else
        _rightIndent=0
    fi

    local _rightWrapLength=$(($(tput cols) - _leftColumnWidth - _leftIndent - _rightIndent))

    local _first_line=0
    while read -r _line; do
        if [[ ${_first_line} -eq 0 ]]; then
            _first_line=1
        else
            _key=" "
        fi
        printf "%-${_leftIndent}s${_style}%-${_leftColumnWidth}b${reset} %b\n" "" "${_key}${reset}" "${_line}"
    done <<<"$(fold -w${_rightWrapLength} -s <<<"${_value}")"
}

_usage_() {
    cat <<USAGE_TEXT

  ${bold}$(basename "$0") [OPTION]... [FILE]...${reset}

  Custom pre-commit hook script. This script is intended to be used as part of the pre-commit pipeline managed within .pre-commit-config.yaml.

  ${bold}${underline}Options:${reset}
    $(_columns_ -b -- '-h, --help' "Display this help and exit" 2)
    $(_columns_ -b -- "--loglevel [LEVEL]" "One of: FATAL, ERROR (default), WARN, INFO, NOTICE, DEBUG, ALL, OFF" 2)
    $(_columns_ -b -- "--logfile [FILE]" "Full PATH to logfile. (Default is '\${HOME}/logs/$(basename "$0").log')" 2)
    $(_columns_ -b -- "-n, --dryrun" "Non-destructive. Makes no permanent changes." 2)
    $(_columns_ -b -- "-q, --quiet" "Quiet (no output)" 2)
    $(_columns_ -b -- "-v, --verbose" "Output more information. (Items echoed to 'verbose')" 2)
    $(_columns_ -b -- "--force" "Skip all user interaction. Implied 'Yes' to all actions." 2)

  ${bold}${underline}Example Usage:${reset}

    ${gray}# Run the script and specify log level and log file.${reset}
    $(basename "$0") -vn --logfile "/path/to/file.log" --loglevel 'WARN'
USAGE_TEXT
}

# ################################## INITIALIZE AND RUN THE SCRIPT
# (Comment or uncomment the lines below to customize script behavior)

trap '_trapCleanup_ ${LINENO} ${BASH_LINENO} "${BASH_COMMAND}" "${FUNCNAME[*]}" "${0}" "${BASH_SOURCE[0]}"' EXIT INT TERM SIGINT SIGQUIT SIGTERM

# Trap errors in subshells and functions
set -o errtrace

# Exit on error. Append '||true' if you expect an error
set -o errexit

# Use last non-zero exit code in a pipeline
set -o pipefail

# Confirm we have BASH greater than v4
[ "${BASH_VERSINFO:-0}" -ge 4 ] || {
    printf "%s\n" "ERROR: BASH_VERSINFO is '${BASH_VERSINFO:-0}'. This script requires BASH v4 or greater."
    exit 1
}

# Make `for f in *.txt` work when `*.txt` matches zero files
shopt -s nullglob globstar

# Set IFS to preferred implementation
IFS=$' \n\t'

# Run in debug mode
# set -o xtrace

# Initialize color constants
_setColors_

# Disallow expansion of unset variables
set -o nounset

# Force arguments when invoking the script
# [[ $# -eq 0 ]] && _parseOptions_ "-h"

# Parse arguments passed to script
_parseOptions_ "$@"

# Create a temp directory '$TMP_DIR'
_makeTempDir_ "$(basename "$0")"

# Acquire script lock
# _acquireScriptLock_

# Add Homebrew bin directory to PATH (MacOS)
# _homebrewPath_

# Source GNU utilities from Homebrew (MacOS)
# _useGNUutils_

# Run the main logic script
_mainScript_

# Exit cleanly
_safeExit_
src/obsidian_metadata/__version__.py

@@ -1,2 +1,2 @@
"""obsidian-metadata version."""
- __version__ = "0.12.0"
+ __version__ = "0.12.1"
@@ -54,7 +54,7 @@ class ConfigQuestions:
class Config:
    """Representation of a configuration file."""

-   def __init__(self, config_path: Path = None, vault_path: Path = None) -> None:
+   def __init__(self, config_path: Path | None = None, vault_path: Path | None = None) -> None:
        if vault_path is None:
            self.config_path: Path = self._validate_config_path(Path(config_path))
            self.config: dict[str, Any] = self._load_config()
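This and many of the hunks that follow make the same mechanical change: `param: str = None` becomes `param: str | None = None`. A `None` default does not make an annotation optional by itself, so strict type checkers reject the old form; the `X | None` union syntax (PEP 604) is available here because the project already requires Python 3.10+. A small sketch of the difference (the fallback value is made up purely for illustration):

```python
from pathlib import Path


# Old style: the annotation says `Path`, but the default is None. mypy (without
# implicit-optional) and ruff flag this as an implicit Optional.
def load_config_old(config_path: Path = None) -> Path:
    return config_path or Path("config.toml")  # illustrative fallback


# New style (PEP 604): the signature states explicitly that None is accepted.
def load_config_new(config_path: Path | None = None) -> Path:
    return config_path if config_path is not None else Path("config.toml")


print(load_config_new())                # config.toml
print(load_config_new(Path("x.toml")))  # x.toml
```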
@@ -87,7 +87,7 @@ def info(msg: str) -> None:
    console.print(f"INFO | {msg}")


- def usage(msg: str, width: int = None) -> None:
+ def usage(msg: str, width: int | None = None) -> None:
    """Print a usage message without using logging.

    Args:

@@ -124,14 +124,10 @@ def dim(msg: str) -> None:

def _log_formatter(record: dict) -> str:
    """Create custom log formatter based on the log level. This effects the logs sent to stdout/stderr but not the log file."""
-   if (
-       record["level"].name == "INFO"
-       or record["level"].name == "SUCCESS"
-       or record["level"].name == "WARNING"
-   ):
+   if record["level"].name in ("INFO", "SUCCESS", "WARNING"):
        return "<level><normal>{level: <8} | {message}</normal></level>\n{exception}"

-   if record["level"].name == "TRACE" or record["level"].name == "DEBUG":
+   if record["level"].name in ("TRACE", "DEBUG"):
        return "<level><normal>{level: <8} | {message}</normal></level> <fg #c5c5c5>({name}:{function}:{line})</fg #c5c5c5>\n{exception}"

    return "<level>{level: <8} | {message}</level> <fg #c5c5c5>({name}:{function}:{line})</fg #c5c5c5>\n{exception}"
@@ -38,7 +38,7 @@ def clear_screen() -> None: # pragma: no cover


def dict_contains(
-   dictionary: dict[str, list[str]], key: str, value: str = None, is_regex: bool = False
+   dictionary: dict[str, list[str]], key: str, value: str | None = None, is_regex: bool = False
) -> bool:
    """Check if a dictionary contains a key or if a key contains a value.


@@ -79,7 +79,7 @@ def dict_keys_to_lower(dictionary: dict) -> dict:


def delete_from_dict( # noqa: C901
-   dictionary: dict, key: str, value: str = None, is_regex: bool = False
+   dictionary: dict, key: str, value: str | None = None, is_regex: bool = False
) -> dict:
    """Delete a key or a value from a dictionary.


@@ -172,7 +172,7 @@ def merge_dictionaries(dict1: dict, dict2: dict) -> dict:


def rename_in_dict(
-   dictionary: dict[str, list[str]], key: str, value_1: str, value_2: str = None
+   dictionary: dict[str, list[str]], key: str, value_1: str, value_2: str | None = None
) -> dict:
    """Rename a key or a value in a dictionary who's values are lists of strings.


@@ -236,7 +236,7 @@ def validate_csv_bulk_imports( # noqa: C901
                {"type": row["type"], "key": row["key"], "value": row["value"]}
            )

-       if row_num == 0 or row_num == 1:
+       if row_num in [0, 1]:
            raise typer.BadParameter("Empty CSV file")

    paths_to_remove = [x for x in csv_dict if x not in note_paths]
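The `row_num` change above (and the equality-chain rewrites in `_log_formatter` and in the `Note` methods below) all swap repeated `==` comparisons against one variable for a single membership test — likely a linter-driven cleanup along the lines of pylint's consider-using-in rule, though the diff itself does not say. The two forms are equivalent:

```python
row_num = 1

# Before: the same variable compared twice.
if row_num == 0 or row_num == 1:
    print("Empty CSV file")

# After: one membership test; a tuple or set works the same way.
if row_num in [0, 1]:
    print("Empty CSV file")
```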
@@ -186,10 +186,10 @@ class Note:
                is_regex=True,
            )

-       return False
+       return False # type: ignore [unreachable]

    def _edit_inline_metadata(
-       self, source: InlineField, new_key: str, new_value: str = None
+       self, source: InlineField, new_key: str, new_value: str | None = None
    ) -> InlineField:
        """Edit an inline metadata field. Takes an InlineField object and a new key and/or value and edits the inline metadata in the object and note accordingly.


@@ -235,7 +235,11 @@ class Note:
        return new_inline_field

    def _find_matching_fields(
-       self, meta_type: MetadataType, key: str = None, value: str = None, is_regex: bool = False
+       self,
+       meta_type: MetadataType,
+       key: str | None = None,
+       value: str | None = None,
+       is_regex: bool = False,
    ) -> list[InlineField]:
        """Create a list of InlineField objects matching the specified key and/or value.


@@ -295,7 +299,7 @@ class Note:
        return matching_inline_fields

    def _update_inline_metadata(
-       self, source: InlineField, new_key: str = None, new_value: str = None
+       self, source: InlineField, new_key: str | None = None, new_value: str | None = None
    ) -> bool:
        """Update an inline metadata field. Takes an InlineField object and a new key and/or value and updates the inline metadata in the object and note accordingly.


@@ -354,8 +358,8 @@ class Note:
    def add_metadata(
        self,
        meta_type: MetadataType,
-       added_key: str = None,
-       added_value: str = None,
+       added_key: str | None = None,
+       added_value: str | None = None,
        location: InsertLocation = None,
    ) -> bool:
        """Add metadata to the note if it does not already exist. This method adds specified metadata to the appropriate MetadataType object AND writes the new metadata to the note's file.
@@ -428,7 +432,7 @@ class Note:
            )
            raise typer.Exit(code=1)

-   def commit(self, path: Path = None) -> None:
+   def commit(self, path: Path | None = None) -> None:
        """Write the note's new content to disk. This is a destructive action.

        Args:

@@ -444,7 +448,8 @@ class Note:

        try:
            log.trace(f"Writing note {p} to disk")
-           p.write_text(self.file_content)
+           content_bytes = bytes(self.file_content, self.encoding)
+           p.write_bytes(content_bytes)
        except FileNotFoundError as e:
            alerts.error(f"Note {p} not found. Exiting")
            raise typer.Exit(code=1) from e
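The two hunks above carry the actual v0.12.1 bug fix (#59): `Path.write_text()` without an `encoding` argument falls back to the platform default, so a note read from a CP1250-encoded file could be written back in a different encoding. Encoding the content with the note's own detected encoding and writing bytes round-trips the file unchanged. A minimal standalone sketch of the same idea, using `charset_normalizer.from_path()` the way the tests below do (the file path is illustrative, not from the diff):

```python
from pathlib import Path

from charset_normalizer import from_path  # already a dependency of this project


def rewrite_preserving_encoding(path: Path, old: str, new: str) -> None:
    """Replace text in a file and write it back in the file's original encoding."""
    # Detect the current encoding, e.g. "cp1250" or "utf_8".
    encoding = from_path(path).best().encoding
    content = path.read_text(encoding=encoding)

    # write_text() without an explicit encoding would use the platform default;
    # encoding to bytes ourselves keeps the original encoding intact.
    path.write_bytes(bytes(content.replace(old, new), encoding))


rewrite_preserving_encoding(Path("note.md"), "Heading 1", "Heading 2")  # any markdown note
```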
@@ -453,7 +458,7 @@ class Note:
        self,
        meta_type: MetadataType,
        search_key: str,
-       search_value: str = None,
+       search_value: str | None = None,
        is_regex: bool = False,
    ) -> bool:
        """Check if a note contains the specified metadata.

@@ -477,7 +482,7 @@ class Note:
                MetadataType.FRONTMATTER, search_key, search_value, is_regex
            ) or self.contains_metadata(MetadataType.INLINE, search_key, search_value, is_regex)

-       if meta_type == MetadataType.FRONTMATTER or meta_type == MetadataType.INLINE:
+       if meta_type in [MetadataType.FRONTMATTER, MetadataType.INLINE]:
            if search_key is None or re.match(r"^\s*$", search_key):
                return False

@@ -514,7 +519,11 @@ class Note:
        return False

    def delete_metadata( # noqa: PLR0912, C901
-       self, meta_type: MetadataType, key: str = None, value: str = None, is_regex: bool = False
+       self,
+       meta_type: MetadataType,
+       key: str | None = None,
+       value: str | None = None,
+       is_regex: bool = False,
    ) -> bool:
        """Delete specified metadata from the note. Removes the metadata from the note and the metadata list. When a key is provided without a value, all values associated with that key are deleted.


@@ -554,8 +563,7 @@ class Note:
                meta_to_delete.extend(
                    self._find_matching_fields(MetadataType.TAGS, key, value, is_regex)
                )

-       elif meta_type == MetadataType.FRONTMATTER or meta_type == MetadataType.INLINE:
+       elif meta_type in {MetadataType.FRONTMATTER, MetadataType.INLINE}:
            if key is None or re.match(r"^\s*$", key):
                log.error("A valid key must be specified.")
                raise typer.Exit(code=1)

@@ -661,7 +669,7 @@ class Note:
        """Print the note to the console."""
        console_no_markup.print(self.file_content)

-   def rename_metadata(self, key: str, value_1: str, value_2: str = None) -> bool:
+   def rename_metadata(self, key: str, value_1: str, value_2: str | None = None) -> bool:
        """Rename a key or key-value pair in the note's InlineMetadata and Frontmatter objects and the content of the note.

        If no value is provided, will rename the entire specified key.
@@ -762,7 +770,9 @@ class Note:
        if not is_regex:
            pattern = re.escape(pattern)

-       self.file_content, num_subs = re.subn(pattern, replacement, self.file_content, re.MULTILINE)
+       self.file_content, num_subs = re.subn(
+           pattern, replacement, self.file_content, flags=re.MULTILINE
+       )

        return num_subs > 0
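The hunk above fixes a standard-library pitfall rather than a style issue: the fourth positional parameter of `re.subn()` (and `re.sub()`) is `count`, not `flags`. Passing `re.MULTILINE` positionally therefore capped the number of substitutions at `int(re.MULTILINE)` (8) and never actually enabled multi-line matching. A quick illustration:

```python
import re

text = "\n".join(f"line {i}" for i in range(10))

# Positional: re.MULTILINE (== 8) is silently treated as `count`, so "^" only
# anchors at the very start of the string and at most 8 replacements occur.
_, n_positional = re.subn(r"^line", "row", text, re.MULTILINE)

# Keyword: multi-line mode is actually enabled and "^" matches every line.
_, n_keyword = re.subn(r"^line", "row", text, flags=re.MULTILINE)

print(n_positional, n_keyword)  # 1 10
```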
@@ -770,8 +780,8 @@ class Note:
        self,
        begin: MetadataType,
        end: MetadataType,
-       key: str = None,
-       value: str = None,
+       key: str | None = None,
+       value: str | None = None,
        location: InsertLocation = InsertLocation.BOTTOM,
    ) -> bool:
        """Move metadata from one metadata object to another. i.e. Frontmatter to InlineMetadata or vice versa.

@@ -793,7 +803,7 @@ class Note:
        if begin == MetadataType.FRONTMATTER and end == MetadataType.FRONTMATTER:
            return False

-       if begin == MetadataType.TAGS or end == MetadataType.TAGS:
+       if MetadataType.TAGS in {begin, end}:
            # TODO: Implement transposing to and from tags
            return False


@@ -63,7 +63,7 @@ class Questions:

        return True

-   def __init__(self, vault: Vault = None, key: str = None) -> None:
+   def __init__(self, vault: Vault = None, key: str | None = None) -> None:
        """Initialize the class.

        Args:


@@ -231,8 +231,8 @@ class Vault:
    def add_metadata(
        self,
        meta_type: MetadataType,
-       key: str = None,
-       value: str = None,
+       key: str | None = None,
+       value: str | None = None,
        location: InsertLocation = None,
    ) -> int:
        """Add metadata to all notes in the vault which do not already contain it.

@@ -303,7 +303,7 @@ class Vault:
            _note.commit()

    def contains_metadata(
-       self, meta_type: MetadataType, key: str, value: str = None, is_regex: bool = False
+       self, meta_type: MetadataType, key: str, value: str | None = None, is_regex: bool = False
    ) -> bool:
        """Check if the vault contains metadata.


@@ -374,7 +374,7 @@ class Vault:
    def delete_metadata(
        self,
        key: str,
-       value: str = None,
+       value: str | None = None,
        meta_type: MetadataType = MetadataType.ALL,
        is_regex: bool = False,
    ) -> int:

@@ -490,8 +490,7 @@ class Vault:
            if _note.has_changes():
                changed_notes.append(_note)

-       changed_notes = sorted(changed_notes, key=lambda x: x.note_path)
-       return changed_notes
+       return sorted(changed_notes, key=lambda x: x.note_path)

    def info(self) -> None:
        """Print information about the vault."""

@@ -613,7 +612,7 @@ class Vault:

        return num_changed

-   def rename_metadata(self, key: str, value_1: str, value_2: str = None) -> int:
+   def rename_metadata(self, key: str, value_1: str, value_2: str | None = None) -> int:
        """Rename a key or key-value pair in the note's metadata.

        If no value is provided, will rename an entire key.

@@ -642,8 +641,8 @@ class Vault:
        self,
        begin: MetadataType,
        end: MetadataType,
-       key: str = None,
-       value: str = None,
+       key: str | None = None,
+       value: str | None = None,
        location: InsertLocation = None,
    ) -> int:
        """Transpose metadata from one type to another.

@@ -662,8 +661,8 @@ class Vault:
            location = self.insert_location

        num_changed = 0
-       for _note in self.notes_in_scope:
-           if _note.transpose_metadata(
+       for note in self.notes_in_scope:
+           if note.transpose_metadata(
                begin=begin,
                end=end,
                key=key,

@@ -674,7 +673,7 @@ class Vault:

        if num_changed > 0:
            self._rebuild_vault_metadata()
-       log.trace(f"Transposed metadata in {_note.note_path}")
+       log.trace(f"Transposed metadata in {note.note_path}")

        return num_changed
44  tests/fixtures/CP1250.md  (vendored, new file)

@@ -0,0 +1,44 @@
---
date_created: 2022-12-22 # confirm dates are translated to strings
tags:
- foo
- bar
frontmatter1: foo
frontmatter2: ["bar", "baz", "qux"]
??: ??
# Nested lists are not supported
# invalid:
# invalid:
# - invalid
# - invalid2
french1: "Voix ambiguë d'un cour qui, au zéphyr, préfere les jattes de kiwis"
---

# Heading 1

inline1:: foo
inline1::bar baz
**inline2**:: [[foo]]
_inline3_:: value
??::??
key with space:: foo
french2:: Voix ambiguë d'un cour qui, au zéphyr, préfere les jattes de kiwis.

> inline4:: foo

inline5::

foo bar [intext1:: foo] baz `#invalid` qux (intext2:: foo) foobar. #tag1 Foo bar #tag2 baz qux. [[link]]

The quick brown fox jumped over the lazy dog.

# tag3

---

## invalid: invalid

```python
invalid:: invalid
#invalid
```
2  tests/fixtures/test_vault/sample_note.md  (vendored)

@@ -11,6 +11,7 @@ frontmatter2: ["bar", "baz", "qux"]
# invalid:
# - invalid
# - invalid2
+ french1: "Voix ambiguë d'un cœur qui, au zéphyr, préfère les jattes de kiwis"
---

# Heading 1

@@ -21,6 +22,7 @@ inline1::bar baz
_inline3_:: value
🌱::🌿
key with space:: foo
+ french2:: Voix ambiguë d'un cœur qui, au zéphyr, préfère les jattes de kiwis.

> inline4:: foo
@@ -34,7 +34,7 @@ def test_create_note_1(sample_note):
    assert note.note_path == Path(sample_note)
    assert note.dry_run is True
    assert note.encoding == "utf_8"
-   assert len(note.metadata) == 20
+   assert len(note.metadata) == 22

    with sample_note.open():
        content = sample_note.read_text()
@@ -1,9 +1,11 @@
# type: ignore
"""Test for metadata methods within Note class."""
+ import shutil
from pathlib import Path

import pytest
import typer
+ from charset_normalizer import from_path
from tests.helpers import Regex

from obsidian_metadata._utils.console import console

@@ -97,8 +99,8 @@ def test__edit_inline_metadata_2(tmp_path, content, new_key, new_value, new_cont
@pytest.mark.parametrize(
    ("meta_type", "key", "value", "is_regex", "expected"),
    [
-       (MetadataType.FRONTMATTER, None, None, False, 8),
-       (MetadataType.FRONTMATTER, None, None, True, 8),
+       (MetadataType.FRONTMATTER, None, None, False, 9),
+       (MetadataType.FRONTMATTER, None, None, True, 9),
        (MetadataType.FRONTMATTER, "frontmatter1", None, False, 1),
        (MetadataType.FRONTMATTER, r"\w+2", None, True, 3),
        (MetadataType.FRONTMATTER, "frontmatter1", "foo", False, 1),

@@ -108,10 +110,10 @@ def test__edit_inline_metadata_2(tmp_path, content, new_key, new_value, new_cont
        (MetadataType.FRONTMATTER, "frontmatterXX", None, False, 0),
        (MetadataType.FRONTMATTER, r"^\d", "XXX", False, 0),
        (MetadataType.FRONTMATTER, "frontmatterXX", r"^\d+", False, 0),
-       (MetadataType.INLINE, None, None, False, 10),
-       (MetadataType.INLINE, None, None, True, 10),
+       (MetadataType.INLINE, None, None, False, 11),
+       (MetadataType.INLINE, None, None, True, 11),
        (MetadataType.INLINE, "inline1", None, False, 2),
-       (MetadataType.INLINE, r"\w+2", None, True, 2),
+       (MetadataType.INLINE, r"\w+2", None, True, 3),
        (MetadataType.INLINE, "inline1", "foo", False, 1),
        (MetadataType.INLINE, "inline1", r"\w+", True, 2),
        (MetadataType.INLINE, r"\w+1", "foo", True, 2),

@@ -155,7 +157,7 @@ def test__update_inline_metadata_1(sample_note, meta_type):
    THEN raise an error
    """
    note = Note(note_path=sample_note)
-   source_field = [x for x in note.metadata][0]
+   source_field = next(iter(note.metadata))
    source_field.meta_type = meta_type

    with pytest.raises(typer.Exit):

@@ -170,7 +172,7 @@ def test__update_inline_metadata_2(sample_note):
    THEN raise an error
    """
    note = Note(note_path=sample_note)
-   source_field = [x for x in note.metadata][0]
+   source_field = next(iter(note.metadata))
    source_field.meta_type = MetadataType.INLINE

    with pytest.raises(typer.Exit):

@@ -198,11 +200,11 @@ def test__update_inline_metadata_3(
    note = Note(note_path=sample_note)

    if orig_key is None:
-       source_inlinefield = [
+       source_inlinefield = next(
            x
            for x in note.metadata
            if x.meta_type == MetadataType.INLINE and x.normalized_value == orig_value
-       ][0]
+       )
        assert (
            note._update_inline_metadata(source_inlinefield, new_key=new_key, new_value=new_value)
            is True

@@ -210,11 +212,11 @@ def test__update_inline_metadata_3(
        assert source_inlinefield.normalized_value == new_value

    elif orig_value is None:
-       source_inlinefield = [
+       source_inlinefield = next(
            x
            for x in note.metadata
            if x.meta_type == MetadataType.INLINE and x.normalized_key == orig_key
-       ][0]
+       )
        assert (
            note._update_inline_metadata(source_inlinefield, new_key=new_key, new_value=new_value)
            is True

@@ -222,13 +224,13 @@ def test__update_inline_metadata_3(
        assert source_inlinefield.normalized_key == new_key.lower()

    else:
-       source_inlinefield = [
+       source_inlinefield = next(
            x
            for x in note.metadata
            if x.meta_type == MetadataType.INLINE
            if x.normalized_key == orig_key
            if x.normalized_value == orig_value
-       ][0]
+       )
        assert (
            note._update_inline_metadata(source_inlinefield, new_key=new_key, new_value=new_value)
            is True
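The test rewrites above replace the `[x for x in ...][0]` pattern with `next(iter(...))` or `next(<generator>)`. Both yield the first matching element, but the generator form stops at the first hit instead of building the whole list, and it raises `StopIteration` (or returns a supplied default) rather than `IndexError` when nothing matches. A small self-contained illustration:

```python
fields = [("frontmatter1", "foo"), ("inline1", "bar"), ("inline1", "baz")]

# Old pattern: materialise the full filtered list, then index it.
first_old = [f for f in fields if f[0] == "inline1"][0]

# New pattern: stop as soon as the first match is found.
first_new = next(f for f in fields if f[0] == "inline1")

assert first_old == first_new == ("inline1", "bar")

# next() also accepts a default instead of raising StopIteration.
assert next((f for f in fields if f[0] == "missing"), None) is None
```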
@@ -329,6 +331,18 @@ def test_add_metadata_6(sample_note):
        (MetadataType.INLINE, "test", "value", "test:: value"),
        (MetadataType.TAGS, None, "testtag", "#testtag"),
        (MetadataType.TAGS, None, "#testtag", "#testtag"),
+       (
+           MetadataType.INLINE,
+           "french3",
+           "Voix ambiguë d'un cœur qui, au zéphyr, préfère les jattes de kiwis.",
+           "french3:: Voix ambiguë d'un cœur qui, au zéphyr, préfère les jattes de kiwis.",
+       ),
+       (
+           MetadataType.FRONTMATTER,
+           "french3",
+           "Voix ambiguë d'un cœur qui, au zéphyr, préfère les jattes de kiwis.",
+           "french3: Voix ambiguë d'un cœur qui, au zéphyr, préfère les jattes de kiwis.",
+       ),
    ],
)
def test_add_metadata_7(sample_note, metatype, key, value, expected):

@@ -362,15 +376,14 @@ def test_commit_1(sample_note, tmp_path) -> None:
    note.sub(pattern="Heading 1", replacement="Heading 2")

    note.commit()
-   note = Note(note_path=sample_note)
-   assert "Heading 2" in note.file_content
-   assert "Heading 1" not in note.file_content
+   assert "Heading 2" in sample_note.read_text()
+   assert "Heading 1" not in sample_note.read_text()

    new_path = Path(tmp_path / "new_note.md")

    note.commit(new_path)
-   note2 = Note(note_path=new_path)
-   assert "Heading 2" in note2.file_content
-   assert "Heading 1" not in note2.file_content
+   assert "Heading 2" in new_path.read_text()
+   assert "Heading 1" not in new_path.read_text()


def test_commit_2(sample_note) -> None:

@@ -382,10 +395,37 @@ def test_commit_2(sample_note) -> None:
    """
    note = Note(note_path=sample_note, dry_run=True)
    note.sub(pattern="Heading 1", replacement="Heading 2")

    note.commit()
-   note = Note(note_path=sample_note)
-   assert "Heading 1" in note.file_content

+   assert "Heading 2" in note.file_content
+   assert "Heading 1" in sample_note.read_text()


+ def test_commit_3(tmp_path) -> None:
+     """Test that commit() method preserves encoding.
+
+     GIVEN a file in CP1250 encoding
+     WHEN the file is written to
+     THEN the file is output in its original encoding.
+     """
+     source_file: Path = Path("tests/fixtures/CP1250.md")
+     dest_file: Path = Path(tmp_path / source_file.name)
+     shutil.copy(source_file, dest_file)
+
+     # Assert that the file is in CP1250 encoding
+     assert from_path(dest_file).best().encoding == "cp1250"
+
+     # Create the note object
+     note = Note(note_path=dest_file)
+     assert note.encoding == "cp1250"
+
+     # Modify and commit the note
+     note.sub(pattern="Heading 1", replacement="Heading 2")
+     note.commit()
+
+     # Assert that the file is still in CP1250 encoding
+     assert from_path(dest_file).best().encoding == "cp1250"
+     assert "Heading 2" in dest_file.read_text(encoding="cp1250")


@pytest.mark.parametrize(
@@ -439,8 +479,6 @@ def test_commit_2(sample_note) -> None:
        (MetadataType.ALL, None, r"^\d+", True, False),
        (MetadataType.ALL, "frontmatter1", "foo", False, True),
        (MetadataType.ALL, r"^f\w+1", r"[a-z]{3}", True, True),
-       (MetadataType.ALL, "frontmatter1", "foo", False, True),
-       (MetadataType.ALL, r"^f\w+1", r"[a-z]{3}", True, True),
        (MetadataType.ALL, "inline1", "foo", False, True),
        (MetadataType.ALL, r"^i\w+1", r"[a-z]{3}", True, True),
        (MetadataType.ALL, None, "#tag1", False, True),

@@ -563,7 +601,6 @@ def test_delete_metadata_2(sample_note, meta_type, key, value, is_regex):
        (MetadataType.META, r"\d{8}", None, True),
        (MetadataType.FRONTMATTER, r"\d{8}", None, True),
        (MetadataType.INLINE, r"\d{8}", None, True),
-       (MetadataType.META, r"\d{8}", None, True),
        (MetadataType.META, "frontmatter1", r"\d{8}", True),
        (MetadataType.FRONTMATTER, "frontmatter1", r"\d{8}", True),
        (MetadataType.INLINE, "inline1", r"\d{8}", True),

@@ -835,7 +872,7 @@ def test_transpose_metadata_1(sample_note, begin, end, key, value, location):
            None,
            None,
            InsertLocation.BOTTOM,
-           "```\n\ninline1:: bar baz\ninline1:: foo\ninline2:: [[foo]]\ninline3:: value\ninline4:: foo\ninline5::\nintext1:: foo\nintext2:: foo\nkey with space:: foo\n🌱:: 🌿",
+           "```\n\nfrench2:: Voix ambiguë d'un cœur qui, au zéphyr, préfère les jattes de kiwis.\ninline1:: bar baz\ninline1:: foo\ninline2:: [[foo]]\ninline3:: value\ninline4:: foo\ninline5::\nintext1:: foo\nintext2:: foo\nkey with space:: foo\n🌱:: 🌿",
        ),
    ],
)

@@ -978,7 +1015,7 @@ no frontmatter
    )
    new_note = """\
---
key: value
french: Voix ambiguë d'un cœur qui, au zéphyr, préfère les jattes de kiwis.
---

# Header1

@@ -986,7 +1023,11 @@ inline:: only
no frontmatter
"""
    note = Note(note_path=note_path)
    note.add_metadata(meta_type=MetadataType.FRONTMATTER, added_key="key", added_value="value")
+   note.add_metadata(
+       meta_type=MetadataType.FRONTMATTER,
+       added_key="french",
+       added_value="Voix ambiguë d'un cœur qui, au zéphyr, préfère les jattes de kiwis.",
+   )
    assert note.write_frontmatter() is True
    assert note.file_content == new_note

@@ -31,12 +31,18 @@ def test_vault_creation(test_vault, tmp_path):
    assert len(vault.all_notes) == 2
    assert vault.frontmatter == {
        "date_created": ["2022-12-22"],
+       "french1": [
+           "Voix ambiguë d'un cœur qui, au zéphyr, préfère les jattes de kiwis",
+       ],
        "frontmatter1": ["foo"],
        "frontmatter2": ["bar", "baz", "qux"],
        "tags": ["bar", "foo"],
        "🌱": ["🌿"],
    }
    assert vault.inline_meta == {
+       "french2": [
+           "Voix ambiguë d'un cœur qui, au zéphyr, préfère les jattes de kiwis.",
+       ],
        "inline1": ["bar baz", "foo"],
        "inline2": ["[[foo]]"],
        "inline3": ["value"],