mirror of
https://github.com/natelandau/obsidian-metadata.git
synced 2025-11-08 05:03:47 -05:00
feat: initial application release
This commit is contained in:
32
.devcontainer/Dockerfile
Normal file
32
.devcontainer/Dockerfile
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
|
||||||
|
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.231.6/containers/python-3/.devcontainer/base.Dockerfile
|
||||||
|
|
||||||
|
# [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster
|
||||||
|
ARG VARIANT="3.10-bullseye"
|
||||||
|
FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT}
|
||||||
|
|
||||||
|
# Poetry
|
||||||
|
ARG POETRY_VERSION="none"
|
||||||
|
RUN if [ "${POETRY_VERSION}" != "none" ]; then su vscode -c "umask 0002 && pip3 install poetry==${POETRY_VERSION}"; fi
|
||||||
|
|
||||||
|
# [Optional] If your pip requirements rarely change, uncomment this section to add them to the image.
|
||||||
|
# COPY requirements.txt /tmp/pip-tmp/
|
||||||
|
# RUN pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt \
|
||||||
|
# && rm -rf /tmp/pip-tmp
|
||||||
|
|
||||||
|
# [Optional] Uncomment this section to install additional OS packages.
|
||||||
|
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
|
||||||
|
# && apt-get -y install --no-install-recommends <your-package-list-here>
|
||||||
|
|
||||||
|
# [Optional] Uncomment this line to install global node packages.
|
||||||
|
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1
|
||||||
|
|
||||||
|
# Update locale settings
|
||||||
|
RUN apt-get update && \
|
||||||
|
export DEBIAN_FRONTEND=noninteractive && \
|
||||||
|
apt-get install -y \
|
||||||
|
locales && \
|
||||||
|
rm -r /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
RUN sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && \
|
||||||
|
dpkg-reconfigure --frontend=noninteractive locales
|
||||||
99
.devcontainer/devcontainer.json
Normal file
99
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,99 @@
|
|||||||
|
{
|
||||||
|
"name": "obsidian-metadata",
|
||||||
|
"build": {
|
||||||
|
"dockerfile": "Dockerfile",
|
||||||
|
"context": "..",
|
||||||
|
"args": {
|
||||||
|
// Update 'VARIANT' to pick a Python version: 3, 3.10, 3.9, 3.8, 3.7, 3.6
|
||||||
|
// Append -bullseye or -buster to pin to an OS version.
|
||||||
|
// Use -bullseye variants on local on arm64/Apple Silicon.
|
||||||
|
"VARIANT": "3.10-bullseye",
|
||||||
|
"POETRY_VERSION": "1.2.2"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// Set *default* container specific settings.json values on container create.
|
||||||
|
"settings": {
|
||||||
|
"autoDocstring.startOnNewLine": true,
|
||||||
|
"coverage-gutters.coverageFileNames": ["reports/coverage.xml"],
|
||||||
|
"coverage-gutters.showGutterCoverage": false,
|
||||||
|
"coverage-gutters.showLineCoverage": true,
|
||||||
|
"coverage-gutters.showRulerCoverage": true,
|
||||||
|
"editor.formatOnSave": true,
|
||||||
|
"editor.codeActionsOnSave": {
|
||||||
|
"source.fixAll": true,
|
||||||
|
"source.organizeImports": true
|
||||||
|
},
|
||||||
|
"editor.rulers": [100],
|
||||||
|
"python.analysis.completeFunctionParens": true,
|
||||||
|
"python.formatting.provider": "black",
|
||||||
|
"python.linting.enabled": true,
|
||||||
|
"python.linting.mypyEnabled": true,
|
||||||
|
"python.linting.mypyPath": "mypy",
|
||||||
|
"python.linting.pylintEnabled": false,
|
||||||
|
"python.terminal.activateEnvInCurrentTerminal": true,
|
||||||
|
"python.terminal.activateEnvironment": false,
|
||||||
|
"python.testing.pytestEnabled": true,
|
||||||
|
"python.linting.mypyArgs": [
|
||||||
|
"--config-file",
|
||||||
|
"pyproject.toml",
|
||||||
|
"--exclude",
|
||||||
|
"'tests/'"
|
||||||
|
],
|
||||||
|
"python.linting.ignorePatterns": [
|
||||||
|
".vscode/**/*.py",
|
||||||
|
".venv/**/*.py"
|
||||||
|
],
|
||||||
|
"python.venvFolders": ["/home/vscode/.cache/pypoetry/virtualenvs"],
|
||||||
|
"ruff.importStrategy": "fromEnvironment",
|
||||||
|
"shellformat.path": "/home/vscode/.local/bin/shfmt",
|
||||||
|
"terminal.integrated.defaultProfile.linux": "zsh",
|
||||||
|
"terminal.integrated.profiles.linux": {
|
||||||
|
"zsh": {
|
||||||
|
"path": "/usr/bin/zsh"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
// Add the IDs of extensions you want installed when the container is created.
|
||||||
|
"extensions": [
|
||||||
|
"ms-python.python",
|
||||||
|
"bierner.markdown-preview-github-styles",
|
||||||
|
"charliermarsh.ruff",
|
||||||
|
"donjayamanne.githistory",
|
||||||
|
"eamodio.gitlens",
|
||||||
|
"fcrespo82.markdown-table-formatter",
|
||||||
|
"foxundermoon.shell-format",
|
||||||
|
"GitHub.copilot",
|
||||||
|
"Gruntfuggly.todo-tree",
|
||||||
|
"mhutchie.git-graph",
|
||||||
|
"njpwerner.autodocstring",
|
||||||
|
"oderwat.indent-rainbow",
|
||||||
|
"redhat.vscode-yaml",
|
||||||
|
"ryanluker.vscode-coverage-gutters",
|
||||||
|
"samuelcolvin.jinjahtml",
|
||||||
|
"shardulm94.trailing-spaces",
|
||||||
|
"streetsidesoftware.code-spell-checker",
|
||||||
|
"tamasfe.even-better-toml",
|
||||||
|
"timonwong.shellcheck",
|
||||||
|
"Tyriar.sort-lines",
|
||||||
|
"visualstudioexptteam.vscodeintellicode",
|
||||||
|
"Chouzz.vscode-better-align",
|
||||||
|
"yzhang.markdown-all-in-one"
|
||||||
|
],
|
||||||
|
"features": {
|
||||||
|
"ghcr.io/devcontainers/features/common-utils:1": {},
|
||||||
|
"ghcr.io/devcontainers/features/git:1": {},
|
||||||
|
"ghcr.io/devcontainers/features/github-cli:1": {},
|
||||||
|
"ghcr.io/devcontainers-contrib/features/yamllint:1": {},
|
||||||
|
"ghcr.io/stuartleeks/dev-container-features/shell-history:0": {}
|
||||||
|
},
|
||||||
|
"remoteUser": "vscode",
|
||||||
|
"postCreateCommand": "bash ./.devcontainer/post-install.sh",
|
||||||
|
"mounts": [
|
||||||
|
// "source=${localEnv:HOME}/.git_stop_words,target=/home/vscode/.git_stop_words,type=bind,consistency=cached",
|
||||||
|
// "source=${localEnv:HOME}/.gitconfig.local,target=/home/vscode/.gitconfig.local,type=bind,consistency=cached",
|
||||||
|
// "source=${localEnv:HOME}/tmp,target=/home/vscode/tmp,type=bind"
|
||||||
|
]
|
||||||
|
|
||||||
|
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||||
|
// "forwardPorts": [],
|
||||||
|
}
|
||||||
1015
.devcontainer/post-install.sh
Executable file
1015
.devcontainer/post-install.sh
Executable file
File diff suppressed because it is too large
Load Diff
68
.github/actions/setup-poetry/action.yml
vendored
Normal file
68
.github/actions/setup-poetry/action.yml
vendored
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
---
|
||||||
|
name: Cached Python and Poetry setup
|
||||||
|
description: Cache Poetry with additional extras key
|
||||||
|
|
||||||
|
inputs:
|
||||||
|
python-version:
|
||||||
|
description: >
|
||||||
|
Version range or exact version of a Python version to use, using SemVer's version range syntax.
|
||||||
|
required: false
|
||||||
|
default: 3.x
|
||||||
|
|
||||||
|
outputs:
|
||||||
|
python-version:
|
||||||
|
description: The installed python version. Useful when given a version range as input.
|
||||||
|
value: ${{ steps.setup-python.outputs.python-version }}
|
||||||
|
cache-hit:
|
||||||
|
description: A boolean value to indicate projects dependencies were cached
|
||||||
|
value: ${{ steps.setup-python.outputs.cache-hit }}
|
||||||
|
poetry-cache-hit:
|
||||||
|
description: A boolean value to indicate Poetry installation was cached
|
||||||
|
value: ${{ steps.pipx-cache.outputs.cache-hit }}
|
||||||
|
|
||||||
|
runs:
|
||||||
|
using: composite
|
||||||
|
steps:
|
||||||
|
- name: Get pipx env vars
|
||||||
|
id: pipx-env-vars
|
||||||
|
shell: bash
|
||||||
|
run: |
|
||||||
|
echo "pipx-home=${PIPX_HOME}" >> $GITHUB_OUTPUT
|
||||||
|
echo "pipx-bin-dir=${PIPX_BIN_DIR}" >> $GITHUB_OUTPUT
|
||||||
|
|
||||||
|
- name: Load pipx cache
|
||||||
|
# If env vars are not defined do not load cache
|
||||||
|
if: >
|
||||||
|
steps.pipx-env-vars.outputs.pipx-home != ''
|
||||||
|
&& steps.pipx-env-vars.outputs.pipx-bin-dir != ''
|
||||||
|
id: pipx-cache
|
||||||
|
uses: actions/cache@v3
|
||||||
|
with:
|
||||||
|
path: |
|
||||||
|
${{ steps.pipx-env-vars.outputs.pipx-home }}/venvs/poetry
|
||||||
|
${{ steps.pipx-env-vars.outputs.pipx-bin-dir }}/poetry
|
||||||
|
key: ${{ runner.os }}-${{ inputs.python-version }}-pipx-${{ hashFiles('**/poetry.lock') }}
|
||||||
|
|
||||||
|
- name: Install poetry
|
||||||
|
# If env vars are not defined or we missed pipx cache, install poetry
|
||||||
|
if: >
|
||||||
|
(
|
||||||
|
steps.pipx-env-vars.outputs.pipx-home == ''
|
||||||
|
&& steps.pipx-env-vars.outputs.pipx-bin-dir == ''
|
||||||
|
)
|
||||||
|
|| steps.pipx-cache.outputs.cache-hit != 'true'
|
||||||
|
shell: bash
|
||||||
|
run: pipx install poetry
|
||||||
|
|
||||||
|
- name: Load poetry cache
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
id: setup-python
|
||||||
|
with:
|
||||||
|
python-version: ${{ inputs.python-version }}
|
||||||
|
cache: poetry
|
||||||
|
|
||||||
|
- name: Install poetry dependencies
|
||||||
|
# If we missed poetry cache install dependencies
|
||||||
|
if: steps.setup-python.outputs.cache-hit != 'true'
|
||||||
|
shell: bash
|
||||||
|
run: poetry install --all-extras
|
||||||
23
.github/dependabot.yml
vendored
Normal file
23
.github/dependabot.yml
vendored
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
---
|
||||||
|
version: 2
|
||||||
|
|
||||||
|
updates:
|
||||||
|
- package-ecosystem: github-actions
|
||||||
|
directory: /
|
||||||
|
schedule:
|
||||||
|
interval: monthly
|
||||||
|
commit-message:
|
||||||
|
prefix: "ci"
|
||||||
|
prefix-development: "ci"
|
||||||
|
include: "scope"
|
||||||
|
- package-ecosystem: pip
|
||||||
|
directory: /
|
||||||
|
schedule:
|
||||||
|
interval: monthly
|
||||||
|
commit-message:
|
||||||
|
prefix: "build"
|
||||||
|
prefix-development: "build"
|
||||||
|
include: "scope"
|
||||||
|
versioning-strategy: lockfile-only
|
||||||
|
allow:
|
||||||
|
- dependency-type: "all"
|
||||||
21
.github/labeler.yml
vendored
Normal file
21
.github/labeler.yml
vendored
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
---
|
||||||
|
github_actions:
|
||||||
|
- ".github/**"
|
||||||
|
dev_container:
|
||||||
|
- ".devcontainer/**"
|
||||||
|
configuration:
|
||||||
|
- ".*"
|
||||||
|
- "*.js"
|
||||||
|
- "*.json"
|
||||||
|
- "*.toml"
|
||||||
|
- "*.yaml"
|
||||||
|
- "*.yml"
|
||||||
|
documentation:
|
||||||
|
- "**.md"
|
||||||
|
- "docs/**"
|
||||||
|
- LICENSE
|
||||||
|
python:
|
||||||
|
- "src/**"
|
||||||
|
- "tests/**"
|
||||||
|
dependencies:
|
||||||
|
- "*.lock"
|
||||||
36
.github/workflows/commit-linter.yml
vendored
Normal file
36
.github/workflows/commit-linter.yml
vendored
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
---
|
||||||
|
name: Commit Linter
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
types: [opened, reopened]
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
permissions: # added using https://github.com/step-security/secure-workflows
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lint-commits:
|
||||||
|
if: "!contains(github.event.head_commit.message, 'bump(release)')"
|
||||||
|
permissions:
|
||||||
|
contents: read # for actions/checkout to fetch code
|
||||||
|
pull-requests: read # for wagoid/commitlint-github-action to get commits in PR
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Harden Runner
|
||||||
|
uses: step-security/harden-runner@18bf8ad2ca49c14cbb28b91346d626ccfb00c518 # v2.1.0
|
||||||
|
with:
|
||||||
|
egress-policy: block
|
||||||
|
allowed-endpoints: >
|
||||||
|
api.github.com:443
|
||||||
|
github.com:443
|
||||||
|
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Lint commits
|
||||||
|
uses: wagoid/commitlint-github-action@v5
|
||||||
91
.github/workflows/create-release.yml
vendored
Normal file
91
.github/workflows/create-release.yml
vendored
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
---
|
||||||
|
name: Create Release
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- "v*" # Push events to matching v*, i.e. v1.0, v20.15.10
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: write
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
autorelease:
|
||||||
|
name: Create Release
|
||||||
|
runs-on: "ubuntu-latest"
|
||||||
|
strategy:
|
||||||
|
fail-fast: true
|
||||||
|
matrix:
|
||||||
|
python-version: ["3.11"]
|
||||||
|
steps:
|
||||||
|
- uses: step-security/harden-runner@18bf8ad2ca49c14cbb28b91346d626ccfb00c518 # v2.1.0
|
||||||
|
with:
|
||||||
|
egress-policy: block
|
||||||
|
disable-sudo: true
|
||||||
|
allowed-endpoints: >
|
||||||
|
api.github.com:443
|
||||||
|
files.pythonhosted.org:443
|
||||||
|
github.com:443
|
||||||
|
install.python-poetry.org:443
|
||||||
|
pypi.org:443
|
||||||
|
python-poetry.org:443
|
||||||
|
uploads.github.com:443
|
||||||
|
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
with:
|
||||||
|
fetch-depth: 0
|
||||||
|
|
||||||
|
- name: Setup Python and Poetry
|
||||||
|
uses: ./.github/actions/setup-poetry
|
||||||
|
|
||||||
|
- name: Add version to environment vars
|
||||||
|
run: |
|
||||||
|
PROJECT_VERSION=$(poetry version --short)
|
||||||
|
echo "PROJECT_VERSION=$PROJECT_VERSION" >> $GITHUB_ENV
|
||||||
|
|
||||||
|
# ----------------------------------------------
|
||||||
|
# Confirm we did, in fact, update the version
|
||||||
|
# ----------------------------------------------
|
||||||
|
|
||||||
|
- name: Check if tag version matches project version
|
||||||
|
run: |
|
||||||
|
TAG=$(git describe HEAD --tags --abbrev=0)
|
||||||
|
echo $TAG
|
||||||
|
echo $PROJECT_VERSION
|
||||||
|
if [[ "$TAG" != "v$PROJECT_VERSION" ]]; then exit 1; fi
|
||||||
|
|
||||||
|
# ----------------------------------------------
|
||||||
|
# Generate release notes
|
||||||
|
# ----------------------------------------------
|
||||||
|
|
||||||
|
- name: Release Notes
|
||||||
|
run: git log $(git describe HEAD~ --tags --abbrev=0)..HEAD --pretty='format:* %h %s' --no-merges >> ".github/RELEASE-TEMPLATE.md"
|
||||||
|
|
||||||
|
# ----------------------------------------------
|
||||||
|
# Test and then build the package
|
||||||
|
# ----------------------------------------------
|
||||||
|
- name: run poetry build
|
||||||
|
run: |
|
||||||
|
poetry run poetry check
|
||||||
|
poetry run coverage run
|
||||||
|
poetry build
|
||||||
|
|
||||||
|
# ----------------------------------------------
|
||||||
|
# Build draft release (Note: Will need to manually publish)
|
||||||
|
# ----------------------------------------------
|
||||||
|
|
||||||
|
- name: Create Release Draft
|
||||||
|
uses: softprops/action-gh-release@v1
|
||||||
|
with:
|
||||||
|
body_path: ".github/RELEASE-TEMPLATE.md"
|
||||||
|
draft: true
|
||||||
|
files: |
|
||||||
|
dist/*-${{env.PROJECT_VERSION}}-py3-none-any.whl
|
||||||
|
dist/*-${{env.PROJECT_VERSION}}.tar.gz
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
60
.github/workflows/devcontainer-checker.yml
vendored
Normal file
60
.github/workflows/devcontainer-checker.yml
vendored
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
---
|
||||||
|
name: "Dev Container Checker"
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
pull_request:
|
||||||
|
types: [opened, reopened]
|
||||||
|
paths:
|
||||||
|
- ".devcontainer/**"
|
||||||
|
- ".github/workflows/devcontainer-checker.yml"
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- ".devcontainer/**"
|
||||||
|
- ".github/workflows/devcontainer-checker.yml"
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
dev-container-checker:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: step-security/harden-runner@18bf8ad2ca49c14cbb28b91346d626ccfb00c518 # v2.1.0
|
||||||
|
with:
|
||||||
|
egress-policy: block
|
||||||
|
allowed-endpoints: >
|
||||||
|
api.snapcraft.io:443
|
||||||
|
auth.docker.io:443
|
||||||
|
centralus.data.mcr.microsoft.com:443
|
||||||
|
deb.debian.org:443
|
||||||
|
deb.debian.org:80
|
||||||
|
dl.yarnpkg.com:443
|
||||||
|
eastus.data.mcr.microsoft.com:443
|
||||||
|
files.pythonhosted.org:443
|
||||||
|
ghcr.io:443
|
||||||
|
git.rootprojects.org:443
|
||||||
|
github.com:443
|
||||||
|
mcr.microsoft.com:443
|
||||||
|
nodejs.org:443
|
||||||
|
objects.githubusercontent.com:443
|
||||||
|
pkg-containers.githubusercontent.com:443
|
||||||
|
production.cloudflare.docker.com:443
|
||||||
|
pypi.org:443
|
||||||
|
registry-1.docker.io:443
|
||||||
|
registry.npmjs.org:443
|
||||||
|
webi.sh:443
|
||||||
|
westcentralus.data.mcr.microsoft.com:443
|
||||||
|
westus.data.mcr.microsoft.com:443
|
||||||
|
|
||||||
|
- name: Checkout
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Build and run dev container task
|
||||||
|
uses: devcontainers/ci@v0.2
|
||||||
|
with:
|
||||||
|
runCmd: |
|
||||||
|
poe lint
|
||||||
|
poe test
|
||||||
23
.github/workflows/labeler.yml
vendored
Normal file
23
.github/workflows/labeler.yml
vendored
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
---
|
||||||
|
name: Pull Request Labeler
|
||||||
|
on:
|
||||||
|
- pull_request_target
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
label:
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
pull-requests: write
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Harden Runner
|
||||||
|
uses: step-security/harden-runner@18bf8ad2ca49c14cbb28b91346d626ccfb00c518 # v2.1.0
|
||||||
|
with:
|
||||||
|
egress-policy: block
|
||||||
|
allowed-endpoints: >
|
||||||
|
api.github.com:443
|
||||||
|
github.com:443
|
||||||
|
|
||||||
|
- uses: actions/labeler@v4
|
||||||
|
with:
|
||||||
|
repo-token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
53
.github/workflows/pr-linter.yml
vendored
Normal file
53
.github/workflows/pr-linter.yml
vendored
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
---
|
||||||
|
name: Pull Request Linter
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request_target:
|
||||||
|
types:
|
||||||
|
- opened
|
||||||
|
- edited
|
||||||
|
- synchronize
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
|
||||||
|
permissions: # added using https://github.com/step-security/secure-workflows
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lint:
|
||||||
|
permissions:
|
||||||
|
pull-requests: read # for amannn/action-semantic-pull-request to analyze PRs
|
||||||
|
statuses: write # for amannn/action-semantic-pull-request to mark status of analyzed PR
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Harden Runner
|
||||||
|
uses: step-security/harden-runner@18bf8ad2ca49c14cbb28b91346d626ccfb00c518 # v2.1.0
|
||||||
|
with:
|
||||||
|
egress-policy: block
|
||||||
|
allowed-endpoints: >
|
||||||
|
api.github.com:443
|
||||||
|
|
||||||
|
- name: Lint Pull Request
|
||||||
|
uses: amannn/action-semantic-pull-request@v5
|
||||||
|
with:
|
||||||
|
validateSingleCommit: true
|
||||||
|
wip: true
|
||||||
|
types: |
|
||||||
|
fix
|
||||||
|
feat
|
||||||
|
docs
|
||||||
|
style
|
||||||
|
refactor
|
||||||
|
perf
|
||||||
|
test
|
||||||
|
build
|
||||||
|
ci
|
||||||
|
requireScope: false
|
||||||
|
subjectPattern: ^(?![A-Z]).+$
|
||||||
|
subjectPatternError: |
|
||||||
|
The subject "{subject}" found in the pull request title "{title}"
|
||||||
|
didn't match the configured pattern. Please ensure that the subject
|
||||||
|
doesn't start with an uppercase character.
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
48
.github/workflows/pypi-release.yml
vendored
Normal file
48
.github/workflows/pypi-release.yml
vendored
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
---
|
||||||
|
name: Publish to PyPi
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
release:
|
||||||
|
types:
|
||||||
|
- published
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
publish-to-pypi:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
fail-fast: true
|
||||||
|
matrix:
|
||||||
|
python-version: ["3.11"]
|
||||||
|
steps:
|
||||||
|
- uses: step-security/harden-runner@18bf8ad2ca49c14cbb28b91346d626ccfb00c518 # v2.1.0
|
||||||
|
with:
|
||||||
|
egress-policy: audit
|
||||||
|
disable-sudo: true
|
||||||
|
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Setup Python and Poetry
|
||||||
|
uses: ./.github/actions/setup-poetry
|
||||||
|
|
||||||
|
# ----------------------------------------------
|
||||||
|
# Test and then build the package
|
||||||
|
# ----------------------------------------------
|
||||||
|
- name: run poetry build
|
||||||
|
run: |
|
||||||
|
poetry run poetry check
|
||||||
|
poetry run coverage run
|
||||||
|
|
||||||
|
# ----------------------------------------------
|
||||||
|
# Publish to PyPi
|
||||||
|
# ----------------------------------------------
|
||||||
|
- name: Publish
|
||||||
|
env:
|
||||||
|
PYPI_TOKEN: ${{ secrets.PYPI_TOKEN }}
|
||||||
|
run: |
|
||||||
|
poetry config pypi-token.pypi $PYPI_TOKEN
|
||||||
|
poetry publish --build
|
||||||
92
.github/workflows/python-code-checker.yml
vendored
Normal file
92
.github/workflows/python-code-checker.yml
vendored
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
---
|
||||||
|
name: "Python Code Checker"
|
||||||
|
|
||||||
|
on:
|
||||||
|
workflow_dispatch:
|
||||||
|
push:
|
||||||
|
paths:
|
||||||
|
- ".github/workflows/python-code-checker.yml"
|
||||||
|
- ".github/actions/**"
|
||||||
|
- "src/**"
|
||||||
|
- "tests/**"
|
||||||
|
- "pyproject.toml"
|
||||||
|
- "poetry.lock"
|
||||||
|
pull_request:
|
||||||
|
types: [opened, reopened]
|
||||||
|
paths:
|
||||||
|
- ".github/workflows/python-code-checker.yml"
|
||||||
|
- ".github/actions/**"
|
||||||
|
- "src/**"
|
||||||
|
- "tests/**"
|
||||||
|
- "pyproject.toml"
|
||||||
|
- "poetry.lock"
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.workflow }}-${{ github.ref }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
test-python-code:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
fail-fast: true
|
||||||
|
matrix:
|
||||||
|
python-version: ["3.10", "3.11"]
|
||||||
|
steps:
|
||||||
|
- uses: step-security/harden-runner@18bf8ad2ca49c14cbb28b91346d626ccfb00c518 # v2.1.0
|
||||||
|
with:
|
||||||
|
egress-policy: block
|
||||||
|
disable-sudo: true
|
||||||
|
allowed-endpoints: >
|
||||||
|
api.snapcraft.io:443
|
||||||
|
api.github.com:443
|
||||||
|
codecov.io:443
|
||||||
|
files.pythonhosted.org:443
|
||||||
|
github.com:443
|
||||||
|
install.python-poetry.org:443
|
||||||
|
pypi.org:443
|
||||||
|
python-poetry.org:443
|
||||||
|
storage.googleapis.com:443
|
||||||
|
uploader.codecov.io:443
|
||||||
|
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Setup Python and Poetry
|
||||||
|
uses: ./.github/actions/setup-poetry
|
||||||
|
|
||||||
|
# ----------------------------------------------
|
||||||
|
# run linters
|
||||||
|
# ----------------------------------------------
|
||||||
|
|
||||||
|
- name: Lint with Mypy
|
||||||
|
run: poetry run mypy src/
|
||||||
|
- name: lint with ruff
|
||||||
|
run: poetry run ruff --extend-ignore=I001,D301 src/
|
||||||
|
- name: check pyproject.toml
|
||||||
|
run: poetry run poetry check
|
||||||
|
- name: lint with black
|
||||||
|
run: poetry run black --check src/
|
||||||
|
- name: run vulture
|
||||||
|
run: poetry run vulture src/
|
||||||
|
- name: run interrogate
|
||||||
|
run: poetry run interrogate -c pyproject.toml .
|
||||||
|
|
||||||
|
# ----------------------------------------------
|
||||||
|
# run test suite
|
||||||
|
# ----------------------------------------------
|
||||||
|
- name: Run tests with pytest
|
||||||
|
run: |
|
||||||
|
poetry run coverage run
|
||||||
|
poetry run coverage report
|
||||||
|
poetry run coverage xml
|
||||||
|
# ----------------------------------------------
|
||||||
|
# upload coverage stats
|
||||||
|
# ----------------------------------------------
|
||||||
|
- name: Upload coverage
|
||||||
|
if: github.ref == 'refs/heads/main' && matrix.python-version == '3.11'
|
||||||
|
uses: codecov/codecov-action@v3
|
||||||
|
with:
|
||||||
|
# token: ${{ secrets.CODECOV_TOKEN }} # Only required for private repositories
|
||||||
|
files: reports/coverage.xml
|
||||||
|
fail_ci_if_error: false
|
||||||
53
.gitignore
vendored
Normal file
53
.gitignore
vendored
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
# Scratch folder for temporary files
|
||||||
|
scratch/
|
||||||
|
|
||||||
|
# Caches
|
||||||
|
*_cache/
|
||||||
|
__pycache__/
|
||||||
|
|
||||||
|
# Coverage.py
|
||||||
|
htmlcov/
|
||||||
|
reports/
|
||||||
|
|
||||||
|
# cruft
|
||||||
|
*.rej
|
||||||
|
|
||||||
|
# Data
|
||||||
|
*.csv*
|
||||||
|
*.dat*
|
||||||
|
*.pickle*
|
||||||
|
*.xls*
|
||||||
|
*.zip*
|
||||||
|
|
||||||
|
.envrc
|
||||||
|
.env
|
||||||
|
|
||||||
|
# Jupyter
|
||||||
|
*.ipynb
|
||||||
|
.ipynb_checkpoints/
|
||||||
|
notebooks/
|
||||||
|
|
||||||
|
# macOS
|
||||||
|
.DS_Store
|
||||||
|
|
||||||
|
# mypy
|
||||||
|
.dmypy.json
|
||||||
|
|
||||||
|
# Node.js
|
||||||
|
node_modules/
|
||||||
|
|
||||||
|
# Poetry
|
||||||
|
.venv/
|
||||||
|
dist/
|
||||||
|
|
||||||
|
# PyCharm
|
||||||
|
.idea/
|
||||||
|
|
||||||
|
# pyenv
|
||||||
|
.python-version
|
||||||
|
|
||||||
|
# Python
|
||||||
|
*.py[cdo]
|
||||||
|
|
||||||
|
# act run workflows locally
|
||||||
|
bin/act
|
||||||
125
.pre-commit-config.yaml
Normal file
125
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
---
|
||||||
|
# https://pre-commit.com
|
||||||
|
default_install_hook_types: [commit-msg, pre-commit]
|
||||||
|
default_stages: [commit, manual]
|
||||||
|
fail_fast: true
|
||||||
|
repos:
|
||||||
|
- repo: "https://github.com/commitizen-tools/commitizen"
|
||||||
|
rev: v2.39.1
|
||||||
|
hooks:
|
||||||
|
- id: commitizen
|
||||||
|
- id: commitizen-branch
|
||||||
|
stages:
|
||||||
|
- post-commit
|
||||||
|
- push
|
||||||
|
|
||||||
|
- repo: "https://github.com/pre-commit/pygrep-hooks"
|
||||||
|
rev: v1.10.0
|
||||||
|
hooks:
|
||||||
|
- id: python-check-mock-methods
|
||||||
|
- id: python-no-eval
|
||||||
|
- id: python-no-log-warn
|
||||||
|
- id: python-use-type-annotations
|
||||||
|
- id: rst-backticks
|
||||||
|
- id: rst-directive-colons
|
||||||
|
- id: rst-inline-touching-normal
|
||||||
|
- id: text-unicode-replacement-char
|
||||||
|
|
||||||
|
- repo: "https://github.com/pre-commit/pre-commit-hooks"
|
||||||
|
rev: v4.4.0
|
||||||
|
hooks:
|
||||||
|
- id: check-added-large-files
|
||||||
|
- id: check-ast
|
||||||
|
- id: check-builtin-literals
|
||||||
|
- id: check-case-conflict
|
||||||
|
- id: check-docstring-first
|
||||||
|
- id: check-json
|
||||||
|
exclude: .devcontainer/|.vscode/
|
||||||
|
- id: check-merge-conflict
|
||||||
|
- id: check-shebang-scripts-are-executable
|
||||||
|
- id: check-symlinks
|
||||||
|
- id: check-toml
|
||||||
|
- id: check-vcs-permalinks
|
||||||
|
- id: check-xml
|
||||||
|
- id: check-yaml
|
||||||
|
- id: debug-statements
|
||||||
|
- id: detect-private-key
|
||||||
|
- id: fix-byte-order-marker
|
||||||
|
- id: mixed-line-ending
|
||||||
|
- id: trailing-whitespace
|
||||||
|
types: [python]
|
||||||
|
args: [--markdown-linebreak-ext=md]
|
||||||
|
- id: end-of-file-fixer
|
||||||
|
types: [python]
|
||||||
|
|
||||||
|
- repo: "https://github.com/adrienverge/yamllint.git"
|
||||||
|
rev: v1.29.0
|
||||||
|
hooks:
|
||||||
|
- id: yamllint
|
||||||
|
files: ^.*\.(yaml|yml)$
|
||||||
|
entry: yamllint --strict --config-file .yamllint.yml
|
||||||
|
|
||||||
|
- repo: "https://github.com/charliermarsh/ruff-pre-commit"
|
||||||
|
rev: "v0.0.229"
|
||||||
|
hooks:
|
||||||
|
- id: ruff
|
||||||
|
args: ["--extend-ignore", "I001,D301,D401,PLR2004"]
|
||||||
|
|
||||||
|
- repo: "https://github.com/jendrikseipp/vulture"
|
||||||
|
rev: "v2.7"
|
||||||
|
hooks:
|
||||||
|
- id: vulture
|
||||||
|
|
||||||
|
- repo: local
|
||||||
|
hooks:
|
||||||
|
- id: custom
|
||||||
|
name: custom pre-commit script
|
||||||
|
entry: scripts/pre-commit-hook.sh
|
||||||
|
language: system
|
||||||
|
|
||||||
|
- id: black
|
||||||
|
name: black
|
||||||
|
entry: black
|
||||||
|
require_serial: true
|
||||||
|
language: system
|
||||||
|
types: [python]
|
||||||
|
|
||||||
|
- id: shellcheck
|
||||||
|
name: shellcheck
|
||||||
|
entry: shellcheck --check-sourced --severity=warning
|
||||||
|
language: system
|
||||||
|
types: [shell]
|
||||||
|
|
||||||
|
- id: poetry-check
|
||||||
|
name: poetry check
|
||||||
|
entry: poetry check
|
||||||
|
language: system
|
||||||
|
files: pyproject.toml
|
||||||
|
pass_filenames: false
|
||||||
|
|
||||||
|
- id: interrogate
|
||||||
|
name: interrogate check
|
||||||
|
entry: interrogate -c pyproject.toml src/
|
||||||
|
language: system
|
||||||
|
types: [python]
|
||||||
|
pass_filenames: false
|
||||||
|
|
||||||
|
- id: mypy
|
||||||
|
name: mypy
|
||||||
|
entry: mypy --config-file pyproject.toml
|
||||||
|
exclude: tests/
|
||||||
|
language: system
|
||||||
|
types: [python]
|
||||||
|
|
||||||
|
- id: pytest
|
||||||
|
name: pytest
|
||||||
|
entry: poe test
|
||||||
|
language: system
|
||||||
|
pass_filenames: false
|
||||||
|
files: |
|
||||||
|
(?x)^(
|
||||||
|
src/|
|
||||||
|
tests/|
|
||||||
|
poetry\.lock|
|
||||||
|
pyproject\.toml
|
||||||
|
)
|
||||||
20
.vscode/launch.json
vendored
Normal file
20
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
{
|
||||||
|
// Use IntelliSense to learn about possible attributes.
|
||||||
|
// Hover to view descriptions of existing attributes.
|
||||||
|
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
|
||||||
|
"version": "0.2.0",
|
||||||
|
"configurations": [
|
||||||
|
{
|
||||||
|
"name": "Python: CLi application",
|
||||||
|
"type": "python",
|
||||||
|
"request": "launch",
|
||||||
|
"program": "${workspaceFolder}/src/obsidian_metadata/cli.py",
|
||||||
|
"args": [
|
||||||
|
"--config-file",
|
||||||
|
"${userHome}/.obsidian_metadata.toml",
|
||||||
|
],
|
||||||
|
"console": "integratedTerminal",
|
||||||
|
"justMyCode": true
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
33
.yamllint.yml
Normal file
33
.yamllint.yml
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
---
|
||||||
|
# Find full documentation at: https://yamllint.readthedocs.io/en/stable/index.html
|
||||||
|
extends: default
|
||||||
|
locale: en_US.UTF-8
|
||||||
|
|
||||||
|
ignore: |
|
||||||
|
.venv
|
||||||
|
|
||||||
|
rules:
|
||||||
|
braces:
|
||||||
|
level: error
|
||||||
|
max-spaces-inside: 1
|
||||||
|
min-spaces-inside: 1
|
||||||
|
comments-indentation: disable
|
||||||
|
comments:
|
||||||
|
min-spaces-from-content: 1
|
||||||
|
indentation:
|
||||||
|
spaces: consistent
|
||||||
|
indent-sequences: true
|
||||||
|
check-multi-line-strings: false
|
||||||
|
line-length: disable
|
||||||
|
quoted-strings:
|
||||||
|
quote-type: any
|
||||||
|
required: false
|
||||||
|
extra-required:
|
||||||
|
- "^http://"
|
||||||
|
- "^https://"
|
||||||
|
- "ftp://"
|
||||||
|
- 'ssh \w.*'
|
||||||
|
extra-allowed: []
|
||||||
|
truthy:
|
||||||
|
level: error
|
||||||
|
check-keys: false
|
||||||
83
README.md
Normal file
83
README.md
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
[](https://github.com/natelandau/obsidian-metadata/actions/workflows/python-code-checker.yml) [](https://codecov.io/gh/natelandau/obsidian-metadata)
|
||||||
|
# obsidian-metadata
|
||||||
|
A script to make batch updates to metadata in an Obsidian vault. Provides the following capabilities:
|
||||||
|
|
||||||
|
- in-text tag: delete every occurrence
|
||||||
|
- in-text tags: Rename tag (`#tag1` -> `#tag2`)
|
||||||
|
- frontmatter: Delete a key matching a regex pattern and all associated values
|
||||||
|
- frontmatter: Rename a key
|
||||||
|
- frontmatter: Delete a value matching a regex pattern from a specified key
|
||||||
|
- frontmatter: Rename a value from a specified key
|
||||||
|
- inline metadata: Delete a key matching a regex pattern and all associated values
|
||||||
|
- inline metadata: Rename a key
|
||||||
|
- inline metadata: Delete a value matching a regex pattern from a specified key
|
||||||
|
- inline metadata: Rename a value from a specified key
|
||||||
|
- vault: Create a backup of the Obsidian vault
|
||||||
|
|
||||||
|
|
||||||
|
## Install
|
||||||
|
`obsidian-metadata` requires Python v3.10 or above.
|
||||||
|
|
||||||
|
|
||||||
|
Use [PIPX](https://pypa.github.io/pipx/) to install this package from Github.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pipx install git+https://${GITHUB_TOKEN}@github.com/natelandau/obsidian-metadata
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
## Disclaimer
|
||||||
|
**Important:** It is strongly recommended that you back up your vault prior to committing changes. This script makes changes directly to the markdown files in your vault. Once the changes are committed, there is no ability to recreate the original information unless you have a backup. Follow the instructions in the script to create a backup of your vault if needed.
|
||||||
|
|
||||||
|
The author of this script is not responsible for any data loss that may occur. Use at your own risk.
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
The script provides a menu of available actions. Make as many changes as you require and review them as you go. No changes are made to the Vault until they are explicitly committed.
|
||||||
|
|
||||||
|
[](https://asciinema.org/a/553464)
|
||||||
|
|
||||||
|
|
||||||
|
### Configuration
|
||||||
|
`obsidian-metadata` requires a configuration file at `~/.obsidian_metadata.toml`. On first run, this file will be created. Read the comments in this file to configure your preferences. This configuration file contains the following information.
|
||||||
|
|
||||||
|
```toml
|
||||||
|
# Path to your obsidian vault
|
||||||
|
vault = "/path/to/vault"
|
||||||
|
|
||||||
|
# Folders within the vault to ignore when indexing metadata
|
||||||
|
exclude_paths = [".git", ".obsidian"]
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# Contributing
|
||||||
|
|
||||||
|
## Setup: Once per project
|
||||||
|
|
||||||
|
There are two ways to contribute to this project.
|
||||||
|
|
||||||
|
### 21. Containerized development (Recommended)
|
||||||
|
|
||||||
|
1. Clone this repository. `git clone https://github.com/natelandau/obsidian-metadata`
|
||||||
|
2. Open the repository in Visual Studio Code
|
||||||
|
3. Start the [Dev Container](https://code.visualstudio.com/docs/remote/containers). Run <kbd>Ctrl/⌘</kbd> + <kbd>⇧</kbd> + <kbd>P</kbd> → _Remote-Containers: Reopen in Container_.
|
||||||
|
4. Run `poetry env info -p` to find the PATH to the Python interpreter if needed by VSCode.
|
||||||
|
|
||||||
|
### 2. Local development
|
||||||
|
|
||||||
|
1. Install Python 3.10 and [Poetry](https://python-poetry.org)
|
||||||
|
2. Clone this repository. `git clone https://github.com/natelandau/obsidian-metadata`
|
||||||
|
3. Install the Poetry environment with `poetry install`.
|
||||||
|
4. Activate your Poetry environment with `poetry shell`.
|
||||||
|
5. Install the pre-commit hooks with `pre-commit install --install-hooks`.
|
||||||
|
|
||||||
|
## Developing
|
||||||
|
|
||||||
|
- This project follows the [Conventional Commits](https://www.conventionalcommits.org/) standard to automate [Semantic Versioning](https://semver.org/) and [Keep A Changelog](https://keepachangelog.com/) with [Commitizen](https://github.com/commitizen-tools/commitizen).
|
||||||
|
- When you're ready to commit changes run `cz c`
|
||||||
|
- Run `poe` from within the development environment to print a list of [Poe the Poet](https://github.com/nat-n/poethepoet) tasks available to run on this project. Common commands:
|
||||||
|
- `poe lint` runs all linters
|
||||||
|
- `poe test` runs all tests with Pytest
|
||||||
|
- Run `poetry add {package}` from within the development environment to install a run time dependency and add it to `pyproject.toml` and `poetry.lock`.
|
||||||
|
- Run `poetry remove {package}` from within the development environment to uninstall a run time dependency and remove it from `pyproject.toml` and `poetry.lock`.
|
||||||
|
- Run `poetry update` from within the development environment to upgrade all dependencies to the latest versions allowed by `pyproject.toml`.
|
||||||
14
codecov.yml
Normal file
14
codecov.yml
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
---
|
||||||
|
coverage:
|
||||||
|
status:
|
||||||
|
project:
|
||||||
|
default:
|
||||||
|
target: 50% # the required coverage value
|
||||||
|
threshold: 1% # the leniency in hitting the target
|
||||||
|
|
||||||
|
ignore:
|
||||||
|
- tests/
|
||||||
|
|
||||||
|
comment:
|
||||||
|
layout: "reach, diff, flags, files" # Remove items here to change the format
|
||||||
|
require_changes: true
|
||||||
1317
poetry.lock
generated
Normal file
1317
poetry.lock
generated
Normal file
File diff suppressed because it is too large
Load Diff
2
poetry.toml
Normal file
2
poetry.toml
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
[virtualenvs]
|
||||||
|
in-project = true
|
||||||
239
pyproject.toml
Normal file
239
pyproject.toml
Normal file
@@ -0,0 +1,239 @@
|
|||||||
|
[build-system]
|
||||||
|
build-backend = "poetry.core.masonry.api"
|
||||||
|
requires = ["poetry-core>=1.0.0"]
|
||||||
|
|
||||||
|
[tool.poetry]
|
||||||
|
authors = ["Nate Landau <github@natenate.org>"]
|
||||||
|
description = "Make batch updates to Obsidian metadata"
|
||||||
|
homepage = "https://github.com/natelandau/obsidian-metadata"
|
||||||
|
keywords = ["obsidian"]
|
||||||
|
license = "GNU AFFERO"
|
||||||
|
name = "obsidian-metadata"
|
||||||
|
readme = "README.md"
|
||||||
|
repository = "https://github.com/natelandau/obsidian-metadata"
|
||||||
|
version = "0.0.0"
|
||||||
|
|
||||||
|
[tool.poetry.scripts] # https://python-poetry.org/docs/pyproject/#scripts
|
||||||
|
obsidian-metadata = "obsidian_metadata.cli:app"
|
||||||
|
|
||||||
|
[tool.poetry.dependencies]
|
||||||
|
loguru = "^0.6.0"
|
||||||
|
python = "^3.10"
|
||||||
|
questionary = "^1.10.0"
|
||||||
|
rich = "^13.2.0"
|
||||||
|
ruamel-yaml = "^0.17.21"
|
||||||
|
shellingham = "^1.4.0"
|
||||||
|
tomli = "^2.0.1"
|
||||||
|
typer = "^0.7.0"
|
||||||
|
|
||||||
|
[tool.poetry.group.test.dependencies]
|
||||||
|
pytest = "^7.2.0"
|
||||||
|
pytest-clarity = "^1.0.1"
|
||||||
|
pytest-mock = "^3.10.0"
|
||||||
|
pytest-pretty-terminal = "^1.1.0"
|
||||||
|
pytest-xdist = "^3.1.0"
|
||||||
|
|
||||||
|
[tool.poetry.group.dev.dependencies]
|
||||||
|
absolufy-imports = "^0.3.1"
|
||||||
|
black = "^22.12.0"
|
||||||
|
commitizen = "^2.39.1"
|
||||||
|
coverage = "^7.0.4"
|
||||||
|
interrogate = "^1.5.0"
|
||||||
|
mypy = "^0.991"
|
||||||
|
pdoc = "^12.3.1"
|
||||||
|
pep8-naming = "^0.13.3"
|
||||||
|
poethepoet = "^0.18.0"
|
||||||
|
pre-commit = "^2.21.0"
|
||||||
|
ruff = "^0.0.217"
|
||||||
|
typeguard = "^2.13.3"
|
||||||
|
types-python-dateutil = "^2.8.19.5"
|
||||||
|
types-pyyaml = "^6.0.12.2"
|
||||||
|
vulture = "^2.7"
|
||||||
|
|
||||||
|
[tool.ruff] # https://github.com/charliermarsh/ruff
|
||||||
|
fix = true
|
||||||
|
ignore = [
|
||||||
|
"B006",
|
||||||
|
"B008",
|
||||||
|
"D107",
|
||||||
|
"D203",
|
||||||
|
"D204",
|
||||||
|
"D213",
|
||||||
|
"D215",
|
||||||
|
"D400",
|
||||||
|
"D404",
|
||||||
|
"D406",
|
||||||
|
"D407",
|
||||||
|
"D408",
|
||||||
|
"D409",
|
||||||
|
"D413",
|
||||||
|
"E501",
|
||||||
|
"N805",
|
||||||
|
"PGH001",
|
||||||
|
"PGH003",
|
||||||
|
"UP007",
|
||||||
|
]
|
||||||
|
ignore-init-module-imports = true
|
||||||
|
line-length = 100
|
||||||
|
select = [
|
||||||
|
"A",
|
||||||
|
"B",
|
||||||
|
"BLE",
|
||||||
|
"C4",
|
||||||
|
"C90",
|
||||||
|
"D",
|
||||||
|
"E",
|
||||||
|
"ERA",
|
||||||
|
"F",
|
||||||
|
"I",
|
||||||
|
"N",
|
||||||
|
"PGH",
|
||||||
|
"PLC",
|
||||||
|
"PLE",
|
||||||
|
"PLR",
|
||||||
|
"PLW",
|
||||||
|
"RET",
|
||||||
|
"RUF",
|
||||||
|
"SIM",
|
||||||
|
"TID",
|
||||||
|
"UP",
|
||||||
|
"W",
|
||||||
|
"YTT",
|
||||||
|
]
|
||||||
|
src = ["src", "tests"]
|
||||||
|
target-version = "py310"
|
||||||
|
unfixable = ["ERA001", "F401", "F401", "UP007"]
|
||||||
|
|
||||||
|
[tool.coverage.report] # https://coverage.readthedocs.io/en/latest/config.html#report
|
||||||
|
exclude_lines = [
|
||||||
|
'def __repr__',
|
||||||
|
'except [\w\s\._]+ as .*:',
|
||||||
|
'log\.critical',
|
||||||
|
'log\.debug',
|
||||||
|
'log\.error',
|
||||||
|
'log\.exception',
|
||||||
|
'log\.info',
|
||||||
|
'log\.success',
|
||||||
|
'log\.trace',
|
||||||
|
'log\.warning',
|
||||||
|
'pragma: no cover',
|
||||||
|
'raise Abort',
|
||||||
|
'raise Exit',
|
||||||
|
'raise typer\.Exit',
|
||||||
|
]
|
||||||
|
fail_under = 50
|
||||||
|
precision = 1
|
||||||
|
show_missing = true
|
||||||
|
skip_covered = true
|
||||||
|
|
||||||
|
[tool.coverage.run]
|
||||||
|
branch = true
|
||||||
|
command_line = "--module pytest"
|
||||||
|
data_file = "reports/.coverage"
|
||||||
|
source = ["src"]
|
||||||
|
|
||||||
|
[tool.coverage.xml]
|
||||||
|
output = "reports/coverage.xml"
|
||||||
|
|
||||||
|
[tool.black]
|
||||||
|
line-length = 100
|
||||||
|
|
||||||
|
[tool.commitizen]
|
||||||
|
bump_message = "bump(release): v$current_version → v$new_version"
|
||||||
|
tag_format = "v$version"
|
||||||
|
update_changelog_on_bump = true
|
||||||
|
version = "0.0.0"
|
||||||
|
version_files = [
|
||||||
|
"pyproject.toml:version",
|
||||||
|
"src/obsidian_metadata/__version__.py:__version__",
|
||||||
|
]
|
||||||
|
|
||||||
|
[tool.interrogate]
|
||||||
|
exclude = ["build", "docs", "tests"]
|
||||||
|
fail-under = 90
|
||||||
|
ignore-init-method = true
|
||||||
|
verbose = 2
|
||||||
|
|
||||||
|
[tool.mypy] # https://mypy.readthedocs.io/en/latest/config_file.html
|
||||||
|
disallow_any_unimported = false
|
||||||
|
disallow_subclassing_any = false
|
||||||
|
disallow_untyped_decorators = false
|
||||||
|
disallow_untyped_defs = true
|
||||||
|
exclude = [
|
||||||
|
'tests/', # TOML literal string (single-quotes, regex okay, no escaping necessary)
|
||||||
|
]
|
||||||
|
follow_imports = "normal"
|
||||||
|
ignore_missing_imports = true
|
||||||
|
junit_xml = "reports/mypy.xml"
|
||||||
|
no_implicit_optional = true
|
||||||
|
pretty = false
|
||||||
|
show_column_numbers = true
|
||||||
|
show_error_codes = true
|
||||||
|
show_error_context = true
|
||||||
|
strict_optional = false
|
||||||
|
warn_redundant_casts = true
|
||||||
|
warn_unreachable = true
|
||||||
|
warn_unused_ignores = true
|
||||||
|
|
||||||
|
[tool.pytest.ini_options]
|
||||||
|
addopts = "--color=yes --doctest-modules --exitfirst --failed-first --strict-config --strict-markers --verbosity=2 --junitxml=reports/pytest.xml"
|
||||||
|
filterwarnings = ["error", "ignore::DeprecationWarning"]
|
||||||
|
testpaths = ["src", "tests"]
|
||||||
|
xfail_strict = true
|
||||||
|
|
||||||
|
[tool.vulture] # https://pypi.org/project/vulture/
|
||||||
|
# exclude = ["file*.py", "dir/"]
|
||||||
|
# ignore_decorators = ["@app.route", "@require_*"]
|
||||||
|
ignore_names = ["args", "cls", "indentless", "kwargs", "request", "version"]
|
||||||
|
# make_whitelist = true
|
||||||
|
min_confidence = 80
|
||||||
|
paths = ["src", "tests"]
|
||||||
|
sort_by_size = true
|
||||||
|
verbose = false
|
||||||
|
|
||||||
|
[tool.poe.tasks]
|
||||||
|
|
||||||
|
[tool.poe.tasks.docs]
|
||||||
|
cmd = """
|
||||||
|
pdoc
|
||||||
|
--docformat google
|
||||||
|
--output-directory docs
|
||||||
|
src/obsidian_metadata
|
||||||
|
"""
|
||||||
|
help = "Generate this package's docs"
|
||||||
|
|
||||||
|
[tool.poe.tasks.lint]
|
||||||
|
help = "Lint this package"
|
||||||
|
|
||||||
|
[[tool.poe.tasks.lint.sequence]]
|
||||||
|
shell = "ruff --extend-ignore=I001,D301 src/ tests/"
|
||||||
|
|
||||||
|
[[tool.poe.tasks.lint.sequence]]
|
||||||
|
shell = "black --check src/ tests/"
|
||||||
|
|
||||||
|
[[tool.poe.tasks.lint.sequence]]
|
||||||
|
shell = "poetry check"
|
||||||
|
|
||||||
|
[[tool.poe.tasks.lint.sequence]]
|
||||||
|
shell = "mypy --config-file pyproject.toml src/"
|
||||||
|
|
||||||
|
[[tool.poe.tasks.lint.sequence]]
|
||||||
|
shell = "vulture src/ tests/"
|
||||||
|
|
||||||
|
[[tool.poe.tasks.lint.sequence]]
|
||||||
|
shell = "yamllint ."
|
||||||
|
|
||||||
|
[[tool.poe.tasks.lint.sequence]]
|
||||||
|
shell = "interrogate -c pyproject.toml ."
|
||||||
|
|
||||||
|
[tool.poe.tasks.test]
|
||||||
|
help = "Test this package"
|
||||||
|
|
||||||
|
[[tool.poe.tasks.test.sequence]]
|
||||||
|
cmd = "coverage run"
|
||||||
|
|
||||||
|
[[tool.poe.tasks.test.sequence]]
|
||||||
|
cmd = "coverage report"
|
||||||
|
|
||||||
|
[[tool.poe.tasks.test.sequence]]
|
||||||
|
cmd = "coverage xml"
|
||||||
821
scripts/pre-commit-hook.sh
Executable file
821
scripts/pre-commit-hook.sh
Executable file
@@ -0,0 +1,821 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# shellcheck disable=SC2317
|
||||||
|
|
||||||
|
_mainScript_() {
|
||||||
|
|
||||||
|
_customStopWords_() {
|
||||||
|
# DESC: Check if any specified stop words are in the commit diff. If found, the pre-commit hook will exit with a non-zero exit code.
|
||||||
|
# ARGS:
|
||||||
|
# $1 (Required): Path to file
|
||||||
|
# OUTS:
|
||||||
|
# 0: Success
|
||||||
|
# 1: Failure
|
||||||
|
# USAGE:
|
||||||
|
# _customStopWords_ "/path/to/file.sh"
|
||||||
|
# NOTE:
|
||||||
|
# Requires a plaintext stopword file located at
|
||||||
|
# `~/.git_stop_words` containing one stopword per line.
|
||||||
|
|
||||||
|
[[ $# == 0 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"
|
||||||
|
|
||||||
|
local _gitDiffTmp
|
||||||
|
local FILE_TO_CHECK="${1}"
|
||||||
|
|
||||||
|
_gitDiffTmp="${TMP_DIR}/${RANDOM}.${RANDOM}.${RANDOM}.diff.txt"
|
||||||
|
|
||||||
|
if [ -f "${STOP_WORD_FILE}" ]; then
|
||||||
|
|
||||||
|
if [[ $(basename "${STOP_WORD_FILE}") == "$(basename "${FILE_TO_CHECK}")" ]]; then
|
||||||
|
debug "$(basename "${1}"): Don't check stop words file for stop words."
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
debug "$(basename "${FILE_TO_CHECK}"): Checking for stop words..."
|
||||||
|
|
||||||
|
# remove blank lines from stopwords file
|
||||||
|
sed '/^$/d' "${STOP_WORD_FILE}" >"${TMP_DIR}/pattern_file.txt"
|
||||||
|
|
||||||
|
# Check for stopwords
|
||||||
|
if git diff --cached -- "${FILE_TO_CHECK}" | grep –i -q "new file mode"; then
|
||||||
|
if grep -i --file="${TMP_DIR}/pattern_file.txt" "${FILE_TO_CHECK}"; then
|
||||||
|
return 1
|
||||||
|
else
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
# Add diff to a temporary file
|
||||||
|
git diff --cached -- "${FILE_TO_CHECK}" | grep '^+' >"${_gitDiffTmp}"
|
||||||
|
if grep -i --file="${TMP_DIR}/pattern_file.txt" "${_gitDiffTmp}"; then
|
||||||
|
return 1
|
||||||
|
else
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
else
|
||||||
|
|
||||||
|
notice "Could not find git stopwords file expected at '${STOP_WORD_FILE}'. Continuing..."
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# Don;t lint binary files
|
||||||
|
if [[ ${ARGS[0]} =~ \.(jpg|jpeg|gif|png|exe|zip|gzip|tiff|tar|dmg|ttf|otf|m4a|mp3|mkv|mov|avi|eot|svg|woff2?|aac|wav|flac|pdf|doc|xls|ppt|7z|bin|dmg|dat|sql|ico|mpe?g)$ ]]; then
|
||||||
|
_safeExit_ 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
if ! _customStopWords_ "${ARGS[0]}"; then
|
||||||
|
error "Stop words found in ${ARGS[0]}"
|
||||||
|
_safeExit_ 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
# end _mainScript_
|
||||||
|
|
||||||
|
# ################################## Flags and defaults
|
||||||
|
# Required variables
|
||||||
|
LOGFILE="${HOME}/logs/$(basename "$0").log"
|
||||||
|
QUIET=false
|
||||||
|
LOGLEVEL=ERROR
|
||||||
|
VERBOSE=false
|
||||||
|
FORCE=false
|
||||||
|
DRYRUN=false
|
||||||
|
declare -a ARGS=()
|
||||||
|
|
||||||
|
# Script specific
|
||||||
|
LOGLEVEL=NONE
|
||||||
|
STOP_WORD_FILE="${HOME}/.git_stop_words"
|
||||||
|
shopt -s nocasematch
|
||||||
|
# ################################## Custom utility functions (Pasted from repository)
|
||||||
|
|
||||||
|
# ################################## Functions required for this template to work
|
||||||
|
|
||||||
|
_setColors_() {
|
||||||
|
# DESC:
|
||||||
|
# Sets colors use for alerts.
|
||||||
|
# ARGS:
|
||||||
|
# None
|
||||||
|
# OUTS:
|
||||||
|
# None
|
||||||
|
# USAGE:
|
||||||
|
# printf "%s\n" "${blue}Some text${reset}"
|
||||||
|
|
||||||
|
if tput setaf 1 >/dev/null 2>&1; then
|
||||||
|
bold=$(tput bold)
|
||||||
|
underline=$(tput smul)
|
||||||
|
reverse=$(tput rev)
|
||||||
|
reset=$(tput sgr0)
|
||||||
|
|
||||||
|
if [[ $(tput colors) -ge 256 ]] >/dev/null 2>&1; then
|
||||||
|
white=$(tput setaf 231)
|
||||||
|
blue=$(tput setaf 38)
|
||||||
|
yellow=$(tput setaf 11)
|
||||||
|
green=$(tput setaf 82)
|
||||||
|
red=$(tput setaf 9)
|
||||||
|
purple=$(tput setaf 171)
|
||||||
|
gray=$(tput setaf 250)
|
||||||
|
else
|
||||||
|
white=$(tput setaf 7)
|
||||||
|
blue=$(tput setaf 38)
|
||||||
|
yellow=$(tput setaf 3)
|
||||||
|
green=$(tput setaf 2)
|
||||||
|
red=$(tput setaf 9)
|
||||||
|
purple=$(tput setaf 13)
|
||||||
|
gray=$(tput setaf 7)
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
bold="\033[4;37m"
|
||||||
|
reset="\033[0m"
|
||||||
|
underline="\033[4;37m"
|
||||||
|
# shellcheck disable=SC2034
|
||||||
|
reverse=""
|
||||||
|
white="\033[0;37m"
|
||||||
|
blue="\033[0;34m"
|
||||||
|
yellow="\033[0;33m"
|
||||||
|
green="\033[1;32m"
|
||||||
|
red="\033[0;31m"
|
||||||
|
purple="\033[0;35m"
|
||||||
|
gray="\033[0;37m"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
_alert_() {
|
||||||
|
# DESC:
|
||||||
|
# Controls all printing of messages to log files and stdout.
|
||||||
|
# ARGS:
|
||||||
|
# $1 (required) - The type of alert to print
|
||||||
|
# (success, header, notice, dryrun, debug, warning, error,
|
||||||
|
# fatal, info, input)
|
||||||
|
# $2 (required) - The message to be printed to stdout and/or a log file
|
||||||
|
# $3 (optional) - Pass '${LINENO}' to print the line number where the _alert_ was triggered
|
||||||
|
# OUTS:
|
||||||
|
# stdout: The message is printed to stdout
|
||||||
|
# log file: The message is printed to a log file
|
||||||
|
# USAGE:
|
||||||
|
# [_alertType] "[MESSAGE]" "${LINENO}"
|
||||||
|
# NOTES:
|
||||||
|
# - The colors of each alert type are set in this function
|
||||||
|
# - For specified alert types, the funcstac will be printed
|
||||||
|
|
||||||
|
local _color
|
||||||
|
local _alertType="${1}"
|
||||||
|
local _message="${2}"
|
||||||
|
local _line="${3-}" # Optional line number
|
||||||
|
|
||||||
|
[[ $# -lt 2 ]] && fatal 'Missing required argument to _alert_'
|
||||||
|
|
||||||
|
if [[ -n ${_line} && ${_alertType} =~ ^fatal && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
|
||||||
|
_message="${_message} ${gray}(line: ${_line}) $(_printFuncStack_)"
|
||||||
|
elif [[ -n ${_line} && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
|
||||||
|
_message="${_message} ${gray}(line: ${_line})"
|
||||||
|
elif [[ -z ${_line} && ${_alertType} =~ ^fatal && ${FUNCNAME[2]} != "_trapCleanup_" ]]; then
|
||||||
|
_message="${_message} ${gray}$(_printFuncStack_)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ${_alertType} =~ ^(error|fatal) ]]; then
|
||||||
|
_color="${bold}${red}"
|
||||||
|
elif [ "${_alertType}" == "info" ]; then
|
||||||
|
_color="${gray}"
|
||||||
|
elif [ "${_alertType}" == "warning" ]; then
|
||||||
|
_color="${red}"
|
||||||
|
elif [ "${_alertType}" == "success" ]; then
|
||||||
|
_color="${green}"
|
||||||
|
elif [ "${_alertType}" == "debug" ]; then
|
||||||
|
_color="${purple}"
|
||||||
|
elif [ "${_alertType}" == "header" ]; then
|
||||||
|
_color="${bold}${white}${underline}"
|
||||||
|
elif [ "${_alertType}" == "notice" ]; then
|
||||||
|
_color="${bold}"
|
||||||
|
elif [ "${_alertType}" == "input" ]; then
|
||||||
|
_color="${bold}${underline}"
|
||||||
|
elif [ "${_alertType}" = "dryrun" ]; then
|
||||||
|
_color="${blue}"
|
||||||
|
else
|
||||||
|
_color=""
|
||||||
|
fi
|
||||||
|
|
||||||
|
_writeToScreen_() {
|
||||||
|
("${QUIET}") && return 0 # Print to console when script is not 'quiet'
|
||||||
|
[[ ${VERBOSE} == false && ${_alertType} =~ ^(debug|verbose) ]] && return 0
|
||||||
|
|
||||||
|
if ! [[ -t 1 || -z ${TERM-} ]]; then # Don't use colors on non-recognized terminals
|
||||||
|
_color=""
|
||||||
|
reset=""
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ ${_alertType} == header ]]; then
|
||||||
|
printf "${_color}%s${reset}\n" "${_message}"
|
||||||
|
else
|
||||||
|
printf "${_color}[%7s] %s${reset}\n" "${_alertType}" "${_message}"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
_writeToScreen_
|
||||||
|
|
||||||
|
_writeToLog_() {
|
||||||
|
[[ ${_alertType} == "input" ]] && return 0
|
||||||
|
[[ ${LOGLEVEL} =~ (off|OFF|Off) ]] && return 0
|
||||||
|
if [ -z "${LOGFILE-}" ]; then
|
||||||
|
LOGFILE="$(pwd)/$(basename "$0").log"
|
||||||
|
fi
|
||||||
|
[ ! -d "$(dirname "${LOGFILE}")" ] && mkdir -p "$(dirname "${LOGFILE}")"
|
||||||
|
[[ ! -f ${LOGFILE} ]] && touch "${LOGFILE}"
|
||||||
|
|
||||||
|
# Don't use colors in logs
|
||||||
|
local _cleanmessage
|
||||||
|
_cleanmessage="$(printf "%s" "${_message}" | sed -E 's/(\x1b)?\[(([0-9]{1,2})(;[0-9]{1,3}){0,2})?[mGK]//g')"
|
||||||
|
# Print message to log file
|
||||||
|
printf "%s [%7s] %s %s\n" "$(date +"%b %d %R:%S")" "${_alertType}" "[$(/bin/hostname)]" "${_cleanmessage}" >>"${LOGFILE}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Write specified log level data to logfile
|
||||||
|
case "${LOGLEVEL:-ERROR}" in
|
||||||
|
ALL | all | All)
|
||||||
|
_writeToLog_
|
||||||
|
;;
|
||||||
|
DEBUG | debug | Debug)
|
||||||
|
_writeToLog_
|
||||||
|
;;
|
||||||
|
INFO | info | Info)
|
||||||
|
if [[ ${_alertType} =~ ^(error|fatal|warning|info|notice|success) ]]; then
|
||||||
|
_writeToLog_
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
NOTICE | notice | Notice)
|
||||||
|
if [[ ${_alertType} =~ ^(error|fatal|warning|notice|success) ]]; then
|
||||||
|
_writeToLog_
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
WARN | warn | Warn)
|
||||||
|
if [[ ${_alertType} =~ ^(error|fatal|warning) ]]; then
|
||||||
|
_writeToLog_
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
ERROR | error | Error)
|
||||||
|
if [[ ${_alertType} =~ ^(error|fatal) ]]; then
|
||||||
|
_writeToLog_
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
FATAL | fatal | Fatal)
|
||||||
|
if [[ ${_alertType} =~ ^fatal ]]; then
|
||||||
|
_writeToLog_
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
OFF | off)
|
||||||
|
return 0
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
if [[ ${_alertType} =~ ^(error|fatal) ]]; then
|
||||||
|
_writeToLog_
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
} # /_alert_
|
||||||
|
|
||||||
|
error() { _alert_ error "${1}" "${2-}"; }
|
||||||
|
warning() { _alert_ warning "${1}" "${2-}"; }
|
||||||
|
notice() { _alert_ notice "${1}" "${2-}"; }
|
||||||
|
info() { _alert_ info "${1}" "${2-}"; }
|
||||||
|
success() { _alert_ success "${1}" "${2-}"; }
|
||||||
|
dryrun() { _alert_ dryrun "${1}" "${2-}"; }
|
||||||
|
input() { _alert_ input "${1}" "${2-}"; }
|
||||||
|
header() { _alert_ header "${1}" "${2-}"; }
|
||||||
|
debug() { _alert_ debug "${1}" "${2-}"; }
|
||||||
|
fatal() {
|
||||||
|
_alert_ fatal "${1}" "${2-}"
|
||||||
|
_safeExit_ "1"
|
||||||
|
}
|
||||||
|
|
||||||
|
_printFuncStack_() {
|
||||||
|
# DESC:
|
||||||
|
# Prints the function stack in use. Used for debugging, and error reporting.
|
||||||
|
# ARGS:
|
||||||
|
# None
|
||||||
|
# OUTS:
|
||||||
|
# stdout: Prints [function]:[file]:[line]
|
||||||
|
# NOTE:
|
||||||
|
# Does not print functions from the alert class
|
||||||
|
local _i
|
||||||
|
declare -a _funcStackResponse=()
|
||||||
|
for ((_i = 1; _i < ${#BASH_SOURCE[@]}; _i++)); do
|
||||||
|
case "${FUNCNAME[${_i}]}" in
|
||||||
|
_alert_ | _trapCleanup_ | fatal | error | warning | notice | info | debug | dryrun | header | success)
|
||||||
|
continue
|
||||||
|
;;
|
||||||
|
*)
|
||||||
|
_funcStackResponse+=("${FUNCNAME[${_i}]}:$(basename "${BASH_SOURCE[${_i}]}"):${BASH_LINENO[_i - 1]}")
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
|
||||||
|
done
|
||||||
|
printf "( "
|
||||||
|
printf %s "${_funcStackResponse[0]}"
|
||||||
|
printf ' < %s' "${_funcStackResponse[@]:1}"
|
||||||
|
printf ' )\n'
|
||||||
|
}
|
||||||
|
|
||||||
|
_safeExit_() {
|
||||||
|
# DESC:
|
||||||
|
# Cleanup and exit from a script
|
||||||
|
# ARGS:
|
||||||
|
# $1 (optional) - Exit code (defaults to 0)
|
||||||
|
# OUTS:
|
||||||
|
# None
|
||||||
|
|
||||||
|
if [[ -d ${SCRIPT_LOCK-} ]]; then
|
||||||
|
if command rm -rf "${SCRIPT_LOCK}"; then
|
||||||
|
debug "Removing script lock"
|
||||||
|
else
|
||||||
|
warning "Script lock could not be removed. Try manually deleting ${yellow}'${SCRIPT_LOCK}'"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ -n ${TMP_DIR-} && -d ${TMP_DIR-} ]]; then
|
||||||
|
if [[ ${1-} == 1 && -n "$(ls "${TMP_DIR}")" ]]; then
|
||||||
|
command rm -r "${TMP_DIR}"
|
||||||
|
else
|
||||||
|
command rm -r "${TMP_DIR}"
|
||||||
|
debug "Removing temp directory"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
trap - INT TERM EXIT
|
||||||
|
exit "${1:-0}"
|
||||||
|
}
|
||||||
|
|
||||||
|
_trapCleanup_() {
|
||||||
|
# DESC:
|
||||||
|
# Log errors and cleanup from script when an error is trapped. Called by 'trap'
|
||||||
|
# ARGS:
|
||||||
|
# $1: Line number where error was trapped
|
||||||
|
# $2: Line number in function
|
||||||
|
# $3: Command executing at the time of the trap
|
||||||
|
# $4: Names of all shell functions currently in the execution call stack
|
||||||
|
# $5: Scriptname
|
||||||
|
# $6: $BASH_SOURCE
|
||||||
|
# USAGE:
|
||||||
|
# trap '_trapCleanup_ ${LINENO} ${BASH_LINENO} "${BASH_COMMAND}" "${FUNCNAME[*]}" "${0}" "${BASH_SOURCE[0]}"' EXIT INT TERM SIGINT SIGQUIT SIGTERM ERR
|
||||||
|
# OUTS:
|
||||||
|
# Exits script with error code 1
|
||||||
|
|
||||||
|
local _line=${1-} # LINENO
|
||||||
|
local _linecallfunc=${2-}
|
||||||
|
local _command="${3-}"
|
||||||
|
local _funcstack="${4-}"
|
||||||
|
local _script="${5-}"
|
||||||
|
local _sourced="${6-}"
|
||||||
|
|
||||||
|
# Replace the cursor in-case 'tput civis' has been used
|
||||||
|
tput cnorm
|
||||||
|
|
||||||
|
if declare -f "fatal" &>/dev/null && declare -f "_printFuncStack_" &>/dev/null; then
|
||||||
|
|
||||||
|
_funcstack="'$(printf "%s" "${_funcstack}" | sed -E 's/ / < /g')'"
|
||||||
|
|
||||||
|
if [[ ${_script##*/} == "${_sourced##*/}" ]]; then
|
||||||
|
fatal "${7-} command: '${_command}' (line: ${_line}) [func: $(_printFuncStack_)]"
|
||||||
|
else
|
||||||
|
fatal "${7-} command: '${_command}' (func: ${_funcstack} called at line ${_linecallfunc} of '${_script##*/}') (line: ${_line} of '${_sourced##*/}') "
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
printf "%s\n" "Fatal error trapped. Exiting..."
|
||||||
|
fi
|
||||||
|
|
||||||
|
if declare -f _safeExit_ &>/dev/null; then
|
||||||
|
_safeExit_ 1
|
||||||
|
else
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
_makeTempDir_() {
|
||||||
|
# DESC:
|
||||||
|
# Creates a temp directory to house temporary files
|
||||||
|
# ARGS:
|
||||||
|
# $1 (Optional) - First characters/word of directory name
|
||||||
|
# OUTS:
|
||||||
|
# Sets $TMP_DIR variable to the path of the temp directory
|
||||||
|
# USAGE:
|
||||||
|
# _makeTempDir_ "$(basename "$0")"
|
||||||
|
|
||||||
|
[ -d "${TMP_DIR-}" ] && return 0
|
||||||
|
|
||||||
|
if [ -n "${1-}" ]; then
|
||||||
|
TMP_DIR="${TMPDIR:-/tmp/}${1}.${RANDOM}.${RANDOM}.$$"
|
||||||
|
else
|
||||||
|
TMP_DIR="${TMPDIR:-/tmp/}$(basename "$0").${RANDOM}.${RANDOM}.${RANDOM}.$$"
|
||||||
|
fi
|
||||||
|
(umask 077 && mkdir "${TMP_DIR}") || {
|
||||||
|
fatal "Could not create temporary directory! Exiting."
|
||||||
|
}
|
||||||
|
debug "\$TMP_DIR=${TMP_DIR}"
|
||||||
|
}
|
||||||
|
|
||||||
|
# shellcheck disable=SC2120
|
||||||
|
_acquireScriptLock_() {
|
||||||
|
# DESC:
|
||||||
|
# Acquire script lock to prevent running the same script a second time before the
|
||||||
|
# first instance exits
|
||||||
|
# ARGS:
|
||||||
|
# $1 (optional) - Scope of script execution lock (system or user)
|
||||||
|
# OUTS:
|
||||||
|
# exports $SCRIPT_LOCK - Path to the directory indicating we have the script lock
|
||||||
|
# Exits script if lock cannot be acquired
|
||||||
|
# NOTE:
|
||||||
|
# If the lock was acquired it's automatically released in _safeExit_()
|
||||||
|
|
||||||
|
local _lockDir
|
||||||
|
if [[ ${1-} == 'system' ]]; then
|
||||||
|
_lockDir="${TMPDIR:-/tmp/}$(basename "$0").lock"
|
||||||
|
else
|
||||||
|
_lockDir="${TMPDIR:-/tmp/}$(basename "$0").${UID}.lock"
|
||||||
|
fi
|
||||||
|
|
||||||
|
if command mkdir "${_lockDir}" 2>/dev/null; then
|
||||||
|
readonly SCRIPT_LOCK="${_lockDir}"
|
||||||
|
debug "Acquired script lock: ${yellow}${SCRIPT_LOCK}${purple}"
|
||||||
|
else
|
||||||
|
if declare -f "_safeExit_" &>/dev/null; then
|
||||||
|
error "Unable to acquire script lock: ${yellow}${_lockDir}${red}"
|
||||||
|
fatal "If you trust the script isn't running, delete the lock dir"
|
||||||
|
else
|
||||||
|
printf "%s\n" "ERROR: Could not acquire script lock. If you trust the script isn't running, delete: ${_lockDir}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
_setPATH_() {
|
||||||
|
# DESC:
|
||||||
|
# Add directories to $PATH so script can find executables
|
||||||
|
# ARGS:
|
||||||
|
# $@ - One or more paths
|
||||||
|
# OPTS:
|
||||||
|
# -x - Fail if directories are not found
|
||||||
|
# OUTS:
|
||||||
|
# 0: Success
|
||||||
|
# 1: Failure
|
||||||
|
# Adds items to $PATH
|
||||||
|
# USAGE:
|
||||||
|
# _setPATH_ "/usr/local/bin" "${HOME}/bin" "$(npm bin)"
|
||||||
|
|
||||||
|
[[ $# == 0 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"
|
||||||
|
|
||||||
|
local opt
|
||||||
|
local OPTIND=1
|
||||||
|
local _failIfNotFound=false
|
||||||
|
|
||||||
|
while getopts ":xX" opt; do
|
||||||
|
case ${opt} in
|
||||||
|
x | X) _failIfNotFound=true ;;
|
||||||
|
*)
|
||||||
|
{
|
||||||
|
error "Unrecognized option '${1}' passed to _backupFile_" "${LINENO}"
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
done
|
||||||
|
shift $((OPTIND - 1))
|
||||||
|
|
||||||
|
local _newPath
|
||||||
|
|
||||||
|
for _newPath in "$@"; do
|
||||||
|
if [ -d "${_newPath}" ]; then
|
||||||
|
if ! printf "%s" "${PATH}" | grep -Eq "(^|:)${_newPath}($|:)"; then
|
||||||
|
if PATH="${_newPath}:${PATH}"; then
|
||||||
|
debug "Added '${_newPath}' to PATH"
|
||||||
|
else
|
||||||
|
debug "'${_newPath}' already in PATH"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
debug "_setPATH_: '${_newPath}' already exists in PATH"
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
debug "_setPATH_: can not find: ${_newPath}"
|
||||||
|
if [[ ${_failIfNotFound} == true ]]; then
|
||||||
|
return 1
|
||||||
|
fi
|
||||||
|
continue
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
return 0
|
||||||
|
}
|
||||||
|
|
||||||
|
_useGNUutils_() {
    # DESC:
    #         Add GNU utilities to PATH to allow consistent use of sed/grep/tar/etc. on MacOS
    # ARGS:
    #         None
    # OUTS:
    #         0 if successful
    #         1 if unsuccessful
    #         PATH: Adds GNU utilities to the path
    # USAGE:
    #         # if ! _useGNUUtils_; then exit 1; fi
    # NOTES:
    #         GNU utilities can be added to MacOS using Homebrew

    ! declare -f "_setPATH_" &>/dev/null && fatal "${FUNCNAME[0]} needs function _setPATH_"

    # Candidate gnubin directories for both Intel (/usr/local) and
    # Apple Silicon (/opt/homebrew) Homebrew prefixes.
    local -a _gnubinDirs=(
        "/usr/local/opt/gnu-tar/libexec/gnubin"
        "/usr/local/opt/coreutils/libexec/gnubin"
        "/usr/local/opt/gnu-sed/libexec/gnubin"
        "/usr/local/opt/grep/libexec/gnubin"
        "/usr/local/opt/findutils/libexec/gnubin"
        "/opt/homebrew/opt/findutils/libexec/gnubin"
        "/opt/homebrew/opt/gnu-sed/libexec/gnubin"
        "/opt/homebrew/opt/grep/libexec/gnubin"
        "/opt/homebrew/opt/coreutils/libexec/gnubin"
        "/opt/homebrew/opt/gnu-tar/libexec/gnubin"
    )

    if _setPATH_ "${_gnubinDirs[@]}"; then
        return 0
    else
        return 1
    fi
}
|
||||||
|
|
||||||
|
_homebrewPath_() {
    # DESC:
    #         Add homebrew bin dir to PATH
    # ARGS:
    #         None
    # OUTS:
    #         0 if successful
    #         1 if unsuccessful
    #         PATH: Adds homebrew bin directory to PATH
    # USAGE:
    #         # if ! _homebrewPath_; then exit 1; fi

    ! declare -f "_setPATH_" &>/dev/null && fatal "${FUNCNAME[0]} needs function _setPATH_"

    # Improvement: '_uname' is now local (it previously leaked into the global
    # scope), and the two identical _setPATH_ call sites are deduplicated.
    local _uname
    if _uname=$(command -v uname) \
        && ! "${_uname}" | tr '[:upper:]' '[:lower:]' | grep -q 'darwin'; then
        # uname is available and reports a non-macOS platform: nothing to add.
        # (Matches the original, which fell through and returned 0 here.)
        return 0
    fi

    # Either we are on macOS, or uname is unavailable (assume Homebrew paths
    # may be needed) — same behavior as the original's two branches.
    if _setPATH_ "/usr/local/bin" "/opt/homebrew/bin"; then
        return 0
    else
        return 1
    fi
}
|
||||||
|
|
||||||
|
_parseOptions_() {
    # DESC:
    #         Iterates through options passed to script and sets variables. Will break -ab into -a -b
    #         when needed and --foo=bar into --foo bar
    # ARGS:
    #         $@ from command line
    # OUTS:
    #         Sets array 'ARGS' containing all arguments passed to script that were not parsed as options
    # USAGE:
    #         _parseOptions_ "$@"

    # Iterate over options
    # _optstring lists short options that take a required argument (getopts
    # syntax); only 'h' is declared here, so no short option expects a value.
    local _optstring=h
    declare -a _options
    local _c
    local i
    # Pass 1: normalize argv into _options (split -ab, split --foo=bar).
    while (($#)); do
        case $1 in
            # If option is of type -ab
            -[!-]?*)
                # Loop over each character starting with the second
                for ((i = 1; i < ${#1}; i++)); do
                    _c=${1:i:1}
                    _options+=("-${_c}") # Add current char to options
                    # If option takes a required argument, and it's not the last char make
                    # the rest of the string its argument
                    if [[ ${_optstring} == *"${_c}:"* && -n ${1:i+1} ]]; then
                        _options+=("${1:i+1}")
                        break
                    fi
                done
                ;;
            # If option is of type --foo=bar
            --?*=*) _options+=("${1%%=*}" "${1#*=}") ;;
            # add --endopts for --
            --) _options+=(--endopts) ;;
            # Otherwise, nothing special
            *) _options+=("$1") ;;
        esac
        shift
    done
    # Replace the positional parameters with the normalized list.
    set -- "${_options[@]-}"
    unset _options

    # Read the options and set stuff
    # Pass 2: consume leading options; everything after the first non-option
    # (or after --endopts) is left for ARGS below.
    # shellcheck disable=SC2034
    while [[ ${1-} == -?* ]]; do
        case $1 in
            # Custom options

            # Common options
            -h | --help)
                _usage_
                _safeExit_
                ;;
            --loglevel)
                shift
                LOGLEVEL=${1}
                ;;
            --logfile)
                shift
                LOGFILE="${1}"
                ;;
            -n | --dryrun) DRYRUN=true ;;
            -v | --verbose) VERBOSE=true ;;
            -q | --quiet) QUIET=true ;;
            --force) FORCE=true ;;
            --endopts)
                shift
                break
                ;;
            *)
                # Unknown option: prefer fatal (logs + cleanup) when the
                # framework is loaded, else plain stderr-less print and exit.
                if declare -f _safeExit_ &>/dev/null; then
                    fatal "invalid option: $1"
                else
                    printf "%s\n" "ERROR: Invalid option: $1"
                    exit 1
                fi
                ;;
        esac
        shift
    done

    # Collect remaining operands; 'null' guards against a literal "null"
    # argument list (NOTE(review): presumably from templated callers — confirm).
    if [[ -z ${*} || ${*} == null ]]; then
        ARGS=()
    else
        ARGS+=("$@") # Store the remaining user input as arguments.
    fi
}
|
||||||
|
|
||||||
|
_columns_() {
    # DESC:
    #         Prints a two column output from a key/value pair.
    #         Optionally pass a number of 2 space tabs to indent the output.
    # ARGS:
    #         $1 (required): Key name (Left column text)
    #         $2 (required): Long value (Right column text. Wraps around if too long)
    #         $3 (optional): Number of 2 character tabs to indent the command (default 1)
    # OPTS:
    #         -b    Bold the left column
    #         -u    Underline the left column
    #         -r    Reverse background and foreground colors
    # OUTS:
    #         stdout: Prints the output in columns
    # NOTE:
    #         Long text or ANSI colors in the first column may create display issues
    # USAGE:
    #         _columns_ "Key" "Long value text" [tab level]

    [[ $# -lt 2 ]] && fatal "Missing required argument to ${FUNCNAME[0]}"

    local opt
    local OPTIND=1
    local _style=""
    while getopts ":bBuUrR" opt; do
        case ${opt} in
            b | B) _style="${_style}${bold}" ;;
            u | U) _style="${_style}${underline}" ;;
            r | R) _style="${_style}${reverse}" ;;
            *) fatal "Unrecognized option '${1}' passed to ${FUNCNAME[0]}. Exiting." ;;
        esac
    done
    shift $((OPTIND - 1))

    local _key="${1}"
    local _value="${2}"
    local _tabLevel="${3-}"
    local _tabSize=2
    local _line
    local _rightIndent
    local _leftIndent
    if [[ -z ${3-} ]]; then
        _tabLevel=0
    fi

    _leftIndent="$((_tabLevel * _tabSize))"

    local _leftColumnWidth="$((30 + _leftIndent))"

    # Improvement: query the terminal width once; the original forked a
    # 'tput cols' subshell for every rung of this ladder (up to 9 times).
    local _termCols
    _termCols="$(tput cols)"

    # Scale the right-hand indent with terminal width.
    if [ "${_termCols}" -gt 180 ]; then
        _rightIndent=110
    elif [ "${_termCols}" -gt 160 ]; then
        _rightIndent=90
    elif [ "${_termCols}" -gt 130 ]; then
        _rightIndent=60
    elif [ "${_termCols}" -gt 120 ]; then
        _rightIndent=50
    elif [ "${_termCols}" -gt 110 ]; then
        _rightIndent=40
    elif [ "${_termCols}" -gt 100 ]; then
        _rightIndent=30
    elif [ "${_termCols}" -gt 90 ]; then
        _rightIndent=20
    elif [ "${_termCols}" -gt 80 ]; then
        _rightIndent=10
    else
        _rightIndent=0
    fi

    local _rightWrapLength=$((_termCols - _leftColumnWidth - _leftIndent - _rightIndent))

    # Print the key on the first wrapped line only; blank it for continuations.
    local _first_line=0
    while read -r _line; do
        if [[ ${_first_line} -eq 0 ]]; then
            _first_line=1
        else
            _key=" "
        fi
        printf "%-${_leftIndent}s${_style}%-${_leftColumnWidth}b${reset} %b\n" "" "${_key}${reset}" "${_line}"
    done <<<"$(fold -w${_rightWrapLength} -s <<<"${_value}")"
}
|
||||||
|
|
||||||
|
_usage_() {
    # DESC:
    #         Print the script's help text to stdout.
    # OUTS:
    #         stdout: usage text (option table rendered via _columns_)
    cat <<USAGE_TEXT

${bold}$(basename "$0") [OPTION]... [FILE]...${reset}

Custom pre-commit hook script. This script is intended to be used as part of the pre-commit pipeline managed within .pre-commit-config.yaml.

${bold}${underline}Options:${reset}
$(_columns_ -b -- '-h, --help' "Display this help and exit" 2)
$(_columns_ -b -- "--loglevel [LEVEL]" "One of: FATAL, ERROR (default), WARN, INFO, NOTICE, DEBUG, ALL, OFF" 2)
$(_columns_ -b -- "--logfile [FILE]" "Full PATH to logfile. (Default is '\${HOME}/logs/$(basename "$0").log')" 2)
$(_columns_ -b -- "-n, --dryrun" "Non-destructive. Makes no permanent changes." 2)
$(_columns_ -b -- "-q, --quiet" "Quiet (no output)" 2)
$(_columns_ -b -- "-v, --verbose" "Output more information. (Items echoed to 'verbose')" 2)
$(_columns_ -b -- "--force" "Skip all user interaction. Implied 'Yes' to all actions." 2)

${bold}${underline}Example Usage:${reset}

${gray}# Run the script and specify log level and log file.${reset}
$(basename "$0") -vn --logfile "/path/to/file.log" --loglevel 'WARN'
USAGE_TEXT
}
|
||||||
|
|
||||||
|
# ################################## INITIALIZE AND RUN THE SCRIPT
# (Comment or uncomment the lines below to customize script behavior)

# Run cleanup/reporting on any exit, interrupt, or termination signal.
trap '_trapCleanup_ ${LINENO} ${BASH_LINENO} "${BASH_COMMAND}" "${FUNCNAME[*]}" "${0}" "${BASH_SOURCE[0]}"' EXIT INT TERM SIGINT SIGQUIT SIGTERM

# Trap errors in subshells and functions
set -o errtrace

# Exit on error. Append '||true' if you expect an error
set -o errexit

# Use last non-zero exit code in a pipeline
set -o pipefail

# Confirm we have BASH greater than v4
[ "${BASH_VERSINFO:-0}" -ge 4 ] || {
    printf "%s\n" "ERROR: BASH_VERSINFO is '${BASH_VERSINFO:-0}'. This script requires BASH v4 or greater."
    exit 1
}

# Make `for f in *.txt` work when `*.txt` matches zero files
shopt -s nullglob globstar

# Set IFS to preferred implementation
IFS=$' \n\t'

# Run in debug mode
# set -o xtrace

# Initialize color constants
_setColors_

# Disallow expansion of unset variables
# NOTE(review): nounset is enabled only after _setColors_ — presumably so
# optionally-unset color variables do not abort the script; confirm.
set -o nounset

# Force arguments when invoking the script
# [[ $# -eq 0 ]] && _parseOptions_ "-h"

# Parse arguments passed to script
_parseOptions_ "$@"

# Create a temp directory '$TMP_DIR'
_makeTempDir_ "$(basename "$0")"

# Acquire script lock
# _acquireScriptLock_

# Add Homebrew bin directory to PATH (MacOS)
# _homebrewPath_

# Source GNU utilities from Homebrew (MacOS)
# _useGNUutils_

# Run the main logic script
_mainScript_

# Exit cleanly
_safeExit_
|
||||||
1
src/obsidian_metadata/__init__.py
Normal file
1
src/obsidian_metadata/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""obsidian-metadata package."""
|
||||||
2
src/obsidian_metadata/__version__.py
Normal file
2
src/obsidian_metadata/__version__.py
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
"""obsidian-metadata version."""
|
||||||
|
__version__ = "0.0.0"
|
||||||
5
src/obsidian_metadata/_config/__init__.py
Normal file
5
src/obsidian_metadata/_config/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
"""Config module for obsidian frontmatter."""
|
||||||
|
|
||||||
|
from obsidian_metadata._config.config import Config
|
||||||
|
|
||||||
|
__all__ = ["Config"]
|
||||||
116
src/obsidian_metadata/_config/config.py
Normal file
116
src/obsidian_metadata/_config/config.py
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
"""Instantiate the configuration object."""
|
||||||
|
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import questionary
|
||||||
|
import rich.repr
|
||||||
|
import typer
|
||||||
|
|
||||||
|
from obsidian_metadata._utils import alerts, vault_validation
|
||||||
|
from obsidian_metadata._utils.alerts import logger as log
|
||||||
|
|
||||||
|
try:
|
||||||
|
import tomllib
|
||||||
|
except ModuleNotFoundError:
|
||||||
|
import tomli as tomllib # type: ignore [no-redef]
|
||||||
|
|
||||||
|
DEFAULT_CONFIG_FILE: Path = Path(__file__).parent / "default.toml"
|
||||||
|
|
||||||
|
|
||||||
|
@rich.repr.auto
class Config:
    """Application configuration loaded from a TOML file.

    Loads (creating from the packaged default when missing) the configuration
    file, then resolves and validates the Obsidian vault path.
    """

    def __init__(self, config_path: Path = None, vault_path: Path = None) -> None:
        """Initialize the configuration.

        Args:
            config_path: Path to the TOML config file. When None, defaults to
                ~/.<package>.toml (created from the packaged default if absent).
            vault_path: Path to the Obsidian vault. When None, falls back to the
                'vault' key in the config file, then to an interactive prompt.
        """
        # Bugfix: Path(None) raises TypeError, so the documented default of
        # config_path=None previously crashed. Only coerce when a value exists.
        self.config_path: Path = self._validate_config_path(
            Path(config_path) if config_path is not None else None
        )
        self.config: dict[str, Any] = self._load_config()
        self.config_content: str = self.config_path.read_text()
        self.vault_path: Path = self._validate_vault_path(vault_path)

        # Optional keys fall back to sensible defaults.
        self.exclude_paths: list[Any] = self.config.get("exclude_paths", [])

        try:
            self.metadata_location: str = self.config["metadata"]["metadata_location"]
        except KeyError:
            self.metadata_location = "frontmatter"

        try:
            self.tags_location: str = self.config["metadata"]["tags_location"]
        except KeyError:
            self.tags_location = "top"

        log.debug(f"Loaded configuration from '{self.config_path}'")
        log.trace(self.config)

    def __rich_repr__(self) -> rich.repr.Result:  # pragma: no cover
        """Define rich representation of Vault."""
        yield "config_path", self.config_path
        # NOTE(review): yields a bare 1-tuple, omitting the (potentially large)
        # raw file content from the repr — presumably intentional; confirm.
        yield "config_content",
        yield "vault_path", self.vault_path
        yield "metadata_location", self.metadata_location
        yield "tags_location", self.tags_location
        yield "exclude_paths", self.exclude_paths

    def _validate_config_path(self, config_path: Path | None) -> Path:
        """Resolve the config path, creating a default config file if missing.

        Args:
            config_path: Candidate path, or None for the per-user default.

        Returns:
            Path: Expanded, absolute path to the configuration file.
        """
        if config_path is None:
            config_path = Path(Path.home() / f".{__package__.split('.')[0]}.toml")

        if not config_path.exists():
            shutil.copy(DEFAULT_CONFIG_FILE, config_path)
            alerts.info(f"Created default configuration file at '{config_path}'")

        return config_path.expanduser().resolve()

    def _load_config(self) -> dict[str, Any]:
        """Parse the TOML configuration file.

        Returns:
            dict: Parsed configuration.

        Raises:
            typer.Exit: When the file cannot be parsed as TOML.
        """
        try:
            with self.config_path.open("rb") as f:
                return tomllib.load(f)
        except tomllib.TOMLDecodeError as e:
            alerts.error(f"Could not parse '{self.config_path}'")
            raise typer.Exit(code=1) from e

    def _validate_vault_path(self, vault_path: Path | None) -> Path:
        """Resolve the vault path, prompting interactively when it is invalid.

        Args:
            vault_path: Candidate vault path, or None to read the 'vault' key
                from the loaded config.

        Returns:
            Path: Expanded, absolute path to an existing vault directory.

        Raises:
            typer.Exit: When the user aborts the interactive prompt.
        """
        if vault_path is None:
            try:
                vault_path = Path(self.config["vault"]).expanduser().resolve()
            except KeyError:
                # Sentinel path guaranteed to fail the existence check below.
                vault_path = Path("/I/Do/Not/Exist")

        if not vault_path.exists():  # pragma: no cover
            alerts.error(f"Vault path not found: '{vault_path}'")

            vault_path = questionary.path(
                "Enter a path to Obsidian vault:",
                only_directories=True,
                validate=vault_validation,
            ).ask()
            if vault_path is None:
                raise typer.Exit(code=1)

            vault_path = Path(vault_path).expanduser().resolve()

            # Persist the confirmed vault location for future runs.
            self.write_config_value("vault", str(vault_path))
        return vault_path

    def write_config_value(self, key: str, value: str | int) -> None:
        """Write a new value to the configuration file.

        Args:
            key (str): The key to write.
            value (str|int): The value to write.
        """
        # Replace the quoted value of `key = "..."` in the raw TOML text,
        # preserving everything around it (including trailing comments).
        self.config_content = re.sub(
            rf"( *{key} = ['\"])[^'\"]*(['\"].*)", rf"\1{value}\2", self.config_content
        )

        alerts.notice(f"Writing new configuration for '{key}' to '{self.config_path}'")
        self.config_path.write_text(self.config_content)
|
||||||
5
src/obsidian_metadata/_config/default.toml
Normal file
5
src/obsidian_metadata/_config/default.toml
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
# Path to your obsidian vault
|
||||||
|
vault = "/path/to/vault"
|
||||||
|
|
||||||
|
# Folders within the vault to ignore when indexing metadata
|
||||||
|
exclude_paths = [".git", ".obsidian"]
|
||||||
27
src/obsidian_metadata/_utils/__init__.py
Normal file
27
src/obsidian_metadata/_utils/__init__.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
"""Shared utilities."""
|
||||||
|
|
||||||
|
from obsidian_metadata._utils import alerts
|
||||||
|
from obsidian_metadata._utils.alerts import LoggerManager
|
||||||
|
from obsidian_metadata._utils.utilities import (
|
||||||
|
clean_dictionary,
|
||||||
|
clear_screen,
|
||||||
|
dict_contains,
|
||||||
|
dict_values_to_lists_strings,
|
||||||
|
docstring_parameter,
|
||||||
|
remove_markdown_sections,
|
||||||
|
vault_validation,
|
||||||
|
version_callback,
|
||||||
|
)
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"alerts",
|
||||||
|
"clean_dictionary",
|
||||||
|
"clear_screen",
|
||||||
|
"dict_values_to_lists_strings",
|
||||||
|
"dict_contains",
|
||||||
|
"docstring_parameter",
|
||||||
|
"LoggerManager",
|
||||||
|
"remove_markdown_sections",
|
||||||
|
"vault_validation",
|
||||||
|
"version_callback",
|
||||||
|
]
|
||||||
242
src/obsidian_metadata/_utils/alerts.py
Normal file
242
src/obsidian_metadata/_utils/alerts.py
Normal file
@@ -0,0 +1,242 @@
|
|||||||
|
"""Logging and alerts."""
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import rich.repr
|
||||||
|
import typer
|
||||||
|
from loguru import logger
|
||||||
|
from rich import print
|
||||||
|
|
||||||
|
|
||||||
|
def dryrun(msg: str) -> None:
    """Display a dry-run alert without touching the logging system.

    Args:
        msg: Text of the alert.
    """
    formatted = f"[cyan]DRYRUN | {msg}[/cyan]"
    print(formatted)
|
||||||
|
|
||||||
|
|
||||||
|
def success(msg: str) -> None:
    """Display a success alert without touching the logging system.

    Args:
        msg: Text of the alert.
    """
    formatted = f"[green]SUCCESS | {msg}[/green]"
    print(formatted)
|
||||||
|
|
||||||
|
|
||||||
|
def warning(msg: str) -> None:
    """Display a warning alert without touching the logging system.

    Args:
        msg: Text of the alert.
    """
    formatted = f"[yellow]WARNING | {msg}[/yellow]"
    print(formatted)
|
||||||
|
|
||||||
|
|
||||||
|
def error(msg: str) -> None:
    """Display an error alert without touching the logging system.

    Args:
        msg: Text of the alert.
    """
    formatted = f"[red]ERROR | {msg}[/red]"
    print(formatted)
|
||||||
|
|
||||||
|
|
||||||
|
def notice(msg: str) -> None:
    """Display a notice alert without touching the logging system.

    Args:
        msg: Text of the alert.
    """
    formatted = f"[bold]NOTICE | {msg}[/bold]"
    print(formatted)
|
||||||
|
|
||||||
|
|
||||||
|
def info(msg: str) -> None:
    """Display an informational alert without touching the logging system.

    Args:
        msg: Text of the alert.
    """
    formatted = f"INFO | {msg}"
    print(formatted)
|
||||||
|
|
||||||
|
|
||||||
|
def dim(msg: str) -> None:
    """Display a message in a dimmed color.

    Args:
        msg: Text of the message.
    """
    formatted = f"[dim]{msg}[/dim]"
    print(formatted)
|
||||||
|
|
||||||
|
|
||||||
|
def _log_formatter(record: dict) -> str:
|
||||||
|
"""Create custom log formatter based on the log level. This effects the logs sent to stdout/stderr but not the log file."""
|
||||||
|
if (
|
||||||
|
record["level"].name == "INFO"
|
||||||
|
or record["level"].name == "SUCCESS"
|
||||||
|
or record["level"].name == "WARNING"
|
||||||
|
):
|
||||||
|
return "<level>{level: <8}</level> | <level>{message}</level>\n{exception}"
|
||||||
|
|
||||||
|
return "<level>{level: <8}</level> | <level>{message}</level> <fg #c5c5c5>({name}:{function}:{line})</fg #c5c5c5>\n{exception}"
|
||||||
|
|
||||||
|
|
||||||
|
@rich.repr.auto
class LoggerManager:
    """Instantiate the loguru logging system with the following levels.

    - TRACE: Usage: log.trace("")
    - DEBUG: Usage: log.debug("")
    - INFO: Usage: log.info("")
    - WARNING: Usage: log.warning("")
    - ERROR: Usage: log.error("")
    - CRITICAL: Usage: log.critical("")
    - EXCEPTION: Usage: log.exception("")

    Attributes:
        log_file (Path): Path to the log file.
        verbosity (int): Verbosity level.
        log_to_file (bool): Whether to log to a file.
        log_level (int): Default log level (verbosity overrides this)

    Examples:
        Instantiate the logger:

        logging = _utils.alerts.LoggerManager(
            verbosity,
            log_to_file,
            log_file,
            log_level)
    """

    def __init__(
        self,
        log_file: Path = Path("/logs"),
        verbosity: int = 0,
        log_to_file: bool = False,
        log_level: int = 30,
    ) -> None:
        """Configure loguru console (and optional file) sinks from verbosity.

        Args:
            log_file: Destination for file logging; Path("/logs") is the
                "unset" sentinel.
            verbosity: 0=WARNING, 1=INFO, 2=DEBUG, >=3 TRACE.
            log_to_file: Also write logs to log_file.
            log_level: Initial numeric level; overwritten below by verbosity.
        """
        self.verbosity = verbosity
        self.log_to_file = log_to_file
        self.log_file = log_file
        self.log_level = log_level

        if self.log_file == Path("/logs") and self.log_to_file:  # pragma: no cover
            print("No log file specified")
            raise typer.Exit(1)

        # Improvement: the four near-identical logger.remove()/logger.add()
        # branches are collapsed into one mapping from verbosity to level.
        if self.verbosity >= 3:
            _console_level, self.log_level = "TRACE", 5
        elif self.verbosity == 2:
            _console_level, self.log_level = "DEBUG", 10
        elif self.verbosity == 1:
            _console_level, self.log_level = "INFO", 20
        else:
            _console_level, self.log_level = "WARNING", 30

        logger.remove()
        logger.add(
            sys.stderr,
            level=_console_level,
            format=_log_formatter,  # type: ignore[arg-type]
            backtrace=False,
            diagnose=True,
        )

        if self.log_to_file is True:
            logger.add(
                self.log_file,
                rotation="5 MB",
                level=self.log_level,
                backtrace=False,
                diagnose=True,
                delay=True,
            )
            logger.debug(f"Logging to file: {self.log_file}")

        logger.debug("Logging instantiated")

    def _at_or_below(self, threshold: int, msg: str | None = None) -> bool:
        """Return True when log_level <= threshold, optionally printing msg."""
        if self.log_level <= threshold:
            if msg:
                print(msg)
            return True
        return False

    def is_trace(self, msg: str | None = None) -> bool:
        """Check if the current log level is TRACE.

        Args:
            msg (optional): Message to print. Defaults to None.

        Returns:
            bool: True if the current log level is TRACE or lower, False otherwise.
        """
        return self._at_or_below(5, msg)

    def is_debug(self, msg: str | None = None) -> bool:
        """Check if the current log level is DEBUG.

        Args:
            msg (optional): Message to print. Defaults to None.

        Returns:
            bool: True if the current log level is DEBUG or lower, False otherwise.
        """
        return self._at_or_below(10, msg)

    def is_info(self, msg: str | None = None) -> bool:
        """Check if the current log level is INFO.

        Args:
            msg (optional): Message to print. Defaults to None.

        Returns:
            bool: True if the current log level is INFO or lower, False otherwise.
        """
        return self._at_or_below(20, msg)

    def is_default(self, msg: str | None = None) -> bool:
        """Check if the current log level is default level (SUCCESS or WARNING).

        Args:
            msg (optional): Message to print. Defaults to None.

        Returns:
            bool: True if the current log level is default or lower, False otherwise.
        """
        return self._at_or_below(30, msg)
|
||||||
169
src/obsidian_metadata/_utils/utilities.py
Normal file
169
src/obsidian_metadata/_utils/utilities.py
Normal file
@@ -0,0 +1,169 @@
|
|||||||
|
"""Utility functions."""
|
||||||
|
import re
|
||||||
|
from os import name, system
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import typer
|
||||||
|
|
||||||
|
from obsidian_metadata.__version__ import __version__
|
||||||
|
|
||||||
|
|
||||||
|
def dict_values_to_lists_strings(dictionary: dict, strip_null_values: bool = False) -> dict:
    """Converts all values in a dictionary to lists of strings.

    Args:
        dictionary (dict): Dictionary to convert
        strip_null_values (bool): When True, drop None items from lists and map
            None / "None" / "" scalar values to empty lists.

    Returns:
        dict: Dictionary with all leaf values converted to lists of strings
    """
    new_dict: dict = {}

    if strip_null_values:
        for key, value in dictionary.items():
            if isinstance(value, list):
                new_dict[key] = sorted(str(item) for item in value if item is not None)
            elif isinstance(value, dict):
                # Bugfix: propagate strip_null_values into nested dictionaries.
                # Previously the recursive call omitted the flag, so nested
                # dicts were never stripped.
                new_dict[key] = dict_values_to_lists_strings(value, strip_null_values=True)
            elif value is None or value == "None" or value == "":
                new_dict[key] = []
            else:
                new_dict[key] = [str(value)]

        return new_dict

    for key, value in dictionary.items():
        if isinstance(value, list):
            new_dict[key] = sorted(str(item) for item in value)
        elif isinstance(value, dict):
            new_dict[key] = dict_values_to_lists_strings(value)
        else:
            new_dict[key] = [str(value)]

    return new_dict
|
||||||
|
|
||||||
|
|
||||||
|
def remove_markdown_sections(
    text: str,
    strip_codeblocks: bool = False,
    strip_inlinecode: bool = False,
    strip_frontmatter: bool = False,
) -> str:
    """Remove selected markdown constructs from a string.

    Args:
        text (str): Text to process.
        strip_codeblocks (bool, optional): Remove fenced ``` code blocks. Defaults to False.
        strip_inlinecode (bool, optional): Remove `inline code` spans. Defaults to False.
        strip_frontmatter (bool, optional): Remove a leading --- frontmatter block. Defaults to False.

    Returns:
        str: The text with the selected sections removed.
    """
    result = text
    if strip_codeblocks:
        result = re.sub(r"`{3}.*?`{3}", "", result, flags=re.DOTALL)
    if strip_inlinecode:
        result = re.sub(r"`.*?`", "", result)
    if strip_frontmatter:
        result = re.sub(r"^\s*---.*?---", "", result, flags=re.DOTALL)
    return result
|
||||||
|
|
||||||
|
|
||||||
|
def version_callback(value: bool) -> None:
    """Print the package version and exit when the --version flag is set."""
    if not value:
        return
    print(f"{__package__.split('.')[0]}: v{__version__}")
    raise typer.Exit()
|
||||||
|
|
||||||
|
|
||||||
|
def vault_validation(path: str) -> bool | str:
|
||||||
|
"""Validates the vault path."""
|
||||||
|
path_to_validate: Path = Path(path).expanduser().resolve()
|
||||||
|
if not path_to_validate.exists():
|
||||||
|
return f"Path does not exist: {path_to_validate}"
|
||||||
|
if not path_to_validate.is_dir():
|
||||||
|
return f"Path is not a directory: {path_to_validate}"
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
def docstring_parameter(*sub: Any) -> Any:
    """Decorator that substitutes positional values into a docstring.

    Args:
        sub (Any): Values substituted for {0}, {1}, ... placeholders.

    Usage:
        @docstring_parameter("foo", "bar")
        def foo():
            '''This is a {0} docstring with {1} variables.'''
    """

    def _apply(target: Any) -> Any:
        # str.format() replaces the numbered placeholders in the docstring.
        target.__doc__ = target.__doc__.format(*sub)
        return target

    return _apply
|
||||||
|
|
||||||
|
|
||||||
|
def clean_dictionary(dictionary: dict[str, Any]) -> dict[str, Any]:
    """Clean up a dictionary by markdown formatting from keys and values.

    Args:
        dictionary (dict): Dictionary to clean

    Returns:
        dict: Cleaned dictionary
    """
    # Strip surrounding whitespace from keys, then markdown chars (* [ ] #).
    new_dict = {key.strip(): value for key, value in dictionary.items()}
    new_dict = {key.strip("*[]#"): value for key, value in new_dict.items()}
    for key, value in new_dict.items():
        # Strip markdown chars from each list item.
        # NOTE(review): when a value is NOT a list the comprehension yields [],
        # silently discarding the scalar — presumably values are always lists
        # of strings here; TODO confirm against callers.
        new_dict[key] = [s.strip("*[]#") for s in value if isinstance(value, list)]

    return new_dict
|
||||||
|
|
||||||
|
|
||||||
|
def clear_screen() -> None:
    """Clear the terminal ('cls' on Windows, 'clear' elsewhere)."""
    command = "cls" if name == "nt" else "clear"
    _ = system(command)
|
||||||
|
|
||||||
|
|
||||||
|
def dict_contains(
    dictionary: dict[str, list[str]], key: str, value: str = None, is_regex: bool = False
) -> bool:
    """Check whether a dictionary contains a key (and optionally a value under it).

    Args:
        dictionary (dict): Mapping of keys to lists of string values.
        key (str): Key (or regex pattern when is_regex) to look for.
        value (str, optional): Value (or regex pattern) to look for under the
            key. Defaults to None.
        is_regex (bool, optional): Treat key/value as regular expressions.
            Defaults to False.

    Returns:
        bool: Whether the dictionary contains the key (and value, when given).
    """
    if value is None:
        if is_regex:
            return any(re.search(key, str(candidate)) for candidate in dictionary)
        return key in dictionary

    if is_regex:
        # True when any value under any regex-matching key matches the value pattern.
        return any(
            re.search(value, item)
            for candidate in dictionary
            if re.search(key, str(candidate))
            for item in dictionary[candidate]
        )

    return key in dictionary and value in dictionary[key]
|
||||||
115
src/obsidian_metadata/cli.py
Normal file
115
src/obsidian_metadata/cli.py
Normal file
@@ -0,0 +1,115 @@
|
|||||||
|
"""obsidian-metadata CLI."""
|
||||||
|
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
import typer
|
||||||
|
from rich import print
|
||||||
|
|
||||||
|
from obsidian_metadata._config import Config
|
||||||
|
from obsidian_metadata._utils import alerts, docstring_parameter, version_callback
|
||||||
|
from obsidian_metadata.models import Application
|
||||||
|
|
||||||
|
# Single-command Typer application. Completion is disabled, invoking with no
# arguments prints help, and rich markup is enabled so the docstring's
# [bold]/[code] tags render in --help output.
app = typer.Typer(add_completion=False, no_args_is_help=True, rich_markup_mode="rich")

# Clear Typer's default dim help-text style so help renders in the terminal default.
typer.rich_utils.STYLE_HELPTEXT = ""

# NOTE(review): HELP_TEXT is defined but appears unused in this module — confirm
# whether it can be removed or was meant to be passed to typer.Typer(help=...).
HELP_TEXT = """
"""
|
||||||
|
|
||||||
|
|
||||||
|
@app.command()
@docstring_parameter(__package__)  # substitutes the package name for {0} in the docstring
def main(
    vault_path: Path = typer.Option(
        None,
        help="Path to Obsidian vault",
        show_default=False,
    ),
    config_file: Path = typer.Option(
        # Default config lives at ~/.<package>.toml
        Path(Path.home() / f".{__package__}.toml"),
        help="Specify a custom path to a configuration file",
        show_default=False,
    ),
    dry_run: bool = typer.Option(
        False,
        "--dry-run",
        "-n",
        help="Dry run - don't actually change anything",
    ),
    log_file: Path = typer.Option(
        Path(Path.home() / "logs" / "obsidian_metadata.log"),
        help="Path to log file",
        show_default=True,
        dir_okay=False,
        file_okay=True,
        exists=False,
    ),
    log_to_file: bool = typer.Option(
        False,
        "--log-to-file",
        help="Log to file",
        show_default=True,
    ),
    verbosity: int = typer.Option(
        0,
        "-v",
        "--verbose",
        show_default=False,
        help="""Set verbosity level (0=WARN, 1=INFO, 2=DEBUG, 3=TRACE)""",
        count=True,  # each -v increments verbosity
    ),
    version: Optional[bool] = typer.Option(
        # is_eager=True: the version callback runs (and exits) before other options.
        None, "--version", help="Print version and exit", callback=version_callback, is_eager=True
    ),
) -> None:
    r"""A script to make batch updates to metadata in an Obsidian vault.

    [bold] [/]
    [bold underline]Features:[/]

    - [code]in-text tags:[/] delete every occurrence
    - [code]in-text tags:[/] Rename tag ([dim]#tag1[/] -> [dim]#tag2[/])
    - [code]frontmatter:[/] Delete a key matching a regex pattern and all associated values
    - [code]frontmatter:[/] Rename a key
    - [code]frontmatter:[/] Delete a value matching a regex pattern from a specified key
    - [code]frontmatter:[/] Rename a value from a specified key
    - [code]inline metadata:[/] Delete a key matching a regex pattern and all associated values
    - [code]inline metadata:[/] Rename a key
    - [code]inline metadata:[/] Delete a value matching a regex pattern from a specified key
    - [code]inline metadata:[/] Rename a value from a specified key
    - [code]vault:[/] Create a backup of the Obsidian vault.

    [bold underline]Usage:[/]
    Run [tan]obsidian-metadata[/] from the command line. The script will allow you to make batch updates to metadata in an Obsidian vault. Once you have made your changes, review them prior to committing them to the vault.

    Configuration is specified in a configuration file. On first run, this file will be created at [tan]~/.{0}.toml[/]. Any options specified on the command line will override the configuration file.
    """
    # Instantiate logger
    alerts.LoggerManager(  # pragma: no cover
        log_file,
        verbosity,
        log_to_file,
    )

    # Config merges the config file with any CLI overrides; Application drives
    # the interactive questionary menus.
    config: Config = Config(config_path=config_file, vault_path=vault_path)
    application = Application(dry_run=dry_run, config=config)

    banner = r"""
   ___  _         _     _ _
  / _ \| |__  ___(_) __| (_) __ _ _ __
 | | | | '_ \/ __| |/ _` | |/ _` | '_ \
 | |_| | |_) \__ \ | (_| | | (_| | | | |
  \___/|_.__/|___/_|\__,_|_|\__,_|_| |_|
 |  \/  | ___| |_ __ _  __| | __ _| |_ __ _
 | |\/| |/ _ \ __/ _` |/ _` |/ _` | __/ _` |
 | |  | |  __/ || (_| | (_| | (_| | || (_| |
 |_|  |_|\___|\__\__,_|\__,_|\__,_|\__\__,_|
"""
    print(banner)
    application.main_app()
|
||||||
|
|
||||||
|
|
||||||
|
# Allow running this module directly (e.g. `python -m obsidian_metadata.cli`).
if __name__ == "__main__":
    app()
|
||||||
24
src/obsidian_metadata/models/__init__.py
Normal file
24
src/obsidian_metadata/models/__init__.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
"""Shared models."""
|
||||||
|
from obsidian_metadata.models.patterns import Patterns # isort: skip
|
||||||
|
from obsidian_metadata.models.metadata import (
|
||||||
|
Frontmatter,
|
||||||
|
InlineMetadata,
|
||||||
|
InlineTags,
|
||||||
|
VaultMetadata,
|
||||||
|
)
|
||||||
|
from obsidian_metadata.models.notes import Note
|
||||||
|
from obsidian_metadata.models.vault import Vault
|
||||||
|
|
||||||
|
from obsidian_metadata.models.application import Application # isort: skip
|
||||||
|
|
||||||
|
# Public re-exports of this package. Every name listed here must be imported
# above: "LoggerManager" was listed but never imported in this module, which
# made `from obsidian_metadata.models import *` raise AttributeError — removed.
# Entries are kept alphabetical for diffability.
__all__ = [
    "Application",
    "Frontmatter",
    "InlineMetadata",
    "InlineTags",
    "Note",
    "Patterns",
    "Vault",
    "VaultMetadata",
]
|
||||||
370
src/obsidian_metadata/models/application.py
Normal file
370
src/obsidian_metadata/models/application.py
Normal file
@@ -0,0 +1,370 @@
|
|||||||
|
"""Questions for the cli."""
|
||||||
|
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import questionary
|
||||||
|
import typer
|
||||||
|
from rich import print
|
||||||
|
|
||||||
|
from obsidian_metadata._config import Config
|
||||||
|
from obsidian_metadata._utils import alerts, clear_screen
|
||||||
|
from obsidian_metadata._utils.alerts import logger as log
|
||||||
|
from obsidian_metadata.models import Patterns, Vault
|
||||||
|
|
||||||
|
# Module-level shared instance of the project's compiled regex patterns,
# used by the validators inside Application.
PATTERNS = Patterns()
|
||||||
|
|
||||||
|
|
||||||
|
class Application:
    """Questions for use in the cli.

    Contains methods which ask a series of questions to the user and return a dictionary with their answers.

    More info: https://questionary.readthedocs.io/en/stable/pages/advanced.html#create-questions-from-dictionaries
    """

    def __init__(self, config: Config, dry_run: bool) -> None:
        """Initialize the application.

        Args:
            config (Config): Application configuration (vault location, etc.).
            dry_run (bool): Passed through to Vault so nothing is written to disk.
        """
        self.config = config
        self.dry_run = dry_run
        # Shared styling applied to every questionary select prompt below.
        self.custom_style = questionary.Style(
            [
                ("separator", "bold fg:#6C6C6C"),
                ("instruction", "fg:#6C6C6C"),
                ("highlighted", "bold reverse"),
                ("pointer", "bold"),
            ]
        )

        clear_screen()

    def load_vault(self, path_filter: str = None) -> None:
        """Load the vault.

        Indexes the vault and logs how many notes are in scope. Also used to
        re-index after the user changes the path filter.

        Args:
            path_filter (str, optional): Regex to filter notes by path.
        """
        self.vault: Vault = Vault(config=self.config, dry_run=self.dry_run, path_filter=path_filter)
        log.info(f"Indexed {self.vault.num_notes()} notes from {self.vault.vault_path}")

    def main_app(self) -> None:  # noqa: C901
        """Questions for the main application."""
        self.load_vault()

        # Main event loop: show the menu, dispatch one action, repeat until the
        # user picks "Quit". If the user aborts the prompt (Esc/Ctrl-C),
        # questionary's .ask() returns None, no branch matches, and the menu is
        # simply shown again.
        while True:
            self.vault.info()
            operation = questionary.select(
                "What do you want to do?",
                choices=[
                    questionary.Separator("\n-- VAULT ACTIONS -----------------"),
                    {"name": "Backup vault", "value": "backup_vault"},
                    {"name": "Delete vault backup", "value": "delete_backup"},
                    {"name": "View all metadata", "value": "all_metadata"},
                    {"name": "List notes in scope", "value": "list_notes"},
                    {
                        "name": "Filter the notes being processed by their path",
                        "value": "filter_notes",
                    },
                    questionary.Separator("\n-- INLINE TAG ACTIONS ---------"),
                    questionary.Separator("Tags in the note body"),
                    {
                        "name": "Rename an inline tag",
                        "value": "rename_inline_tag",
                    },
                    {
                        "name": "Delete an inline tag",
                        "value": "delete_inline_tag",
                    },
                    questionary.Separator("\n-- METADATA ACTIONS -----------"),
                    questionary.Separator("Frontmatter or inline metadata"),
                    {"name": "Rename Key", "value": "rename_key"},
                    {"name": "Delete Key", "value": "delete_key"},
                    {"name": "Rename Value", "value": "rename_value"},
                    {"name": "Delete Value", "value": "delete_value"},
                    questionary.Separator("\n-- REVIEW/COMMIT CHANGES ------"),
                    {"name": "Review changes", "value": "review_changes"},
                    {"name": "Commit changes", "value": "commit_changes"},
                    questionary.Separator("-------------------------------"),
                    {"name": "Quit", "value": "abort"},
                ],
                use_shortcuts=False,
                style=self.custom_style,
            ).ask()

            if operation == "filter_notes":
                path_filter = questionary.text(
                    "Enter a regex to filter notes by path",
                    validate=lambda text: len(text) > 0,
                ).ask()
                if path_filter is None:
                    continue
                # Re-index the vault restricted to the new filter.
                self.load_vault(path_filter=path_filter)

            if operation == "all_metadata":
                self.vault.metadata.print_metadata()

            if operation == "backup_vault":
                self.vault.backup()

            if operation == "delete_backup":
                self.vault.delete_backup()

            if operation == "list_notes":
                self.vault.list_editable_notes()

            if operation == "rename_inline_tag":
                self.rename_inline_tag()

            if operation == "delete_inline_tag":
                self.delete_inline_tag()

            if operation == "rename_key":
                self.rename_key()

            if operation == "delete_key":
                self.delete_key()

            if operation == "rename_value":
                self.rename_value()

            if operation == "delete_value":
                self.delete_value()

            if operation == "review_changes":
                self.review_changes()

            if operation == "commit_changes":
                self.commit_changes()

            if operation == "abort":
                break

        print("Done!")
        return

    def rename_key(self) -> None:
        """Renames a key in the vault."""

        def validate_key(text: str) -> bool:
            """Validate the key name.

            True only when the key already exists in the vault metadata.
            """
            if self.vault.metadata.contains(text):
                return True
            return False

        def validate_new_key(text: str) -> bool:
            """Validate the tag name.

            Rejects empty input and input matched by PATTERNS.validate_key_text
            (presumably the disallowed-character pattern — confirm in models/patterns.py).
            """
            if PATTERNS.validate_key_text.search(text) is not None:
                return False
            if len(text) == 0:
                return False

            return True

        original_key = questionary.text(
            "Which key would you like to rename?",
            validate=validate_key,
        ).ask()
        if original_key is None:  # None when the user aborts the prompt
            return

        new_key = questionary.text(
            "New key name",
            validate=validate_new_key,
        ).ask()
        if new_key is None:
            return

        self.vault.rename_metadata(original_key, new_key)

    def rename_inline_tag(self) -> None:
        """Rename an inline tag."""

        def validate_new_tag(text: str) -> bool:
            """Validate the tag name.

            Rejects empty input and input matched by PATTERNS.validate_tag_text
            (presumably the disallowed-character pattern — confirm in models/patterns.py).
            """
            if PATTERNS.validate_tag_text.search(text) is not None:
                return False
            if len(text) == 0:
                return False

            return True

        original_tag = questionary.text(
            "Which tag would you like to rename?",
            # questionary convention: returning a string (rather than True)
            # marks the input invalid and shows the string as the error message.
            validate=lambda text: True
            if self.vault.contains_inline_tag(text)
            else "Tag not found in vault",
        ).ask()
        if original_tag is None:
            return

        new_tag = questionary.text(
            "New tag name",
            validate=validate_new_tag,
        ).ask()
        if new_tag is None:
            return

        self.vault.rename_inline_tag(original_tag, new_tag)
        alerts.success(f"Renamed [reverse]{original_tag}[/] to [reverse]{new_tag}[/]")
        return

    def delete_inline_tag(self) -> None:
        """Delete an inline tag."""
        tag = questionary.text(
            "Which tag would you like to delete?",
            validate=lambda text: True
            if self.vault.contains_inline_tag(text)
            else "Tag not found in vault",
        ).ask()
        if tag is None:  # None when the user aborts the prompt
            return

        self.vault.delete_inline_tag(tag)
        alerts.success(f"Deleted inline tag: {tag}")
        return

    def delete_key(self) -> None:
        """Delete a key from the vault."""
        # Re-prompt until the regex matches at least one key, the deletion
        # succeeds, or the user aborts.
        while True:
            key_to_delete = questionary.text("Regex for the key(s) you'd like to delete?").ask()
            if key_to_delete is None:
                return

            if not self.vault.metadata.contains(key_to_delete, is_regex=True):
                alerts.warning(f"No matching keys in the vault: {key_to_delete}")
                continue

            num_changed = self.vault.delete_metadata(key_to_delete)
            if num_changed == 0:
                alerts.warning(f"No notes found matching: [reverse]{key_to_delete}[/]")
                return

            alerts.success(
                f"Deleted keys matching: [reverse]{key_to_delete}[/] from {num_changed} notes"
            )
            break

        return

    def rename_value(self) -> None:
        """Rename a value in the vault."""
        key = questionary.text(
            "Which key contains the value to rename?",
            validate=lambda text: True
            if self.vault.metadata.contains(text)
            else "Key not found in vault",
        ).ask()
        if key is None:
            return

        value = questionary.text(
            "Which value would you like to rename?",
            validate=lambda text: True
            if self.vault.metadata.contains(key, text)
            else f"Value not found in {key}",
        ).ask()
        if value is None:
            return

        new_value = questionary.text(
            "New value?",
            # Reject values that already exist under the key.
            validate=lambda text: True
            if not self.vault.metadata.contains(key, text)
            else f"Value already exists in {key}",
        ).ask()

        # NOTE(review): unlike the prompts above, new_value is not checked for
        # None — aborting this prompt passes None to rename_metadata. Confirm
        # rename_metadata tolerates it or add `if new_value is None: return`.
        if self.vault.rename_metadata(key, value, new_value):
            alerts.success(f"Renamed [reverse]{key}: {value}[/] to [reverse]{key}: {new_value}[/]")

    def delete_value(self) -> None:
        """Delete a value from the vault."""
        # First loop: obtain a key regex that matches at least one key.
        while True:
            key = questionary.text(
                "Which key contains the value to delete?",
            ).ask()
            if key is None:
                return
            if not self.vault.metadata.contains(key, is_regex=True):
                alerts.warning(f"No keys in value match: {key}")
                continue
            break

        # Second loop: obtain a value regex that matches under that key, then delete.
        while True:
            value = questionary.text(
                "Regex for the value to delete",
            ).ask()
            if value is None:
                return
            if not self.vault.metadata.contains(key, value, is_regex=True):
                alerts.warning(f"No matching key value pairs found in the vault: {key}: {value}")
                continue

            num_changed = self.vault.delete_metadata(key, value)
            if num_changed == 0:
                alerts.warning(f"No notes found matching: [reverse]{key}: {value}[/]")
                return

            alerts.success(
                f"Deleted {num_changed} entries matching: [reverse]{key}[/]: [reverse]{value}[/]"
            )

            break

        return

    def review_changes(self) -> None:
        """Review all changes in the vault."""
        changed_notes = self.vault.get_changed_notes()

        if len(changed_notes) == 0:
            alerts.info("No changes to review.")
            return

        print(f"\nFound {len(changed_notes)} changed notes in the vault.\n")
        answer = questionary.confirm("View diffs of individual files?", default=False).ask()
        if not answer:
            return

        # Build one selectable entry per changed note; "value" is the index into
        # changed_notes (display numbers start at 1, hence n - 1).
        choices: list[dict[str, Any] | questionary.Separator] = [questionary.Separator()]
        for n, note in enumerate(changed_notes, start=1):
            _selection = {
                "name": f"{n}: {note.note_path.relative_to(self.vault.vault_path)}",
                "value": n - 1,
            }
            choices.append(_selection)

        choices.append(questionary.Separator())
        choices.append({"name": "Return", "value": "skip"})

        while True:
            # NOTE(review): prompt text has a typo — "a new" should read "a note".
            note_to_review = questionary.select(
                "Select a new to view the diff.",
                choices=choices,
                use_shortcuts=False,
                style=self.custom_style,
            ).ask()
            if note_to_review is None or note_to_review == "skip":
                break
            changed_notes[note_to_review].print_diff()

    def commit_changes(self) -> None:
        """Write all changes to disk."""
        changed_notes = self.vault.get_changed_notes()

        if len(changed_notes) == 0:
            print("\n")
            alerts.notice("No changes to commit.\n")
            return

        backup = questionary.confirm("Create backup before committing changes").ask()
        if backup is None:  # user aborted the prompt
            return
        if backup:
            self.vault.backup()

        if questionary.confirm(f"Commit {len(changed_notes)} changed files to disk?").ask():

            self.vault.write()
            alerts.success("Changes committed to disk. Exiting.")
            # NOTE(review): typer.Exit() is instantiated but never raised, so
            # this line has no effect — should likely be `raise typer.Exit()`.
            typer.Exit()

        return
|
||||||
505
src/obsidian_metadata/models/metadata.py
Normal file
505
src/obsidian_metadata/models/metadata.py
Normal file
@@ -0,0 +1,505 @@
|
|||||||
|
"""Work with metadata items."""
|
||||||
|
|
||||||
|
import re
|
||||||
|
from io import StringIO
|
||||||
|
|
||||||
|
from rich import print
|
||||||
|
from rich.columns import Columns
|
||||||
|
from rich.console import Console
|
||||||
|
from rich.table import Table
|
||||||
|
from ruamel.yaml import YAML
|
||||||
|
|
||||||
|
from obsidian_metadata._utils import (
|
||||||
|
clean_dictionary,
|
||||||
|
dict_contains,
|
||||||
|
dict_values_to_lists_strings,
|
||||||
|
remove_markdown_sections,
|
||||||
|
)
|
||||||
|
from obsidian_metadata.models import Patterns # isort: ignore
|
||||||
|
|
||||||
|
# Module-level shared instance of the project's compiled regex patterns.
PATTERNS = Patterns()

# Pseudo-key used for in-text tags — presumably groups body tags alongside real
# metadata keys elsewhere in the package; not referenced in this visible chunk.
INLINE_TAG_KEY: str = "Inline Tags"
|
||||||
|
|
||||||
|
|
||||||
|
class VaultMetadata:
    """Representation of all Metadata in the Vault."""

    def __init__(self) -> None:
        # Aggregated metadata: key -> sorted list of unique values.
        self.dict: dict[str, list[str]] = {}

    def __repr__(self) -> str:
        """Representation of all metadata."""
        return str(self.dict)

    def add_metadata(self, metadata: dict[str, list[str]]) -> None:
        """Add metadata to the vault. Takes a dictionary as input and merges it with the existing metadata. Does not overwrite existing keys.

        Args:
            metadata (dict): Metadata to add.
        """
        existing_metadata = self.dict

        new_metadata = clean_dictionary(metadata)

        for k, v in new_metadata.items():
            if k in existing_metadata:
                if isinstance(v, list):
                    existing_metadata[k].extend(v)
                # NOTE(review): when the key exists and the incoming value is
                # not a list, the incoming value is silently dropped — confirm
                # this is intended.
            else:
                existing_metadata[k] = v

        # De-duplicate and sort values (lists directly; one level of nested
        # dict-of-lists is also handled).
        for k, v in existing_metadata.items():
            if isinstance(v, list):
                existing_metadata[k] = sorted(set(v))
            elif isinstance(v, dict):
                for kk, vv in v.items():
                    if isinstance(vv, list):
                        v[kk] = sorted(set(vv))

        self.dict = dict(sorted(existing_metadata.items()))

    def print_keys(self) -> None:
        """Print all metadata keys."""
        columns = Columns(
            sorted(self.dict.keys()),
            equal=True,
            expand=True,
            title="All metadata keys in Obsidian vault",
        )
        print(columns)

    def print_tags(self) -> None:
        """Print all tags.

        NOTE(review): assumes self.dict has a "tags" key; raises KeyError
        otherwise — confirm callers guarantee the key exists.
        """
        columns = Columns(
            sorted(self.dict["tags"]),
            equal=True,
            expand=True,
            title="All tags in Obsidian vault",
        )
        print(columns)

    def print_metadata(self) -> None:
        """Print all metadata as a two-column table (key | values)."""
        table = Table(show_footer=False, show_lines=True)
        table.add_column("Keys")
        table.add_column("Values")
        for key, value in sorted(self.dict.items()):
            # List values are sorted and joined one-per-line; anything else is
            # rendered via str().
            values: str | dict[str, list[str]] = (
                "\n".join(sorted(value)) if isinstance(value, list) else value
            )
            table.add_row(f"[bold]{key}[/]", str(values))
        Console().print(table)

    def contains(self, key: str, value: str = None, is_regex: bool = False) -> bool:
        """Check if a key and/or a value exists in the metadata.

        Args:
            key (str): Key to check.
            value (str, optional): Value to check.
            is_regex (bool, optional): Use regex to check. Defaults to False.

        Returns:
            bool: True if the key exists.
        """
        return dict_contains(self.dict, key, value, is_regex)

    def delete(self, key: str, value_to_delete: str = None) -> bool:
        """Delete a key or a key's value from the metadata. Regex is supported to allow deleting more than one key or value.

        Args:
            key (str): Key to check.
            value_to_delete (str, optional): Value to delete.

        Returns:
            bool: True if a value was deleted
        """
        # Work on a copy and compare at the end so the return value reflects
        # whether anything actually changed.
        new_dict = self.dict.copy()

        if value_to_delete is None:
            # Delete every key matching the `key` regex.
            for _k in list(new_dict):
                if re.search(key, _k):
                    del new_dict[_k]
        else:
            # Remove matching values from every key matching the `key` regex.
            # NOTE(review): re-sorting here can report a "change" when only the
            # stored ordering differed — confirm values are always kept sorted.
            for _k, _v in new_dict.items():
                if re.search(key, _k):
                    new_values = [x for x in _v if not re.search(value_to_delete, x)]
                    new_dict[_k] = sorted(new_values)

        if new_dict != self.dict:
            self.dict = dict(new_dict)
            return True

        return False

    def rename(self, key: str, value_1: str, value_2: str = None) -> bool:
        """Replace a value in the frontmatter.

        Args:
            key (str): Key to check.
            value_1 (str): `With value_2` this is the value to rename. If `value_2` is None this is the renamed key
            value_2 (str, Optional): New value.

        Returns:
            bool: True if a value was renamed
        """
        # Key-rename mode: refuse when the new key name already exists.
        if value_2 is None:
            if key in self.dict and value_1 not in self.dict:
                self.dict[value_1] = self.dict.pop(key)
                return True
            return False

        # Value-rename mode: replace value_1 with value_2 under `key`,
        # de-duplicating and re-sorting the result.
        if key in self.dict and value_1 in self.dict[key]:
            self.dict[key] = sorted({value_2 if x == value_1 else x for x in self.dict[key]})
            return True

        return False
|
||||||
|
|
||||||
|
|
||||||
|
class Frontmatter:
    """Representation of frontmatter metadata."""

    def __init__(self, file_content: str):
        # Parsed frontmatter, plus a pristine copy used by has_changes().
        # NOTE(review): .copy() is shallow — safe while mutations replace
        # values (as delete()/rename() do) rather than mutating stored lists
        # in place; confirm no caller mutates the lists.
        self.dict: dict[str, list[str]] = self._grab_note_frontmatter(file_content)
        self.dict_original: dict[str, list[str]] = self.dict.copy()

    def __repr__(self) -> str:  # pragma: no cover
        """Representation of the frontmatter.

        Returns:
            str: frontmatter
        """
        return f"Frontmatter(frontmatter={self.dict})"

    def _grab_note_frontmatter(self, file_content: str) -> dict:
        """Grab metadata from a note.

        Args:
            file_content (str): Full text of the note to parse.

        Returns:
            dict: Metadata from the note; empty dict when no frontmatter
                block is found.
        """
        # The regex's "frontmatter" group captures the YAML between the ---
        # separators; .search() returns None (→ AttributeError) when absent.
        try:
            frontmatter_block: str = PATTERNS.frontmatt_block_no_separators.search(
                file_content
            ).group("frontmatter")
        except AttributeError:
            return {}

        yaml = YAML(typ="safe")
        frontmatter: dict = yaml.load(frontmatter_block)

        # Keys with no value parse as None; normalize them to empty lists.
        for k in frontmatter:
            if frontmatter[k] is None:
                frontmatter[k] = []

        return dict_values_to_lists_strings(frontmatter, strip_null_values=True)

    def contains(self, key: str, value: str = None, is_regex: bool = False) -> bool:
        """Check if a key or value exists in the metadata.

        Args:
            key (str): Key to check.
            value (str, optional): Value to check.
            is_regex (bool, optional): Use regex to check. Defaults to False.

        Returns:
            bool: True if the key exists.
        """
        return dict_contains(self.dict, key, value, is_regex)

    def rename(self, key: str, value_1: str, value_2: str = None) -> bool:
        """Replace a value in the frontmatter.

        Args:
            key (str): Key to check.
            value_1 (str): `With value_2` this is the value to rename. If `value_2` is None this is the renamed key
            value_2 (str, Optional): New value.

        Returns:
            bool: True if a value was renamed
        """
        # Key-rename mode: refuse when the new key name already exists.
        if value_2 is None:
            if key in self.dict and value_1 not in self.dict:
                self.dict[value_1] = self.dict.pop(key)
                return True
            return False

        # Value-rename mode: replace value_1 with value_2 under `key`.
        if key in self.dict and value_1 in self.dict[key]:
            self.dict[key] = sorted({value_2 if x == value_1 else x for x in self.dict[key]})
            return True

        return False

    def delete(self, key: str, value_to_delete: str = None) -> bool:
        """Delete a value or key in the frontmatter. Regex is supported to allow deleting more than one key or value.

        Args:
            key (str): If no value, key to delete. If value, key containing the value.
            value_to_delete (str, optional): Value to delete.

        Returns:
            bool: True if a value was deleted
        """
        # Work on a copy and compare at the end so the return value reflects
        # whether anything actually changed.
        new_dict = dict(self.dict)

        if value_to_delete is None:
            # Delete every key matching the `key` regex.
            for _k in list(new_dict):
                if re.search(key, _k):
                    del new_dict[_k]
        else:
            # Remove matching values from every key matching the `key` regex.
            for _k, _v in new_dict.items():
                if re.search(key, _k):
                    new_values = [x for x in _v if not re.search(value_to_delete, x)]
                    new_dict[_k] = sorted(new_values)

        if new_dict != self.dict:
            self.dict = dict(new_dict)
            return True

        return False

    def has_changes(self) -> bool:
        """Check if the frontmatter has changes.

        Returns:
            bool: True if the frontmatter has changes.
        """
        return self.dict != self.dict_original

    def to_yaml(self, sort_keys: bool = False) -> str:
        """Return the frontmatter as a YAML string.

        Args:
            sort_keys (bool, optional): Sort the keys. Defaults to False.

        Returns:
            str: Frontmatter as a YAML string.
        """
        dict_to_dump = self.dict.copy()
        for k in dict_to_dump:
            # Empty lists dump as YAML null; single-item lists collapse to a
            # scalar so the emitted YAML matches common Obsidian style.
            if dict_to_dump[k] == []:
                dict_to_dump[k] = None
            if isinstance(dict_to_dump[k], list) and len(dict_to_dump[k]) == 1:
                new_val = dict_to_dump[k][0]
                dict_to_dump[k] = new_val  # type: ignore [assignment]

        # Converting stream to string from https://stackoverflow.com/questions/47614862/best-way-to-use-ruamel-yaml-to-dump-yaml-to-string-not-to-stream/63179923#63179923

        if sort_keys:
            dict_to_dump = dict(sorted(dict_to_dump.items()))

        yaml = YAML()
        yaml.indent(mapping=2, sequence=4, offset=2)
        string_stream = StringIO()
        yaml.dump(dict_to_dump, string_stream)
        yaml_value = string_stream.getvalue()
        string_stream.close()
        return yaml_value
|
||||||
|
|
||||||
|
|
||||||
|
class InlineMetadata:
|
||||||
|
"""Representation of inline metadata in the form of `key:: value`."""
|
||||||
|
|
||||||
|
def __init__(self, file_content: str):
|
||||||
|
|
||||||
|
self.dict: dict[str, list[str]] = self._grab_inline_metadata(file_content)
|
||||||
|
self.dict_original: dict[str, list[str]] = self.dict.copy()
|
||||||
|
|
||||||
|
def __repr__(self) -> str: # pragma: no cover
|
||||||
|
"""Representation of inline metadata.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: inline metadata
|
||||||
|
"""
|
||||||
|
return f"InlineMetadata(inline_metadata={self.dict})"
|
||||||
|
|
||||||
|
def _grab_inline_metadata(self, file_content: str) -> dict[str, list[str]]:
|
||||||
|
"""Grab inline metadata from a note.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict[str, str]: Inline metadata from the note.
|
||||||
|
"""
|
||||||
|
content = remove_markdown_sections(
|
||||||
|
file_content,
|
||||||
|
strip_codeblocks=True,
|
||||||
|
strip_inlinecode=True,
|
||||||
|
strip_frontmatter=True,
|
||||||
|
)
|
||||||
|
all_results = PATTERNS.find_inline_metadata.findall(content)
|
||||||
|
stripped_null_values = [tuple(filter(None, x)) for x in all_results]
|
||||||
|
|
||||||
|
inline_metadata: dict[str, list[str]] = {}
|
||||||
|
for (k, v) in stripped_null_values:
|
||||||
|
if k in inline_metadata:
|
||||||
|
inline_metadata[k].append(str(v))
|
||||||
|
else:
|
||||||
|
inline_metadata[k] = [str(v)]
|
||||||
|
|
||||||
|
return clean_dictionary(inline_metadata)
|
||||||
|
|
||||||
|
def contains(self, key: str, value: str = None, is_regex: bool = False) -> bool:
|
||||||
|
"""Check if a key or value exists in the inline metadata.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key (str): Key to check.
|
||||||
|
value (str, Optional): Value to check.
|
||||||
|
is_regex (bool, optional): If True, key and value are treated as regex. Defaults to False.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if the key exists.
|
||||||
|
"""
|
||||||
|
return dict_contains(self.dict, key, value, is_regex)
|
||||||
|
|
||||||
|
def rename(self, key: str, value_1: str, value_2: str = None) -> bool:
    """Rename a key or replace a value in the inline metadata.

    Args:
        key (str): Key to look up.
        value_1 (str): With `value_2`, the value to rename. Without it, the new key name.
        value_2 (str, Optional): Replacement value.

    Returns:
        bool: True if a key or value was renamed.
    """
    # Key rename: move the values under the new key name, but never
    # clobber a key that already exists.
    if value_2 is None:
        if key not in self.dict or value_1 in self.dict:
            return False
        self.dict[value_1] = self.dict.pop(key)
        return True

    # Value rename: swap value_1 for value_2 within the key's value list.
    if key not in self.dict or value_1 not in self.dict[key]:
        return False
    replaced = {value_2 if item == value_1 else item for item in self.dict[key]}
    self.dict[key] = sorted(replaced)
    return True
|
||||||
|
|
||||||
|
def delete(self, key: str, value_to_delete: str = None) -> bool:
    """Delete a key or value from the inline metadata.

    Regex is supported so more than one key or value can be removed at once.

    Args:
        key (str): If no value is given, regex of key(s) to delete. Otherwise, regex of key(s) containing the value.
        value_to_delete (str, optional): Regex of value(s) to delete.

    Returns:
        bool: True if anything was deleted.
    """
    working = dict(self.dict)

    if value_to_delete is None:
        # Drop every key whose name matches the regex.
        for candidate in list(working):
            if re.search(key, candidate):
                del working[candidate]
    else:
        # Within matching keys, drop every value matching the regex.
        for candidate, values in working.items():
            if re.search(key, candidate):
                working[candidate] = sorted(
                    v for v in values if not re.search(value_to_delete, v)
                )

    if working == self.dict:
        return False

    self.dict = dict(working)
    return True
|
||||||
|
|
||||||
|
def has_changes(self) -> bool:
    """Report whether the inline metadata differs from its original parsed state.

    Returns:
        bool: True if the metadata has been modified.
    """
    return not (self.dict == self.dict_original)
|
||||||
|
|
||||||
|
|
||||||
|
class InlineTags:
|
||||||
|
"""Representation of inline tags."""
|
||||||
|
|
||||||
|
def __init__(self, file_content: str):
|
||||||
|
|
||||||
|
self.metadata_key = INLINE_TAG_KEY
|
||||||
|
self.list: list[str] = self._grab_inline_tags(file_content)
|
||||||
|
self.list_original: list[str] = self.list.copy()
|
||||||
|
|
||||||
|
def __repr__(self) -> str: # pragma: no cover
|
||||||
|
"""Representation of the inline tags.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: inline tags
|
||||||
|
"""
|
||||||
|
return f"InlineTags(tags={self.list})"
|
||||||
|
|
||||||
|
def _grab_inline_tags(self, file_content: str) -> list[str]:
|
||||||
|
"""Grab inline tags from a note.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_content (str): Total contents of the note file (frontmatter and content).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[str]: Inline tags from the note.
|
||||||
|
"""
|
||||||
|
return sorted(
|
||||||
|
PATTERNS.find_inline_tags.findall(
|
||||||
|
remove_markdown_sections(
|
||||||
|
file_content,
|
||||||
|
strip_codeblocks=True,
|
||||||
|
strip_inlinecode=True,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
def contains(self, tag: str, is_regex: bool = False) -> bool:
    """Check whether a tag exists in the inline tags.

    Args:
        tag (str): Tag to look for.
        is_regex (bool, optional): If True, `tag` is treated as a regex. Defaults to False.

    Returns:
        bool: True if the tag is present.
    """
    if is_regex is True:
        pattern = re.compile(tag)
        return any(pattern.search(existing) for existing in self.list)

    return tag in self.list
|
||||||
|
|
||||||
|
def rename(self, old_tag: str, new_tag: str) -> bool:
    """Rename an inline tag.

    Args:
        old_tag (str): Existing tag to replace.
        new_tag (str): Replacement tag.

    Returns:
        bool: True if the tag was renamed.
    """
    if old_tag not in self.list:
        return False

    renamed = [new_tag if current == old_tag else current for current in self.list]
    self.list = sorted(renamed)
    return True
|
||||||
|
|
||||||
|
def delete(self, tag_to_delete: str) -> bool:
    """Delete inline tags matching a regex.

    Regex is supported so more than one tag can be removed at once.

    Args:
        tag_to_delete (str): Regex of tag(s) to delete.

    Returns:
        bool: True if at least one tag was deleted.
    """
    remaining = sorted(t for t in self.list if not re.search(tag_to_delete, t))

    if remaining == self.list:
        return False

    self.list = remaining
    return True
|
||||||
|
|
||||||
|
def has_changes(self) -> bool:
    """Report whether the inline tags differ from their original parsed state.

    Returns:
        bool: True if the tags have been modified.
    """
    return not (self.list == self.list_original)
|
||||||
367
src/obsidian_metadata/models/notes.py
Normal file
367
src/obsidian_metadata/models/notes.py
Normal file
@@ -0,0 +1,367 @@
|
|||||||
|
"""Representation of notes and in the vault."""
|
||||||
|
|
||||||
|
|
||||||
|
import difflib
|
||||||
|
import re
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import rich.repr
|
||||||
|
import typer
|
||||||
|
from rich import print
|
||||||
|
|
||||||
|
from obsidian_metadata._utils import alerts
|
||||||
|
from obsidian_metadata._utils.alerts import logger as log
|
||||||
|
from obsidian_metadata.models import (
|
||||||
|
Frontmatter,
|
||||||
|
InlineMetadata,
|
||||||
|
InlineTags,
|
||||||
|
Patterns,
|
||||||
|
)
|
||||||
|
|
||||||
|
PATTERNS = Patterns()
|
||||||
|
|
||||||
|
|
||||||
|
@rich.repr.auto
|
||||||
|
class Note:
|
||||||
|
"""Representation of a note in the vault.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
note_path (Path): Path to the note file.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
note_path (Path): Path to the note file.
|
||||||
|
dry_run (bool): Whether to run in dry-run mode.
|
||||||
|
file_content (str): Total contents of the note file (frontmatter and content).
|
||||||
|
frontmatter (dict): Frontmatter of the note.
|
||||||
|
inline_tags (list): List of inline tags in the note.
|
||||||
|
inline_metadata (dict): Dictionary of inline metadata in the note.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, note_path: Path, dry_run: bool = False):
    """Load a note from disk and parse its metadata.

    Args:
        note_path (Path): Path to the note file.
        dry_run (bool): Whether to run in dry-run mode.

    Raises:
        typer.Exit: If the note file does not exist.
    """
    log.trace(f"Creating Note object for {note_path}")
    self.note_path: Path = Path(note_path)
    self.dry_run: bool = dry_run

    try:
        # FIX: the file was previously opened twice — a `with self.note_path.open():`
        # block whose handle was never used, wrapping a separate read_text() call.
        # read_text() opens and closes the file itself.
        self.file_content: str = self.note_path.read_text()
    except FileNotFoundError as e:
        alerts.error(f"Note {self.note_path} not found. Exiting")
        raise typer.Exit(code=1) from e

    self.frontmatter: Frontmatter = Frontmatter(self.file_content)
    self.inline_tags: InlineTags = InlineTags(self.file_content)
    self.inline_metadata: InlineMetadata = InlineMetadata(self.file_content)
    # Snapshot of the content at load time, used by has_changes()/print_diff().
    self.original_file_content: str = self.file_content
|
||||||
|
|
||||||
|
def __rich_repr__(self) -> rich.repr.Result: # pragma: no cover
|
||||||
|
"""Define rich representation of Vault."""
|
||||||
|
yield "note_path", self.note_path
|
||||||
|
yield "dry_run", self.dry_run
|
||||||
|
yield "frontmatter", self.frontmatter
|
||||||
|
yield "inline_tags", self.inline_tags
|
||||||
|
yield "inline_metadata", self.inline_metadata
|
||||||
|
|
||||||
|
def append(self, string_to_append: str, allow_multiple: bool = False) -> None:
    """Append a string to the end of the note's content.

    Args:
        string_to_append (str): String to append.
        allow_multiple (bool): If False, skip the append when the string already occurs anywhere in the note.
    """
    already_present = re.search(re.escape(string_to_append), self.file_content) is not None

    if allow_multiple or not already_present:
        self.file_content = f"{self.file_content}\n{string_to_append}"
|
||||||
|
|
||||||
|
def commit_changes(self) -> None:
|
||||||
|
"""Commits changes to the note to disk."""
|
||||||
|
# TODO: rewrite frontmatter if it has changed
|
||||||
|
pass
|
||||||
|
|
||||||
|
def contains_inline_tag(self, tag: str, is_regex: bool = False) -> bool:
|
||||||
|
"""Check if a note contains the specified inline tag.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tag (str): Tag to check for.
|
||||||
|
is_regex (bool, optional): Whether to use regex to match the tag.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: Whether the note has inline tags.
|
||||||
|
"""
|
||||||
|
return self.inline_tags.contains(tag, is_regex=is_regex)
|
||||||
|
|
||||||
|
def contains_metadata(self, key: str, value: str = None, is_regex: bool = False) -> bool:
|
||||||
|
"""Check if a note has a key or a key-value pair in its metadata.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key (str): Key to check for.
|
||||||
|
value (str, optional): Value to check for.
|
||||||
|
is_regex (bool, optional): Whether to use regex to match the key/value.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: Whether the note contains the key or key-value pair.
|
||||||
|
"""
|
||||||
|
if value is None:
|
||||||
|
if self.frontmatter.contains(key, is_regex=is_regex) or self.inline_metadata.contains(
|
||||||
|
key, is_regex=is_regex
|
||||||
|
):
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
if self.frontmatter.contains(
|
||||||
|
key, value, is_regex=is_regex
|
||||||
|
) or self.inline_metadata.contains(key, value, is_regex=is_regex):
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _delete_inline_metadata(self, key: str, value: str = None) -> None:
|
||||||
|
"""Deletes an inline metadata key/value pair from the text of the note. This method does not remove the key/value from the metadata attribute of the note.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key (str): Key to delete.
|
||||||
|
value (str, optional): Value to delete.
|
||||||
|
"""
|
||||||
|
all_results = PATTERNS.find_inline_metadata.findall(self.file_content)
|
||||||
|
stripped_null_values = [tuple(filter(None, x)) for x in all_results]
|
||||||
|
|
||||||
|
for (_k, _v) in stripped_null_values:
|
||||||
|
if re.search(key, _k):
|
||||||
|
if value is None:
|
||||||
|
_k = re.escape(_k)
|
||||||
|
_v = re.escape(_v)
|
||||||
|
self.sub(rf"\[?{_k}:: ?{_v}]?", "", is_regex=True)
|
||||||
|
return
|
||||||
|
|
||||||
|
if re.search(value, _v):
|
||||||
|
_k = re.escape(_k)
|
||||||
|
_v = re.escape(_v)
|
||||||
|
self.sub(rf"({_k}::) ?{_v}", r"\1", is_regex=True)
|
||||||
|
|
||||||
|
def delete_inline_tag(self, tag: str) -> bool:
|
||||||
|
"""Deletes an inline tag from the `inline_tags` attribute AND removes the tag from the text of the note if it exists.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tag (str): Tag to delete.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: Whether the tag was deleted.
|
||||||
|
"""
|
||||||
|
new_list = self.inline_tags.list.copy()
|
||||||
|
|
||||||
|
for _t in new_list:
|
||||||
|
if re.search(tag, _t):
|
||||||
|
_t = re.escape(_t)
|
||||||
|
self.sub(rf"#{_t}([ \|,;:\*\(\)\[\]\\\.\n#&])", r"\1", is_regex=True)
|
||||||
|
self.inline_tags.delete(tag)
|
||||||
|
|
||||||
|
if new_list != self.inline_tags.list:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def delete_metadata(self, key: str, value: str = None) -> bool:
|
||||||
|
"""Deletes a key or key-value pair from the note's metadata. Regex is supported.
|
||||||
|
|
||||||
|
If no value is provided, will delete an entire key.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key (str): Key to delete.
|
||||||
|
value (str, optional): Value to delete.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: Whether the key or key-value pair was deleted.
|
||||||
|
"""
|
||||||
|
changed_value: bool = False
|
||||||
|
|
||||||
|
if value is None:
|
||||||
|
if self.frontmatter.delete(key):
|
||||||
|
self.replace_frontmatter()
|
||||||
|
changed_value = True
|
||||||
|
if self.inline_metadata.delete(key):
|
||||||
|
self._delete_inline_metadata(key, value)
|
||||||
|
changed_value = True
|
||||||
|
else:
|
||||||
|
if self.frontmatter.delete(key, value):
|
||||||
|
self.replace_frontmatter()
|
||||||
|
changed_value = True
|
||||||
|
if self.inline_metadata.delete(key, value):
|
||||||
|
self._delete_inline_metadata(key, value)
|
||||||
|
changed_value = True
|
||||||
|
|
||||||
|
if changed_value:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def has_changes(self) -> bool:
    """Check whether any part of the note has been modified since it was loaded.

    Returns:
        bool: True if the frontmatter, inline tags, inline metadata, or raw
        file content differ from their state at load time.
    """
    return (
        self.frontmatter.has_changes()
        or self.inline_tags.has_changes()
        or self.inline_metadata.has_changes()
        or self.file_content != self.original_file_content
    )
|
||||||
|
|
||||||
|
def print_note(self) -> None:
|
||||||
|
"""Prints the note to the console."""
|
||||||
|
print(self.file_content)
|
||||||
|
|
||||||
|
def print_diff(self) -> None:
|
||||||
|
"""Prints a diff of the note's original state and it's new state."""
|
||||||
|
a = self.original_file_content.splitlines()
|
||||||
|
b = self.file_content.splitlines()
|
||||||
|
|
||||||
|
diff = difflib.Differ()
|
||||||
|
result = list(diff.compare(a, b))
|
||||||
|
for line in result:
|
||||||
|
if line.startswith("+"):
|
||||||
|
print(f"[green]{line}[/]")
|
||||||
|
elif line.startswith("-"):
|
||||||
|
print(f"[red]{line}[/]")
|
||||||
|
|
||||||
|
def sub(self, pattern: str, replacement: str, is_regex: bool = False) -> None:
    """Substitute all occurrences of a pattern within the note's content.

    Args:
        pattern (str): The pattern to replace (plain text or regular expression).
        replacement (str): What to replace the pattern with.
        is_regex (bool): Whether the pattern is a regex pattern or plain text.
    """
    if not is_regex:
        pattern = re.escape(pattern)

    # BUG FIX: re.MULTILINE was previously passed as the positional `count`
    # argument of re.sub (count=8), which silently capped replacements at 8
    # occurrences and never actually enabled multiline mode. It must be
    # passed via the `flags=` keyword.
    self.file_content = re.sub(pattern, replacement, self.file_content, flags=re.MULTILINE)
|
||||||
|
|
||||||
|
def _rename_inline_metadata(self, key: str, value_1: str, value_2: str = None) -> None:
|
||||||
|
"""Replaces the inline metadata in the note with the current inline metadata object.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key (str): Key to rename.
|
||||||
|
value_1 (str): Value to replace OR new key name (if value_2 is None).
|
||||||
|
value_2 (str, optional): New value.
|
||||||
|
|
||||||
|
"""
|
||||||
|
all_results = PATTERNS.find_inline_metadata.findall(self.file_content)
|
||||||
|
stripped_null_values = [tuple(filter(None, x)) for x in all_results]
|
||||||
|
|
||||||
|
for (_k, _v) in stripped_null_values:
|
||||||
|
if re.search(key, _k):
|
||||||
|
if value_2 is None:
|
||||||
|
if re.search(rf"{key}[^\w\d_-]+", _k):
|
||||||
|
key_text = re.split(r"[^\w\d_-]+$", _k)[0]
|
||||||
|
key_markdown = re.split(r"^[\w\d_-]+", _k)[1]
|
||||||
|
self.sub(
|
||||||
|
rf"{key_text}{key_markdown}::",
|
||||||
|
rf"{value_1}{key_markdown}::",
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self.sub(f"{_k}::", f"{value_1}::")
|
||||||
|
else:
|
||||||
|
if re.search(key, _k) and re.search(value_1, _v):
|
||||||
|
_k = re.escape(_k)
|
||||||
|
_v = re.escape(_v)
|
||||||
|
self.sub(f"{_k}:: ?{_v}", f"{_k}:: {value_2}", is_regex=True)
|
||||||
|
|
||||||
|
def rename_inline_tag(self, tag_1: str, tag_2: str) -> bool:
|
||||||
|
"""Renames an inline tag from the note ONLY if it's not in the metadata as well.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tag_1 (str): Tag to rename.
|
||||||
|
tag_2 (str): New tag name.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: Whether the tag was renamed.
|
||||||
|
"""
|
||||||
|
if tag_1 in self.inline_tags.list:
|
||||||
|
self.sub(
|
||||||
|
rf"#{tag_1}([ \|,;:\*\(\)\[\]\\\.\n#&])",
|
||||||
|
rf"#{tag_2}\1",
|
||||||
|
is_regex=True,
|
||||||
|
)
|
||||||
|
self.inline_tags.rename(tag_1, tag_2)
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def rename_metadata(self, key: str, value_1: str, value_2: str = None) -> bool:
|
||||||
|
"""Renames a key or key-value pair in the note's metadata.
|
||||||
|
|
||||||
|
If no value is provided, will rename an entire key.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key (str): Key to rename.
|
||||||
|
value_1 (str): Value to rename or new name of key if no value_2 is provided.
|
||||||
|
value_2 (str, optional): New value.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: Whether the note was updated.
|
||||||
|
"""
|
||||||
|
changed_value: bool = False
|
||||||
|
if value_2 is None:
|
||||||
|
if self.frontmatter.rename(key, value_1):
|
||||||
|
self.replace_frontmatter()
|
||||||
|
changed_value = True
|
||||||
|
if self.inline_metadata.rename(key, value_1):
|
||||||
|
self._rename_inline_metadata(key, value_1)
|
||||||
|
changed_value = True
|
||||||
|
else:
|
||||||
|
if self.frontmatter.rename(key, value_1, value_2):
|
||||||
|
self.replace_frontmatter()
|
||||||
|
changed_value = True
|
||||||
|
if self.inline_metadata.rename(key, value_1, value_2):
|
||||||
|
self._rename_inline_metadata(key, value_1, value_2)
|
||||||
|
changed_value = True
|
||||||
|
|
||||||
|
if changed_value:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
def replace_frontmatter(self, sort_keys: bool = False) -> None:
|
||||||
|
"""Replaces the frontmatter in the note with the current frontmatter object."""
|
||||||
|
try:
|
||||||
|
current_frontmatter = PATTERNS.frontmatt_block_with_separators.search(
|
||||||
|
self.file_content
|
||||||
|
).group("frontmatter")
|
||||||
|
except AttributeError:
|
||||||
|
current_frontmatter = None
|
||||||
|
|
||||||
|
if current_frontmatter is None and self.frontmatter.dict == {}:
|
||||||
|
return
|
||||||
|
|
||||||
|
new_frontmatter = self.frontmatter.to_yaml(sort_keys=sort_keys)
|
||||||
|
new_frontmatter = f"---\n{new_frontmatter}---\n"
|
||||||
|
|
||||||
|
if current_frontmatter is None:
|
||||||
|
self.file_content = new_frontmatter + self.file_content
|
||||||
|
return
|
||||||
|
|
||||||
|
self.sub(current_frontmatter, new_frontmatter)
|
||||||
|
|
||||||
|
def write(self, path: Path | None = None) -> None:
|
||||||
|
"""Writes the note's content to disk.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path (Path): Path to write the note to. Defaults to the note's path.
|
||||||
|
"""
|
||||||
|
p = self.note_path if path is None else path
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(p, "w") as f:
|
||||||
|
log.trace(f"Writing note {p} to disk")
|
||||||
|
f.write(self.file_content)
|
||||||
|
except FileNotFoundError as e:
|
||||||
|
alerts.error(f"Note {p} not found. Exiting")
|
||||||
|
raise typer.Exit(code=1) from e
|
||||||
41
src/obsidian_metadata/models/patterns.py
Normal file
41
src/obsidian_metadata/models/patterns.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
"""Regexes for parsing frontmatter and note content."""
|
||||||
|
|
||||||
|
import re
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from typing import Pattern
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Patterns:
|
||||||
|
"""Regex patterns for parsing frontmatter and note content."""
|
||||||
|
|
||||||
|
find_inline_tags: Pattern[str] = re.compile(
|
||||||
|
r"""
|
||||||
|
(?:^|[ \|_,;:\*\(\)\[\]\\\.]) # Before tag is start of line or separator
|
||||||
|
\#([^ \|,;:\*\(\)\[\]\\\.\n#&]+) # Match tag until separator or end of line
|
||||||
|
""",
|
||||||
|
re.MULTILINE | re.X,
|
||||||
|
)
|
||||||
|
|
||||||
|
frontmatt_block_with_separators: Pattern[str] = re.compile(
|
||||||
|
r"^\s*(?P<frontmatter>---.*?---)", flags=re.DOTALL
|
||||||
|
)
|
||||||
|
frontmatt_block_no_separators: Pattern[str] = re.compile(
|
||||||
|
r"^\s*---(?P<frontmatter>.*?)---", flags=re.DOTALL
|
||||||
|
)
|
||||||
|
# This pattern will return a tuple of 4 values, two will be empty and will need to be stripped before processing further
|
||||||
|
find_inline_metadata: Pattern[str] = re.compile(
|
||||||
|
r""" # First look for in-text key values
|
||||||
|
(?:^\[| \[) # Find key with starting bracket
|
||||||
|
([-_\w\d\/\*\u263a-\U0001f645]+?)::[ ]? # Find key
|
||||||
|
(.*?)\] # Find value until closing bracket
|
||||||
|
| # Else look for key values at start of line
|
||||||
|
(?:^|[^ \w\d]+| \[) # Any non-word or non-digit character
|
||||||
|
([-_\w\d\/\*\u263a-\U0001f645]+?)::(?!\n)(?:[ ](?!\n))? # Capture the key if not a new line
|
||||||
|
(.*?)$ # Capture the value
|
||||||
|
""",
|
||||||
|
re.X | re.MULTILINE,
|
||||||
|
)
|
||||||
|
|
||||||
|
validate_tag_text: Pattern[str] = re.compile(r"[ \|,;:\*\(\)\[\]\\\.\n#&]")
|
||||||
|
validate_key_text: Pattern[str] = re.compile(r"[^-_\w\d\/\*\u263a-\U0001f645]")
|
||||||
302
src/obsidian_metadata/models/vault.py
Normal file
302
src/obsidian_metadata/models/vault.py
Normal file
@@ -0,0 +1,302 @@
|
|||||||
|
"""Obsidian vault representation."""
|
||||||
|
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import rich.repr
|
||||||
|
from rich.console import Console
|
||||||
|
from rich.progress import Progress, SpinnerColumn, TextColumn
|
||||||
|
from rich.prompt import Confirm
|
||||||
|
from rich.table import Table
|
||||||
|
|
||||||
|
from obsidian_metadata._config import Config
|
||||||
|
from obsidian_metadata._utils import alerts
|
||||||
|
from obsidian_metadata._utils.alerts import logger as log
|
||||||
|
from obsidian_metadata.models import Note, VaultMetadata
|
||||||
|
|
||||||
|
|
||||||
|
@rich.repr.auto
|
||||||
|
class Vault:
|
||||||
|
"""Representation of the Obsidian vault.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
vault (Path): Path to the vault.
|
||||||
|
dry_run (bool): Whether to perform a dry run.
|
||||||
|
backup_path (Path): Path to the backup of the vault.
|
||||||
|
new_vault (Path): Path to a new vault.
|
||||||
|
notes (list[Note]): List of all notes in the vault.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, config: Config, dry_run: bool = False, path_filter: str = None):
|
||||||
|
self.vault_path: Path = config.vault_path
|
||||||
|
self.dry_run: bool = dry_run
|
||||||
|
self.backup_path: Path = self.vault_path.parent / f"{self.vault_path.name}.bak"
|
||||||
|
self.new_vault_path: Path = self.vault_path.parent / f"{self.vault_path.name}.new"
|
||||||
|
self.exclude_paths: list[Path] = []
|
||||||
|
self.metadata = VaultMetadata()
|
||||||
|
for p in config.exclude_paths:
|
||||||
|
self.exclude_paths.append(Path(self.vault_path / p))
|
||||||
|
|
||||||
|
self.path_filter = path_filter
|
||||||
|
self.note_paths = self._find_markdown_notes(path_filter)
|
||||||
|
|
||||||
|
with Progress(
|
||||||
|
SpinnerColumn(),
|
||||||
|
TextColumn("[progress.description]{task.description}"),
|
||||||
|
transient=True,
|
||||||
|
) as progress:
|
||||||
|
progress.add_task(description="Processing notes...", total=None)
|
||||||
|
self.notes: list[Note] = [
|
||||||
|
Note(note_path=p, dry_run=self.dry_run) for p in self.note_paths
|
||||||
|
]
|
||||||
|
for _note in self.notes:
|
||||||
|
self.metadata.add_metadata(_note.frontmatter.dict)
|
||||||
|
self.metadata.add_metadata(_note.inline_metadata.dict)
|
||||||
|
self.metadata.add_metadata({_note.inline_tags.metadata_key: _note.inline_tags.list})
|
||||||
|
|
||||||
|
def __rich_repr__(self) -> rich.repr.Result:
|
||||||
|
"""Define rich representation of Vault."""
|
||||||
|
yield "vault_path", self.vault_path
|
||||||
|
yield "dry_run", self.dry_run
|
||||||
|
yield "backup_path", self.backup_path
|
||||||
|
yield "new_vault", self.new_vault_path
|
||||||
|
yield "num_notes", self.num_notes()
|
||||||
|
yield "exclude_paths", self.exclude_paths
|
||||||
|
|
||||||
|
def _find_markdown_notes(self, path_filter: str = None) -> list[Path]:
|
||||||
|
"""Build list of all markdown files in the vault.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path_filter (str, optional): Regex to filter notes by path.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[Path]: List of paths to all matching files in the vault.
|
||||||
|
|
||||||
|
"""
|
||||||
|
notes_list = [
|
||||||
|
p.resolve()
|
||||||
|
for p in self.vault_path.glob("**/*")
|
||||||
|
if p.suffix in [".md", ".MD", ".markdown", ".MARKDOWN"]
|
||||||
|
and not any(item in p.parents for item in self.exclude_paths)
|
||||||
|
]
|
||||||
|
|
||||||
|
if path_filter is not None:
|
||||||
|
notes_list = [
|
||||||
|
p for p in notes_list if re.search(path_filter, str(p.relative_to(self.vault_path)))
|
||||||
|
]
|
||||||
|
|
||||||
|
return notes_list
|
||||||
|
|
||||||
|
def backup(self) -> None:
|
||||||
|
"""Backup the vault."""
|
||||||
|
log.debug("Backing up vault")
|
||||||
|
if self.dry_run:
|
||||||
|
alerts.dryrun(f"Backup up vault to: {self.backup_path}")
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
shutil.copytree(self.vault_path, self.backup_path)
|
||||||
|
|
||||||
|
except FileExistsError: # pragma: no cover
|
||||||
|
log.debug("Backup already exists")
|
||||||
|
if not Confirm.ask("Vault backup already exists. Overwrite?"):
|
||||||
|
alerts.info("Exiting backup not overwritten.")
|
||||||
|
return
|
||||||
|
|
||||||
|
log.debug("Overwriting backup")
|
||||||
|
shutil.rmtree(self.backup_path)
|
||||||
|
shutil.copytree(self.vault_path, self.backup_path)
|
||||||
|
|
||||||
|
alerts.success(f"Vault backed up to: {self.backup_path}")
|
||||||
|
|
||||||
|
def contains_inline_tag(self, tag: str, is_regex: bool = False) -> bool:
    """Check if any note in the vault contains the given inline tag.

    Args:
        tag (str): Tag to check for.
        is_regex (bool, optional): Whether to use regex to match the tag.

    Returns:
        bool: True if the tag is found in the vault.
    """
    # BUG FIX: `is_regex` was previously dropped when delegating to each
    # note (`_note.contains_inline_tag(tag)`), so regex lookups silently
    # fell back to exact-string matching.
    return any(_note.contains_inline_tag(tag, is_regex=is_regex) for _note in self.notes)
|
||||||
|
|
||||||
|
def contains_metadata(self, key: str, value: str = None, is_regex: bool = False) -> bool:
|
||||||
|
"""Check if vault contains the given metadata.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key (str): Key to check for. If value is None, will check vault for key.
|
||||||
|
value (str, optional): Value to check for.
|
||||||
|
is_regex (bool, optional): Whether to use regex to match key/value.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if tag is found in vault.
|
||||||
|
"""
|
||||||
|
if value is None:
|
||||||
|
return self.metadata.contains(key, is_regex=is_regex)
|
||||||
|
|
||||||
|
return self.metadata.contains(key, value, is_regex=is_regex)
|
||||||
|
|
||||||
|
def delete_backup(self) -> None:
|
||||||
|
"""Delete the vault backup."""
|
||||||
|
log.debug("Deleting vault backup")
|
||||||
|
if self.backup_path.exists() and self.dry_run is False:
|
||||||
|
shutil.rmtree(self.backup_path)
|
||||||
|
alerts.success("Backup deleted")
|
||||||
|
elif self.backup_path.exists() and self.dry_run is True:
|
||||||
|
alerts.dryrun("Delete backup")
|
||||||
|
else:
|
||||||
|
alerts.info("No backup found")
|
||||||
|
|
||||||
|
def delete_inline_tag(self, tag: str) -> bool:
|
||||||
|
"""Delete an inline tag in the vault.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tag (str): Tag to delete.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if tag was deleted.
|
||||||
|
"""
|
||||||
|
changes = False
|
||||||
|
|
||||||
|
for _note in self.notes:
|
||||||
|
if _note.delete_inline_tag(tag):
|
||||||
|
changes = True
|
||||||
|
|
||||||
|
if changes:
|
||||||
|
self.metadata.delete(self.notes[0].inline_tags.metadata_key, tag)
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def delete_metadata(self, key: str, value: str = None) -> int:
    """Delete metadata across every note in the vault.

    Args:
        key (str): Key to delete. Regex is supported.
        value (str, optional): Value to delete. Regex is supported.

    Returns:
        int: Number of notes that had metadata deleted.
    """
    changed_notes = [note for note in self.notes if note.delete_metadata(key, value)]

    if changed_notes:
        # Keep the vault-level metadata index in sync with the notes.
        self.metadata.delete(key, value)

    return len(changed_notes)
|
||||||
|
|
||||||
|
def get_changed_notes(self) -> list[Note]:
|
||||||
|
"""Returns a list of notes that have changes.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
list[Note]: List of notes that have changes.
|
||||||
|
"""
|
||||||
|
changed_notes = []
|
||||||
|
for _note in self.notes:
|
||||||
|
if _note.has_changes():
|
||||||
|
changed_notes.append(_note)
|
||||||
|
|
||||||
|
changed_notes = sorted(changed_notes, key=lambda x: x.note_path)
|
||||||
|
return changed_notes
|
||||||
|
|
||||||
|
def info(self) -> None:
|
||||||
|
"""Print information about the vault."""
|
||||||
|
log.debug("Printing vault info")
|
||||||
|
table = Table(title="Vault Info", show_header=False)
|
||||||
|
table.add_row("Vault", str(self.vault_path))
|
||||||
|
table.add_row("Notes being edited", str(self.num_notes()))
|
||||||
|
table.add_row("Notes excluded from editing", str(self.num_excluded_notes()))
|
||||||
|
if self.backup_path.exists():
|
||||||
|
table.add_row("Backup path", str(self.backup_path))
|
||||||
|
else:
|
||||||
|
table.add_row("Backup", "None")
|
||||||
|
table.add_row("Active path filter", str(self.path_filter))
|
||||||
|
table.add_row("Notes with updates", str(len(self.get_changed_notes())))
|
||||||
|
|
||||||
|
Console().print(table)
|
||||||
|
|
||||||
|
def list_editable_notes(self) -> None:
|
||||||
|
"""Print a list of notes within the scope that are being edited."""
|
||||||
|
for _note in self.notes:
|
||||||
|
print(_note.note_path.relative_to(self.vault_path))
|
||||||
|
|
||||||
|
def num_excluded_notes(self) -> int:
|
||||||
|
"""Count number of excluded notes."""
|
||||||
|
excluded_notes = [
|
||||||
|
p.resolve()
|
||||||
|
for p in self.vault_path.glob("**/*")
|
||||||
|
if p.suffix in [".md", ".MD", ".markdown", ".MARKDOWN"] and p not in self.note_paths
|
||||||
|
]
|
||||||
|
return len(excluded_notes)
|
||||||
|
|
||||||
|
def num_notes(self) -> int:
|
||||||
|
"""Number of notes in the vault.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
int: Number of notes in the vault.
|
||||||
|
"""
|
||||||
|
return len(self.notes)
|
||||||
|
|
||||||
|
def rename_metadata(self, key: str, value_1: str, value_2: str = None) -> bool:
|
||||||
|
"""Renames a key or key-value pair in the note's metadata.
|
||||||
|
|
||||||
|
If no value is provided, will rename an entire key.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key (str): Key to rename.
|
||||||
|
value_1 (str): Value to rename or new name of key if no value_2 is provided.
|
||||||
|
value_2 (str, optional): New value.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if metadata was renamed.
|
||||||
|
"""
|
||||||
|
changes = False
|
||||||
|
for _note in self.notes:
|
||||||
|
if _note.rename_metadata(key, value_1, value_2):
|
||||||
|
changes = True
|
||||||
|
|
||||||
|
if changes:
|
||||||
|
self.metadata.rename(key, value_1, value_2)
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def rename_inline_tag(self, old_tag: str, new_tag: str) -> bool:
|
||||||
|
"""Rename an inline tag in the vault.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
old_tag (str): Old tag name.
|
||||||
|
new_tag (str): New tag name.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if tag was renamed.
|
||||||
|
"""
|
||||||
|
changes = False
|
||||||
|
for _note in self.notes:
|
||||||
|
if _note.rename_inline_tag(old_tag, new_tag):
|
||||||
|
changes = True
|
||||||
|
|
||||||
|
if changes:
|
||||||
|
self.metadata.rename(self.notes[0].inline_tags.metadata_key, old_tag, new_tag)
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def write(self, new_vault: bool = False) -> None:
|
||||||
|
"""Write changes to the vault."""
|
||||||
|
log.debug("Writing changes to vault...")
|
||||||
|
if new_vault:
|
||||||
|
log.debug("Writing changes to backup")
|
||||||
|
for _note in self.notes:
|
||||||
|
_new_note_path: Path = Path(
|
||||||
|
self.new_vault_path / Path(_note.note_path).relative_to(self.vault_path)
|
||||||
|
)
|
||||||
|
log.debug(f"writing to {_new_note_path}")
|
||||||
|
_note.write(path=_new_note_path)
|
||||||
|
else:
|
||||||
|
for _note in self.notes:
|
||||||
|
log.debug(f"writing to {_note.note_path}")
|
||||||
|
_note.write()
|
||||||
1
tests/__init__.py
Normal file
1
tests/__init__.py
Normal file
@@ -0,0 +1 @@
|
|||||||
|
"""obsidian-metadata test suite."""
|
||||||
155
tests/alerts_test.py
Normal file
155
tests/alerts_test.py
Normal file
@@ -0,0 +1,155 @@
|
|||||||
|
# type: ignore
|
||||||
|
"""Test alerts and logging."""
|
||||||
|
import re
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from obsidian_metadata._utils import alerts
|
||||||
|
from obsidian_metadata._utils.alerts import logger as log
|
||||||
|
from tests.helpers import Regex
|
||||||
|
|
||||||
|
|
||||||
|
def test_dryrun(capsys):
|
||||||
|
"""Test dry run."""
|
||||||
|
alerts.dryrun("This prints in dry run")
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == "DRYRUN | This prints in dry run\n"
|
||||||
|
|
||||||
|
|
||||||
|
def test_success(capsys):
|
||||||
|
"""Test success."""
|
||||||
|
alerts.success("This prints in success")
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == "SUCCESS | This prints in success\n"
|
||||||
|
|
||||||
|
|
||||||
|
def test_error(capsys):
|
||||||
|
"""Test success."""
|
||||||
|
alerts.error("This prints in error")
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == "ERROR | This prints in error\n"
|
||||||
|
|
||||||
|
|
||||||
|
def test_warning(capsys):
|
||||||
|
"""Test warning."""
|
||||||
|
alerts.warning("This prints in warning")
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == "WARNING | This prints in warning\n"
|
||||||
|
|
||||||
|
|
||||||
|
def test_notice(capsys):
|
||||||
|
"""Test notice."""
|
||||||
|
alerts.notice("This prints in notice")
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == "NOTICE | This prints in notice\n"
|
||||||
|
|
||||||
|
|
||||||
|
def test_info(capsys):
|
||||||
|
"""Test info."""
|
||||||
|
alerts.info("This prints in info")
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == "INFO | This prints in info\n"
|
||||||
|
|
||||||
|
|
||||||
|
def test_dim(capsys):
|
||||||
|
"""Test info."""
|
||||||
|
alerts.dim("This prints in dim")
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == "This prints in dim\n"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
("verbosity", "log_to_file"),
|
||||||
|
[(0, False), (1, False), (2, True), (3, True)],
|
||||||
|
)
|
||||||
|
def test_logging(capsys, tmp_path, verbosity, log_to_file) -> None:
|
||||||
|
"""Test logging."""
|
||||||
|
tmp_log = tmp_path / "tmp.log"
|
||||||
|
logging = alerts.LoggerManager(
|
||||||
|
log_file=tmp_log,
|
||||||
|
verbosity=verbosity,
|
||||||
|
log_to_file=log_to_file,
|
||||||
|
)
|
||||||
|
|
||||||
|
assert logging.verbosity == verbosity
|
||||||
|
|
||||||
|
if verbosity >= 3:
|
||||||
|
assert logging.is_trace() is True
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == ""
|
||||||
|
|
||||||
|
assert logging.is_trace("trace text") is True
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == "trace text\n"
|
||||||
|
|
||||||
|
log.trace("This is Trace logging")
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.err == Regex(r"^TRACE \| This is Trace logging \([\w\._:]+:\d+\)$")
|
||||||
|
else:
|
||||||
|
assert logging.is_trace("trace text") is False
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out != "trace text\n"
|
||||||
|
|
||||||
|
log.trace("This is Trace logging")
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.err != Regex(r"^TRACE \| This is Trace logging \([\w\._:]+:\d+\)$")
|
||||||
|
|
||||||
|
if verbosity >= 2:
|
||||||
|
assert logging.is_debug() is True
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == ""
|
||||||
|
|
||||||
|
assert logging.is_debug("debug text") is True
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == "debug text\n"
|
||||||
|
|
||||||
|
log.debug("This is Debug logging")
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.err == Regex(r"^DEBUG \| This is Debug logging \([\w\._:]+:\d+\)$")
|
||||||
|
else:
|
||||||
|
assert logging.is_debug("debug text") is False
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out != "debug text\n"
|
||||||
|
|
||||||
|
log.debug("This is Debug logging")
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.err != Regex(r"^DEBUG \| This is Debug logging \([\w\._:]+:\d+\)$")
|
||||||
|
|
||||||
|
if verbosity >= 1:
|
||||||
|
assert logging.is_info() is True
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == ""
|
||||||
|
|
||||||
|
assert logging.is_info("info text") is True
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == "info text\n"
|
||||||
|
|
||||||
|
log.info("This is Info logging")
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.err == "INFO | This is Info logging\n"
|
||||||
|
else:
|
||||||
|
assert logging.is_info("info text") is False
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out != "info text\n"
|
||||||
|
|
||||||
|
log.info("This is Info logging")
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == ""
|
||||||
|
|
||||||
|
assert logging.is_default() is True
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == ""
|
||||||
|
|
||||||
|
assert logging.is_default("default text") is True
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == "default text\n"
|
||||||
|
|
||||||
|
if log_to_file:
|
||||||
|
assert tmp_log.exists() is True
|
||||||
|
log_file_content = tmp_log.read_text()
|
||||||
|
assert log_file_content == Regex(
|
||||||
|
r"^\d{4}-\d{2}-\d{2} \d+:\d+:\d+\.\d+ \| DEBUG \| [\w\.:]+:\d+ \- Logging to file:",
|
||||||
|
re.MULTILINE,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
assert tmp_log.exists() is False
|
||||||
16
tests/cli_test.py
Normal file
16
tests/cli_test.py
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
# type: ignore
|
||||||
|
"""Test obsidian-metadata CLI."""
|
||||||
|
|
||||||
|
from typer.testing import CliRunner
|
||||||
|
|
||||||
|
from obsidian_metadata.cli import app
|
||||||
|
from tests.helpers import Regex
|
||||||
|
|
||||||
|
runner = CliRunner()
|
||||||
|
|
||||||
|
|
||||||
|
def test_version() -> None:
|
||||||
|
"""Test printing version and then exiting."""
|
||||||
|
result = runner.invoke(app, ["--version"])
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert result.output == Regex(r"obsidian_metadata: v\d+\.\d+\.\d+$")
|
||||||
28
tests/config_test.py
Normal file
28
tests/config_test.py
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
# type: ignore
|
||||||
|
"""Tests for the configuration module."""
|
||||||
|
|
||||||
|
import re
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from obsidian_metadata._config import Config
|
||||||
|
|
||||||
|
|
||||||
|
def test_first_run(tmp_path):
|
||||||
|
"""Test creating a config on first run."""
|
||||||
|
config_file = Path(tmp_path / "config.toml")
|
||||||
|
vault_path = Path(tmp_path / "vault/")
|
||||||
|
vault_path.mkdir()
|
||||||
|
|
||||||
|
config = Config(config_path=config_file, vault_path=vault_path)
|
||||||
|
|
||||||
|
assert config_file.exists() is True
|
||||||
|
config.write_config_value("vault", str(vault_path))
|
||||||
|
content = config_file.read_text()
|
||||||
|
assert config.vault_path == vault_path
|
||||||
|
assert re.search(str(vault_path), content) is not None
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_config():
|
||||||
|
"""Test parsing a config file."""
|
||||||
|
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=None)
|
||||||
|
assert config.vault_path == Path(Path.cwd() / "tests/fixtures/test_vault")
|
||||||
74
tests/conftest.py
Normal file
74
tests/conftest.py
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
# type: ignore
|
||||||
|
"""Fixtures for tests."""
|
||||||
|
|
||||||
|
import shutil
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
|
||||||
|
def remove_all(root: Path):
|
||||||
|
"""Remove all files and directories in a directory."""
|
||||||
|
for path in root.iterdir():
|
||||||
|
if path.is_file():
|
||||||
|
print(f"Deleting the file: {path}")
|
||||||
|
path.unlink()
|
||||||
|
else:
|
||||||
|
remove_all(path)
|
||||||
|
print(f"Deleting the empty dir: {root}")
|
||||||
|
root.rmdir()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture()
|
||||||
|
def sample_note(tmp_path) -> Path:
|
||||||
|
"""Fixture which creates a temporary note file."""
|
||||||
|
source_file: Path = Path("tests/fixtures/test_vault/test1.md")
|
||||||
|
if not source_file.exists():
|
||||||
|
raise FileNotFoundError(f"Original file not found: {source_file}")
|
||||||
|
|
||||||
|
dest_file: Path = Path(tmp_path / source_file.name)
|
||||||
|
shutil.copy(source_file, dest_file)
|
||||||
|
yield dest_file
|
||||||
|
|
||||||
|
# after test - remove fixtures
|
||||||
|
dest_file.unlink()
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture()
|
||||||
|
def sample_vault(tmp_path) -> Path:
|
||||||
|
"""Fixture which creates a sample vault."""
|
||||||
|
source_dir = Path(__file__).parent / "fixtures" / "sample_vault"
|
||||||
|
dest_dir = Path(tmp_path / "vault")
|
||||||
|
backup_dir = Path(f"{dest_dir}.bak")
|
||||||
|
|
||||||
|
if not source_dir.exists():
|
||||||
|
raise FileNotFoundError(f"Sample vault not found: {source_dir}")
|
||||||
|
|
||||||
|
shutil.copytree(source_dir, dest_dir)
|
||||||
|
yield dest_dir
|
||||||
|
|
||||||
|
# after test - remove fixtures
|
||||||
|
shutil.rmtree(dest_dir)
|
||||||
|
|
||||||
|
if backup_dir.exists():
|
||||||
|
shutil.rmtree(backup_dir)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture()
|
||||||
|
def test_vault(tmp_path) -> Path:
|
||||||
|
"""Fixture which creates a sample vault."""
|
||||||
|
source_dir = Path(__file__).parent / "fixtures" / "test_vault"
|
||||||
|
dest_dir = Path(tmp_path / "vault")
|
||||||
|
backup_dir = Path(f"{dest_dir}.bak")
|
||||||
|
|
||||||
|
if not source_dir.exists():
|
||||||
|
raise FileNotFoundError(f"Sample vault not found: {source_dir}")
|
||||||
|
|
||||||
|
shutil.copytree(source_dir, dest_dir)
|
||||||
|
yield dest_dir
|
||||||
|
|
||||||
|
# after test - remove fixtures
|
||||||
|
shutil.rmtree(dest_dir)
|
||||||
|
|
||||||
|
if backup_dir.exists():
|
||||||
|
shutil.rmtree(backup_dir)
|
||||||
39
tests/fixtures/sample_note.md
vendored
Normal file
39
tests/fixtures/sample_note.md
vendored
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
---
|
||||||
|
date_created: 2022-12-22
|
||||||
|
tags:
|
||||||
|
- food/fruit/apple
|
||||||
|
- dinner
|
||||||
|
- breakfast
|
||||||
|
- not_food
|
||||||
|
author: John Doe
|
||||||
|
nested_list:
|
||||||
|
nested_list_one:
|
||||||
|
- nested_list_one_a
|
||||||
|
- nested_list_one_b
|
||||||
|
type:
|
||||||
|
- article
|
||||||
|
- note
|
||||||
|
---
|
||||||
|
|
||||||
|
area:: mixed
|
||||||
|
date_modified:: 2022-12-22
|
||||||
|
status:: new
|
||||||
|
type:: book
|
||||||
|
inline_key:: inline_key_value
|
||||||
|
type:: [[article]]
|
||||||
|
tags:: from_inline_metadata
|
||||||
|
**bold_key**:: **bold** key value
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, [in_text_key:: in-text value] eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur? #inline_tag
|
||||||
|
|
||||||
|
At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, #inline_tag2 cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat.
|
||||||
|
|
||||||
|
#food/fruit/pear
|
||||||
|
#food/fruit/orange
|
||||||
|
#dinner #breakfast
|
||||||
|
#brunch
|
||||||
9
tests/fixtures/sample_vault/+inbox/Untitled.md
vendored
Normal file
9
tests/fixtures/sample_vault/+inbox/Untitled.md
vendored
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
---
|
||||||
|
type: note
|
||||||
|
tags:
|
||||||
|
- foo
|
||||||
|
- bar
|
||||||
|
- baz
|
||||||
|
- food/fruit/apple
|
||||||
|
---
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
3
tests/fixtures/sample_vault/.obsidian/app.json
vendored
Normal file
3
tests/fixtures/sample_vault/.obsidian/app.json
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
{
|
||||||
|
"alwaysUpdateLinks": true
|
||||||
|
}
|
||||||
3
tests/fixtures/sample_vault/.obsidian/appearance.json
vendored
Normal file
3
tests/fixtures/sample_vault/.obsidian/appearance.json
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
{
|
||||||
|
"accentColor": ""
|
||||||
|
}
|
||||||
3
tests/fixtures/sample_vault/.obsidian/community-plugins.json
vendored
Normal file
3
tests/fixtures/sample_vault/.obsidian/community-plugins.json
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
[
|
||||||
|
"templater-obsidian"
|
||||||
|
]
|
||||||
29
tests/fixtures/sample_vault/.obsidian/core-plugins-migration.json
vendored
Normal file
29
tests/fixtures/sample_vault/.obsidian/core-plugins-migration.json
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
{
|
||||||
|
"file-explorer": true,
|
||||||
|
"global-search": true,
|
||||||
|
"switcher": true,
|
||||||
|
"graph": true,
|
||||||
|
"backlink": true,
|
||||||
|
"canvas": true,
|
||||||
|
"outgoing-link": true,
|
||||||
|
"tag-pane": true,
|
||||||
|
"page-preview": true,
|
||||||
|
"daily-notes": true,
|
||||||
|
"templates": true,
|
||||||
|
"note-composer": true,
|
||||||
|
"command-palette": true,
|
||||||
|
"slash-command": false,
|
||||||
|
"editor-status": true,
|
||||||
|
"starred": true,
|
||||||
|
"markdown-importer": false,
|
||||||
|
"zk-prefixer": false,
|
||||||
|
"random-note": false,
|
||||||
|
"outline": true,
|
||||||
|
"word-count": true,
|
||||||
|
"slides": false,
|
||||||
|
"audio-recorder": false,
|
||||||
|
"workspaces": false,
|
||||||
|
"file-recovery": true,
|
||||||
|
"publish": false,
|
||||||
|
"sync": false
|
||||||
|
}
|
||||||
20
tests/fixtures/sample_vault/.obsidian/core-plugins.json
vendored
Normal file
20
tests/fixtures/sample_vault/.obsidian/core-plugins.json
vendored
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
[
|
||||||
|
"file-explorer",
|
||||||
|
"global-search",
|
||||||
|
"switcher",
|
||||||
|
"graph",
|
||||||
|
"backlink",
|
||||||
|
"canvas",
|
||||||
|
"outgoing-link",
|
||||||
|
"tag-pane",
|
||||||
|
"page-preview",
|
||||||
|
"daily-notes",
|
||||||
|
"templates",
|
||||||
|
"note-composer",
|
||||||
|
"command-palette",
|
||||||
|
"editor-status",
|
||||||
|
"starred",
|
||||||
|
"outline",
|
||||||
|
"word-count",
|
||||||
|
"file-recovery"
|
||||||
|
]
|
||||||
1
tests/fixtures/sample_vault/.obsidian/hotkeys.json
vendored
Normal file
1
tests/fixtures/sample_vault/.obsidian/hotkeys.json
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
{}
|
||||||
5617
tests/fixtures/sample_vault/.obsidian/plugins/templater-obsidian/main.js
vendored
Normal file
5617
tests/fixtures/sample_vault/.obsidian/plugins/templater-obsidian/main.js
vendored
Normal file
File diff suppressed because one or more lines are too long
10
tests/fixtures/sample_vault/.obsidian/plugins/templater-obsidian/manifest.json
vendored
Normal file
10
tests/fixtures/sample_vault/.obsidian/plugins/templater-obsidian/manifest.json
vendored
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
{
|
||||||
|
"id": "templater-obsidian",
|
||||||
|
"name": "Templater",
|
||||||
|
"version": "1.16.0",
|
||||||
|
"description": "Create and use templates",
|
||||||
|
"minAppVersion": "0.11.13",
|
||||||
|
"author": "SilentVoid",
|
||||||
|
"authorUrl": "https://github.com/SilentVoid13",
|
||||||
|
"isDesktopOnly": false
|
||||||
|
}
|
||||||
281
tests/fixtures/sample_vault/.obsidian/plugins/templater-obsidian/styles.css
vendored
Normal file
281
tests/fixtures/sample_vault/.obsidian/plugins/templater-obsidian/styles.css
vendored
Normal file
@@ -0,0 +1,281 @@
|
|||||||
|
.templater_search {
|
||||||
|
width: calc(100% - 20px);
|
||||||
|
}
|
||||||
|
|
||||||
|
.templater_div {
|
||||||
|
border-top: 1px solid var(--background-modifier-border);
|
||||||
|
}
|
||||||
|
|
||||||
|
.templater_div > .setting-item {
|
||||||
|
border-top: none !important;
|
||||||
|
align-self: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.templater_div > .setting-item > .setting-item-control {
|
||||||
|
justify-content: space-around;
|
||||||
|
padding: 0;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.templater_div
|
||||||
|
> .setting-item
|
||||||
|
> .setting-item-control
|
||||||
|
> .setting-editor-extra-setting-button {
|
||||||
|
align-self: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.templater_donating {
|
||||||
|
margin: 10px;
|
||||||
|
}
|
||||||
|
|
||||||
|
.templater_title {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0;
|
||||||
|
margin-top: 5px;
|
||||||
|
text-align: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.templater_template {
|
||||||
|
align-self: center;
|
||||||
|
margin-left: 5px;
|
||||||
|
margin-right: 5px;
|
||||||
|
width: 70%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.templater_cmd {
|
||||||
|
margin-left: 5px;
|
||||||
|
margin-right: 5px;
|
||||||
|
font-size: 14px;
|
||||||
|
width: 100%;
|
||||||
|
}
|
||||||
|
|
||||||
|
.templater_div2 > .setting-item {
|
||||||
|
align-content: center;
|
||||||
|
justify-content: center;
|
||||||
|
}
|
||||||
|
|
||||||
|
.templater-prompt-div {
|
||||||
|
display: flex;
|
||||||
|
}
|
||||||
|
|
||||||
|
.templater-prompt-form {
|
||||||
|
display: flex;
|
||||||
|
flex-grow: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.templater-prompt-input {
|
||||||
|
flex-grow: 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
.templater-button-div {
|
||||||
|
display: flex;
|
||||||
|
flex-direction: column;
|
||||||
|
align-items: center;
|
||||||
|
margin-top: 1rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
textarea.templater-prompt-input {
|
||||||
|
height: 10rem;
|
||||||
|
}
|
||||||
|
|
||||||
|
textarea.templater-prompt-input:focus {
|
||||||
|
border-color: var(--interactive-accent);
|
||||||
|
}
|
||||||
|
|
||||||
|
.cm-s-obsidian .templater-command-bg {
|
||||||
|
left: 0px;
|
||||||
|
right: 0px;
|
||||||
|
background-color: var(--background-primary-alt);
|
||||||
|
}
|
||||||
|
|
||||||
|
.cm-s-obsidian .cm-templater-command {
|
||||||
|
font-size: 0.85em;
|
||||||
|
font-family: var(--font-monospace);
|
||||||
|
line-height: 1.3;
|
||||||
|
}
|
||||||
|
|
||||||
|
.cm-s-obsidian .templater-inline .cm-templater-command {
|
||||||
|
background-color: var(--background-primary-alt);
|
||||||
|
}
|
||||||
|
|
||||||
|
.cm-s-obsidian .cm-templater-command.cm-templater-opening-tag {
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
.cm-s-obsidian .cm-templater-command.cm-templater-closing-tag {
|
||||||
|
font-weight: bold;
|
||||||
|
}
|
||||||
|
|
||||||
|
.cm-s-obsidian .cm-templater-command.cm-templater-interpolation-tag {
|
||||||
|
color: #008bff;
|
||||||
|
}
|
||||||
|
|
||||||
|
.cm-s-obsidian .cm-templater-command.cm-templater-execution-tag {
|
||||||
|
color: #c0d700;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-keyword {
|
||||||
|
color: #00a7aa;
|
||||||
|
font-weight: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-atom {
|
||||||
|
color: #f39b35;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-number {
|
||||||
|
color: #a06fca;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-type {
|
||||||
|
color: #a06fca;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-def {
|
||||||
|
color: #98e342;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-property {
|
||||||
|
color: #d4d4d4;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-variable {
|
||||||
|
color: #d4d4d4;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-variable-2 {
|
||||||
|
color: #da7dae;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-variable-3 {
|
||||||
|
color: #a06fca;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-type.cm-def {
|
||||||
|
color: #fc4384;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-property.cm-def {
|
||||||
|
color: #fc4384;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-callee {
|
||||||
|
color: #fc4384;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-operator {
|
||||||
|
color: #fc4384;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-qualifier {
|
||||||
|
color: #fc4384;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-tag {
|
||||||
|
color: #fc4384;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-tag.cm-bracket {
|
||||||
|
color: #d4d4d4;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-attribute {
|
||||||
|
color: #a06fca;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-comment {
|
||||||
|
color: #696d70;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-comment.cm-tag {
|
||||||
|
color: #fc4384;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-comment.cm-attribute {
|
||||||
|
color: #d4d4d4;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-string {
|
||||||
|
color: #e6db74;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-string-2 {
|
||||||
|
color: #f39b35;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-meta {
|
||||||
|
color: #d4d4d4;
|
||||||
|
background: inherit;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-builtin {
|
||||||
|
color: #fc4384;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-header {
|
||||||
|
color: #da7dae;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-hr {
|
||||||
|
color: #98e342;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-link {
|
||||||
|
color: #696d70;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.cm-error {
|
||||||
|
border-bottom: 1px solid #c42412;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian pre.HyperMD-codeblock .cm-keyword {
|
||||||
|
font-weight: normal;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark
|
||||||
|
.cm-s-obsidian
|
||||||
|
.cm-templater-command.CodeMirror-activeline-background {
|
||||||
|
background: #272727;
|
||||||
|
}
|
||||||
|
|
||||||
|
.theme-dark .cm-s-obsidian .cm-templater-command.CodeMirror-matchingbracket {
|
||||||
|
outline: 1px solid grey;
|
||||||
|
color: #d4d4d4 !important;
|
||||||
|
}
|
||||||
|
|
||||||
|
.CodeMirror-hints {
|
||||||
|
position: absolute;
|
||||||
|
z-index: 10;
|
||||||
|
overflow: hidden;
|
||||||
|
list-style: none;
|
||||||
|
|
||||||
|
margin: 0;
|
||||||
|
padding: 2px;
|
||||||
|
|
||||||
|
-webkit-box-shadow: 2px 3px 5px rgba(0, 0, 0, 0.2);
|
||||||
|
-moz-box-shadow: 2px 3px 5px rgba(0, 0, 0, 0.2);
|
||||||
|
box-shadow: 2px 3px 5px rgba(0, 0, 0, 0.2);
|
||||||
|
border-radius: 3px;
|
||||||
|
border: 1px solid silver;
|
||||||
|
|
||||||
|
background: white;
|
||||||
|
font-size: 90%;
|
||||||
|
font-family: monospace;
|
||||||
|
|
||||||
|
max-height: 20em;
|
||||||
|
overflow-y: auto;
|
||||||
|
}
|
||||||
|
|
||||||
|
.CodeMirror-hint {
|
||||||
|
margin: 0;
|
||||||
|
padding: 0 4px;
|
||||||
|
border-radius: 2px;
|
||||||
|
white-space: pre;
|
||||||
|
color: black;
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
li.CodeMirror-hint-active {
|
||||||
|
background: #08f;
|
||||||
|
color: white;
|
||||||
|
}
|
||||||
155
tests/fixtures/sample_vault/.obsidian/workspace.json
vendored
Normal file
155
tests/fixtures/sample_vault/.obsidian/workspace.json
vendored
Normal file
@@ -0,0 +1,155 @@
|
|||||||
|
{
|
||||||
|
"main": {
|
||||||
|
"id": "5f828621a37b21ab",
|
||||||
|
"type": "split",
|
||||||
|
"children": [
|
||||||
|
{
|
||||||
|
"id": "2f3322f01c16279b",
|
||||||
|
"type": "tabs",
|
||||||
|
"children": [
|
||||||
|
{
|
||||||
|
"id": "d16a705340a291b0",
|
||||||
|
"type": "leaf",
|
||||||
|
"state": {
|
||||||
|
"type": "empty",
|
||||||
|
"state": {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"direction": "vertical"
|
||||||
|
},
|
||||||
|
"left": {
|
||||||
|
"id": "aeebc2581160842a",
|
||||||
|
"type": "split",
|
||||||
|
"children": [
|
||||||
|
{
|
||||||
|
"id": "cd71abd49c3ceb86",
|
||||||
|
"type": "tabs",
|
||||||
|
"children": [
|
||||||
|
{
|
||||||
|
"id": "fe51579e3e74af15",
|
||||||
|
"type": "leaf",
|
||||||
|
"state": {
|
||||||
|
"type": "file-explorer",
|
||||||
|
"state": {
|
||||||
|
"sortOrder": "alphabetical"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "2a7187c7c8d51306",
|
||||||
|
"type": "leaf",
|
||||||
|
"state": {
|
||||||
|
"type": "search",
|
||||||
|
"state": {
|
||||||
|
"query": "",
|
||||||
|
"matchingCase": false,
|
||||||
|
"explainSearch": false,
|
||||||
|
"collapseAll": false,
|
||||||
|
"extraContext": false,
|
||||||
|
"sortOrder": "alphabetical"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "574f0713150d5067",
|
||||||
|
"type": "leaf",
|
||||||
|
"state": {
|
||||||
|
"type": "starred",
|
||||||
|
"state": {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"direction": "horizontal",
|
||||||
|
"width": 300
|
||||||
|
},
|
||||||
|
"right": {
|
||||||
|
"id": "b20cf1cec7ad8379",
|
||||||
|
"type": "split",
|
||||||
|
"children": [
|
||||||
|
{
|
||||||
|
"id": "e4268aea52a4b751",
|
||||||
|
"type": "tabs",
|
||||||
|
"children": [
|
||||||
|
{
|
||||||
|
"id": "495261df1eda8469",
|
||||||
|
"type": "leaf",
|
||||||
|
"state": {
|
||||||
|
"type": "backlink",
|
||||||
|
"state": {
|
||||||
|
"collapseAll": false,
|
||||||
|
"extraContext": false,
|
||||||
|
"sortOrder": "alphabetical",
|
||||||
|
"showSearch": false,
|
||||||
|
"searchQuery": "",
|
||||||
|
"backlinkCollapsed": false,
|
||||||
|
"unlinkedCollapsed": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "a552a9e316c497c2",
|
||||||
|
"type": "leaf",
|
||||||
|
"state": {
|
||||||
|
"type": "outgoing-link",
|
||||||
|
"state": {
|
||||||
|
"linksCollapsed": false,
|
||||||
|
"unlinkedCollapsed": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "49ac9a323fc7a3bb",
|
||||||
|
"type": "leaf",
|
||||||
|
"state": {
|
||||||
|
"type": "tag",
|
||||||
|
"state": {
|
||||||
|
"sortOrder": "frequency",
|
||||||
|
"useHierarchy": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"id": "93bd91c8147876e4",
|
||||||
|
"type": "leaf",
|
||||||
|
"state": {
|
||||||
|
"type": "outline",
|
||||||
|
"state": {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"direction": "horizontal",
|
||||||
|
"width": 300,
|
||||||
|
"collapsed": true
|
||||||
|
},
|
||||||
|
"left-ribbon": {
|
||||||
|
"hiddenItems": {
|
||||||
|
"switcher:Open quick switcher": false,
|
||||||
|
"graph:Open graph view": false,
|
||||||
|
"canvas:Create new canvas": false,
|
||||||
|
"daily-notes:Open today's daily note": false,
|
||||||
|
"templates:Insert template": false,
|
||||||
|
"command-palette:Open command palette": false,
|
||||||
|
"templater-obsidian:Templater": false
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"active": "d16a705340a291b0",
|
||||||
|
"lastOpenFiles": [
|
||||||
|
"00 meta/templates/daily note.md",
|
||||||
|
"+inbox/Untitled.md",
|
||||||
|
"01 frontmatter/frontmatter 1.md",
|
||||||
|
"01 frontmatter/frontmatter 2.md",
|
||||||
|
"01 frontmatter/frontmatter 3.md",
|
||||||
|
"01 frontmatter/frontmatter 4.md",
|
||||||
|
"02 inline/inline 1.md",
|
||||||
|
"02 inline/inline 2.md",
|
||||||
|
"02 inline/inline 3.md",
|
||||||
|
"02 inline/inline 4.md"
|
||||||
|
]
|
||||||
|
}
|
||||||
19
tests/fixtures/sample_vault/00 meta/templates/daily note.md
vendored
Normal file
19
tests/fixtures/sample_vault/00 meta/templates/daily note.md
vendored
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
<%* let title = tp.file.title
|
||||||
|
if (title.startsWith("Untitled")) {
|
||||||
|
title = await tp.system.prompt("Title");
|
||||||
|
await tp.file.rename(title);
|
||||||
|
}
|
||||||
|
-%>
|
||||||
|
<%*
|
||||||
|
let result = title.replace(/-/g, ' ')
|
||||||
|
result = result.charAt(0).toUpperCase() + result.slice(1);
|
||||||
|
tR += "---"
|
||||||
|
%>
|
||||||
|
title: <%* tR += "\"" + result + "\"" %>
|
||||||
|
tags:
|
||||||
|
<% tp.file.cursor(1) %>
|
||||||
|
programming-languagues:
|
||||||
|
created: <% tp.date.now("YYYY-MM-DD") %>
|
||||||
|
---
|
||||||
|
# <%* tR += result %>
|
||||||
|
---
|
||||||
17
tests/fixtures/sample_vault/00 meta/templates/data sample.md
vendored
Normal file
17
tests/fixtures/sample_vault/00 meta/templates/data sample.md
vendored
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
---
|
||||||
|
area:
|
||||||
|
date_created: 2022-12-21
|
||||||
|
date_modified: 2022-12-20
|
||||||
|
tags:
|
||||||
|
- food/fruit/apple
|
||||||
|
- food/fruit/pear
|
||||||
|
- dinner
|
||||||
|
- lunch
|
||||||
|
- breakfast
|
||||||
|
author: John Doe
|
||||||
|
status: new
|
||||||
|
type:
|
||||||
|
- book
|
||||||
|
- article
|
||||||
|
- note
|
||||||
|
---
|
||||||
255
tests/fixtures/sample_vault/01 frontmatter/frontmatter 1.md
vendored
Normal file
255
tests/fixtures/sample_vault/01 frontmatter/frontmatter 1.md
vendored
Normal file
@@ -0,0 +1,255 @@
|
|||||||
|
---
|
||||||
|
area: frontmatter
|
||||||
|
date_created: 2022-12-22
|
||||||
|
date_modified: 2022-12-22
|
||||||
|
tags:
|
||||||
|
- food/fruit/apple
|
||||||
|
- food/fruit/pear
|
||||||
|
- dinner
|
||||||
|
- lunch
|
||||||
|
- breakfast
|
||||||
|
thoughts:
|
||||||
|
rating: 8
|
||||||
|
reviewable: false
|
||||||
|
levels:
|
||||||
|
level1:
|
||||||
|
- level1a
|
||||||
|
- level1b
|
||||||
|
level2:
|
||||||
|
- level2a
|
||||||
|
- level2b
|
||||||
|
author: John Doe
|
||||||
|
status: new
|
||||||
|
type: ["book", "article", "note", "one-off"]
|
||||||
|
---
|
||||||
|
# Page Title H1
|
||||||
|
|
||||||
|
# Headings
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
## Heading 2
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
### Heading 3
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
#### Heading 4
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
##### Heading 5
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
###### Heading 6
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
## Text styles
|
||||||
|
|
||||||
|
Lorem ipsum **dolor sit amet**, consectetur adipisicing elit, sed do _eiusmod tempor incididunt_ ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud [[internal link]] exercitation ullamco laboris nisi ut [external link](https://google.com) aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. [[frontmatter 1]] Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Lorem ipsum `inline code looks like this` dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt.
|
||||||
|
|
||||||
|
Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?
|
||||||
|
|
||||||
|
```css
|
||||||
|
This is a code block
|
||||||
|
```
|
||||||
|
|
||||||
|
## Bold
|
||||||
|
|
||||||
|
**The quick brown fox jumps over the lazy dog.**
|
||||||
|
**The quick brown fox jumps over the lazy dog.**
|
||||||
|
<strong>The quick brown fox jumps over the lazy dog.</strong>
|
||||||
|
|
||||||
|
## Italic
|
||||||
|
|
||||||
|
_The quick brown fox jumps over the lazy dog._
|
||||||
|
_The quick brown fox jumps over the lazy dog._
|
||||||
|
<em>The quick brown fox jumps over the lazy dog.</em>
|
||||||
|
|
||||||
|
## Bold and Italic
|
||||||
|
|
||||||
|
**_The quick brown fox jumps over the lazy dog._**
|
||||||
|
<strong><em>The quick brown fox jumps over the lazy dog.</em></strong>
|
||||||
|
|
||||||
|
## Blockquotes
|
||||||
|
|
||||||
|
> Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua
|
||||||
|
|
||||||
|
> The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog.
|
||||||
|
|
||||||
|
> **The quick brown fox** _jumps over the lazy dog._
|
||||||
|
|
||||||
|
## Monospaced
|
||||||
|
|
||||||
|
<samp>The quick brown fox jumps over the lazy dog.</samp>
|
||||||
|
|
||||||
|
## Underlined
|
||||||
|
|
||||||
|
<ins>The quick brown fox jumps over the lazy dog.</ins>
|
||||||
|
|
||||||
|
## Strike-through
|
||||||
|
|
||||||
|
~~The quick brown fox jumps over the lazy dog.~~
|
||||||
|
|
||||||
|
## sub and super
|
||||||
|
|
||||||
|
Subscript <sub>The quick brown fox jumps over the lazy dog.</sub>
|
||||||
|
Superscript <sup>The quick brown fox jumps over the lazy dog.</sup>
|
||||||
|
|
||||||
|
## Syntax Highlighting
|
||||||
|
|
||||||
|
A class method is an instance method of the class object. When a new class is created, an object of type `Class` is initialized and assigned to a global constant (Mobile in this case).
|
||||||
|
|
||||||
|
```
|
||||||
|
public static String monthNames[] = {"January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"};
|
||||||
|
```
|
||||||
|
|
||||||
|
```java
|
||||||
|
public static String monthNames[] = {"January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"};
|
||||||
|
```
|
||||||
|
|
||||||
|
```css
|
||||||
|
button.mod-cta a {
|
||||||
|
color: inherit;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Tables
|
||||||
|
|
||||||
|
| one | two | three |
|
||||||
|
| ---- | :--: | ----- |
|
||||||
|
| 9999 | 9999 | 9999 |
|
||||||
|
| 1 | 2 | 3 |
|
||||||
|
| 44 | 55 | 66 |
|
||||||
|
|
||||||
|
| Default | Left align | Center align | Right align |
|
||||||
|
| ---------- | :--------- | :----------: | ----------: |
|
||||||
|
| 9999999999 | 9999999999 | 9999999999 | 9999999999 |
|
||||||
|
| 999999999 | 999999999 | 999999999 | 999999999 |
|
||||||
|
| 99999999 | 99999999 | 99999999 | 99999999 |
|
||||||
|
| 9999999 | 9999999 | 9999999 | 9999999 |
|
||||||
|
|
||||||
|
| A | B | C |
|
||||||
|
| --- | --- | ----------------- |
|
||||||
|
| 1 | 2 | 3 <br/> 4 <br/> 5 |
|
||||||
|
|
||||||
|
## Links
|
||||||
|
|
||||||
|
[The-Ultimate-Markdown-Cheat-Sheet](https://github.com/lifeparticle/The-Ultimate-Markdown-Cheat-Sheet)
|
||||||
|
[The-Ultimate-Markdown-Cheat-Sheet][reference text]
|
||||||
|
[The-Ultimate-Markdown-Cheat-Sheet][1]
|
||||||
|
[Markdown-Cheat-Sheet]
|
||||||
|
|
||||||
|
[reference text]: https://github.com/lifeparticle/The-Ultimate-Markdown-Cheat-Sheet
|
||||||
|
[1]: https://github.com/lifeparticle/The-Ultimate-Markdown-Cheat-Sheet
|
||||||
|
[markdown-cheat-sheet]: https://github.com/lifeparticle/The-Ultimate-Markdown-Cheat-Sheet
|
||||||
|
|
||||||
|
[Example of a relative link](rl.md)
|
||||||
|
Visit https://github.com/
|
||||||
|
|
||||||
|
## Images
|
||||||
|
|
||||||
|
![alt text][image]
|
||||||
|
|
||||||
|
<img src="https://media.giphy.com/media/qLHzYjlA2FW8g/giphy.gif" />
|
||||||
|
|
||||||
|
<img src="https://img.shields.io/badge/theultimatemarkdowncheatsheet-brightgreen.svg" />
|
||||||
|
## Lists
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
1. One
|
||||||
|
2. Two
|
||||||
|
3. Three
|
||||||
|
|
||||||
|
## Multi-level Lists
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
1. First level
|
||||||
|
1. Second level
|
||||||
|
- Third level
|
||||||
|
- Fourth level
|
||||||
|
2. First level
|
||||||
|
1. Second level
|
||||||
|
3. First level
|
||||||
|
1. Second level
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
- First level
|
||||||
|
- Second level
|
||||||
|
- Third level
|
||||||
|
- Fourth level
|
||||||
|
- First level
|
||||||
|
- Second level
|
||||||
|
- First level
|
||||||
|
- Second level
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
- [x] Fix Bug 223 ✅ 2022-08-08
|
||||||
|
- [x] Add Feature 33 ✅ 2022-08-08
|
||||||
|
- [x] Add unit tests ✅ 2022-08-08
|
||||||
|
|
||||||
|
## Horizontal Rules
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Miscellaneous
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Lorem ipsum dolor sit amet
|
||||||
|
-->
|
||||||
|
|
||||||
|
- Asterisk
|
||||||
|
\ Backslash
|
||||||
|
` Backtick
|
||||||
|
{} Curly braces
|
||||||
|
. Dot
|
||||||
|
! Exclamation mark
|
||||||
|
|
||||||
|
## Hash symbol
|
||||||
|
|
||||||
|
- Hyphen symbol
|
||||||
|
() Parentheses
|
||||||
|
|
||||||
|
* Plus symbol
|
||||||
|
[] Square brackets
|
||||||
|
\_ Underscore
|
||||||
|
|
||||||
|
\* Asterisk
|
||||||
|
\\ Backslash
|
||||||
|
\` Backtick
|
||||||
|
\{} Curly braces
|
||||||
|
\. Dot
|
||||||
|
\! Exclamation mark
|
||||||
|
\# Hash symbol
|
||||||
|
\- Hyphen symbol
|
||||||
|
\() Parentheses
|
||||||
|
\+ Plus symbol
|
||||||
|
\[] Square brackets
|
||||||
|
\_ Underscore
|
||||||
|
|
||||||
|
:octocat:
|
||||||
|
|
||||||
|
@lifeparticle
|
||||||
|
|
||||||
|
\#
|
||||||
255
tests/fixtures/sample_vault/01 frontmatter/frontmatter 2.md
vendored
Normal file
255
tests/fixtures/sample_vault/01 frontmatter/frontmatter 2.md
vendored
Normal file
@@ -0,0 +1,255 @@
|
|||||||
|
---
|
||||||
|
area: frontmatter
|
||||||
|
date_created: 2022-12-22
|
||||||
|
date_modified: 2022-11-14
|
||||||
|
tags:
|
||||||
|
- food/fruit/apple
|
||||||
|
- food/fruit/pear
|
||||||
|
- dinner
|
||||||
|
- lunch
|
||||||
|
- breakfast
|
||||||
|
thoughts:
|
||||||
|
rating: 8
|
||||||
|
reviewable: false
|
||||||
|
levels:
|
||||||
|
level1:
|
||||||
|
- level1a
|
||||||
|
- level1b
|
||||||
|
level2:
|
||||||
|
- level2a
|
||||||
|
- level2b
|
||||||
|
author: John Doe
|
||||||
|
status: new
|
||||||
|
type: ["book", "article", "note"]
|
||||||
|
---
|
||||||
|
# Page Title H1
|
||||||
|
|
||||||
|
# Headings
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
## Heading 2
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
### Heading 3
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
#### Heading 4
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
##### Heading 5
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
###### Heading 6
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
## Text styles
|
||||||
|
|
||||||
|
Lorem ipsum **dolor sit amet**, consectetur adipisicing elit, sed do _eiusmod tempor incididunt_ ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud [[internal link]] exercitation ullamco laboris nisi ut [external link](https://google.com) aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. [[frontmatter 1]] Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Lorem ipsum `inline code looks like this` dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt.
|
||||||
|
|
||||||
|
Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?
|
||||||
|
|
||||||
|
```css
|
||||||
|
This is a code block
|
||||||
|
```
|
||||||
|
|
||||||
|
## Bold
|
||||||
|
|
||||||
|
**The quick brown fox jumps over the lazy dog.**
|
||||||
|
**The quick brown fox jumps over the lazy dog.**
|
||||||
|
<strong>The quick brown fox jumps over the lazy dog.</strong>
|
||||||
|
|
||||||
|
## Italic
|
||||||
|
|
||||||
|
_The quick brown fox jumps over the lazy dog._
|
||||||
|
_The quick brown fox jumps over the lazy dog._
|
||||||
|
<em>The quick brown fox jumps over the lazy dog.</em>
|
||||||
|
|
||||||
|
## Bold and Italic
|
||||||
|
|
||||||
|
**_The quick brown fox jumps over the lazy dog._**
|
||||||
|
<strong><em>The quick brown fox jumps over the lazy dog.</em></strong>
|
||||||
|
|
||||||
|
## Blockquotes
|
||||||
|
|
||||||
|
> Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua
|
||||||
|
|
||||||
|
> The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog.
|
||||||
|
|
||||||
|
> **The quick brown fox** _jumps over the lazy dog._
|
||||||
|
|
||||||
|
## Monospaced
|
||||||
|
|
||||||
|
<samp>The quick brown fox jumps over the lazy dog.</samp>
|
||||||
|
|
||||||
|
## Underlined
|
||||||
|
|
||||||
|
<ins>The quick brown fox jumps over the lazy dog.</ins>
|
||||||
|
|
||||||
|
## Strike-through
|
||||||
|
|
||||||
|
~~The quick brown fox jumps over the lazy dog.~~
|
||||||
|
|
||||||
|
## sub and super
|
||||||
|
|
||||||
|
Subscript <sub>The quick brown fox jumps over the lazy dog.</sub>
|
||||||
|
Superscript <sup>The quick brown fox jumps over the lazy dog.</sup>
|
||||||
|
|
||||||
|
## Syntax Highlighting
|
||||||
|
|
||||||
|
A class method is an instance method of the class object. When a new class is created, an object of type `Class` is initialized and assigned to a global constant (Mobile in this case).
|
||||||
|
|
||||||
|
```
|
||||||
|
public static String monthNames[] = {"January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"};
|
||||||
|
```
|
||||||
|
|
||||||
|
```java
|
||||||
|
public static String monthNames[] = {"January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"};
|
||||||
|
```
|
||||||
|
|
||||||
|
```css
|
||||||
|
button.mod-cta a {
|
||||||
|
color: inherit;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Tables
|
||||||
|
|
||||||
|
| one | two | three |
|
||||||
|
| ---- | :--: | ----- |
|
||||||
|
| 9999 | 9999 | 9999 |
|
||||||
|
| 1 | 2 | 3 |
|
||||||
|
| 44 | 55 | 66 |
|
||||||
|
|
||||||
|
| Default | Left align | Center align | Right align |
|
||||||
|
| ---------- | :--------- | :----------: | ----------: |
|
||||||
|
| 9999999999 | 9999999999 | 9999999999 | 9999999999 |
|
||||||
|
| 999999999 | 999999999 | 999999999 | 999999999 |
|
||||||
|
| 99999999 | 99999999 | 99999999 | 99999999 |
|
||||||
|
| 9999999 | 9999999 | 9999999 | 9999999 |
|
||||||
|
|
||||||
|
| A | B | C |
|
||||||
|
| --- | --- | ----------------- |
|
||||||
|
| 1 | 2 | 3 <br/> 4 <br/> 5 |
|
||||||
|
|
||||||
|
## Links
|
||||||
|
|
||||||
|
[The-Ultimate-Markdown-Cheat-Sheet](https://github.com/lifeparticle/The-Ultimate-Markdown-Cheat-Sheet)
|
||||||
|
[The-Ultimate-Markdown-Cheat-Sheet][reference text]
|
||||||
|
[The-Ultimate-Markdown-Cheat-Sheet][1]
|
||||||
|
[Markdown-Cheat-Sheet]
|
||||||
|
|
||||||
|
[reference text]: https://github.com/lifeparticle/The-Ultimate-Markdown-Cheat-Sheet
|
||||||
|
[1]: https://github.com/lifeparticle/The-Ultimate-Markdown-Cheat-Sheet
|
||||||
|
[markdown-cheat-sheet]: https://github.com/lifeparticle/The-Ultimate-Markdown-Cheat-Sheet
|
||||||
|
|
||||||
|
[Example of a relative link](rl.md)
|
||||||
|
Visit https://github.com/
|
||||||
|
|
||||||
|
## Images
|
||||||
|
|
||||||
|
![alt text][image]
|
||||||
|
|
||||||
|
<img src="https://media.giphy.com/media/qLHzYjlA2FW8g/giphy.gif" />
|
||||||
|
|
||||||
|
<img src="https://img.shields.io/badge/theultimatemarkdowncheatsheet-brightgreen.svg" />
|
||||||
|
## Lists
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
1. One
|
||||||
|
2. Two
|
||||||
|
3. Three
|
||||||
|
|
||||||
|
## Multi-level Lists
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
1. First level
|
||||||
|
1. Second level
|
||||||
|
- Third level
|
||||||
|
- Fourth level
|
||||||
|
2. First level
|
||||||
|
1. Second level
|
||||||
|
3. First level
|
||||||
|
1. Second level
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
- First level
|
||||||
|
- Second level
|
||||||
|
- Third level
|
||||||
|
- Fourth level
|
||||||
|
- First level
|
||||||
|
- Second level
|
||||||
|
- First level
|
||||||
|
- Second level
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
- [x] Fix Bug 223 ✅ 2022-08-08
|
||||||
|
- [x] Add Feature 33 ✅ 2022-08-08
|
||||||
|
- [x] Add unit tests ✅ 2022-08-08
|
||||||
|
|
||||||
|
## Horizontal Rules
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Miscellaneous
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Lorem ipsum dolor sit amet
|
||||||
|
-->
|
||||||
|
|
||||||
|
- Asterisk
|
||||||
|
\ Backslash
|
||||||
|
` Backtick
|
||||||
|
{} Curly braces
|
||||||
|
. Dot
|
||||||
|
! Exclamation mark
|
||||||
|
|
||||||
|
## Hash symbol
|
||||||
|
|
||||||
|
- Hyphen symbol
|
||||||
|
() Parentheses
|
||||||
|
|
||||||
|
* Plus symbol
|
||||||
|
[] Square brackets
|
||||||
|
\_ Underscore
|
||||||
|
|
||||||
|
\* Asterisk
|
||||||
|
\\ Backslash
|
||||||
|
\` Backtick
|
||||||
|
\{} Curly braces
|
||||||
|
\. Dot
|
||||||
|
\! Exclamation mark
|
||||||
|
\# Hash symbol
|
||||||
|
\- Hyphen symbol
|
||||||
|
\() Parentheses
|
||||||
|
\+ Plus symbol
|
||||||
|
\[] Square brackets
|
||||||
|
\_ Underscore
|
||||||
|
|
||||||
|
:octocat:
|
||||||
|
|
||||||
|
@lifeparticle
|
||||||
|
|
||||||
|
\#
|
||||||
255
tests/fixtures/sample_vault/01 frontmatter/frontmatter 3.md
vendored
Normal file
255
tests/fixtures/sample_vault/01 frontmatter/frontmatter 3.md
vendored
Normal file
@@ -0,0 +1,255 @@
|
|||||||
|
---
|
||||||
|
area: frontmatter
|
||||||
|
date_created: 2022-12-22
|
||||||
|
date_modified: 2022-10-01
|
||||||
|
tags:
|
||||||
|
- food/fruit/apple
|
||||||
|
- food/fruit/pear
|
||||||
|
- dinner
|
||||||
|
- lunch
|
||||||
|
- breakfast
|
||||||
|
thoughts:
|
||||||
|
rating: 8
|
||||||
|
reviewable: false
|
||||||
|
levels:
|
||||||
|
level1:
|
||||||
|
- level1a
|
||||||
|
- level1b
|
||||||
|
level2:
|
||||||
|
- level2a
|
||||||
|
- level2b
|
||||||
|
author: John Doe
|
||||||
|
status: new
|
||||||
|
type: ["book", "article", "note"]
|
||||||
|
---
|
||||||
|
# Page Title H1
|
||||||
|
|
||||||
|
# Headings
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
## Heading 2
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
### Heading 3
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
#### Heading 4
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
##### Heading 5
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
###### Heading 6
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
## Text styles
|
||||||
|
|
||||||
|
Lorem ipsum **dolor sit amet**, consectetur adipisicing elit, sed do _eiusmod tempor incididunt_ ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud [[internal link]] exercitation ullamco laboris nisi ut [external link](https://google.com) aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. [[frontmatter 1]] Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Lorem ipsum `inline code looks like this` dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt.
|
||||||
|
|
||||||
|
Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?
|
||||||
|
|
||||||
|
```css
|
||||||
|
This is a code block
|
||||||
|
```
|
||||||
|
|
||||||
|
## Bold
|
||||||
|
|
||||||
|
**The quick brown fox jumps over the lazy dog.**
|
||||||
|
**The quick brown fox jumps over the lazy dog.**
|
||||||
|
<strong>The quick brown fox jumps over the lazy dog.</strong>
|
||||||
|
|
||||||
|
## Italic
|
||||||
|
|
||||||
|
_The quick brown fox jumps over the lazy dog._
|
||||||
|
_The quick brown fox jumps over the lazy dog._
|
||||||
|
<em>The quick brown fox jumps over the lazy dog.</em>
|
||||||
|
|
||||||
|
## Bold and Italic
|
||||||
|
|
||||||
|
**_The quick brown fox jumps over the lazy dog._**
|
||||||
|
<strong><em>The quick brown fox jumps over the lazy dog.</em></strong>
|
||||||
|
|
||||||
|
## Blockquotes
|
||||||
|
|
||||||
|
> Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua
|
||||||
|
|
||||||
|
> The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog. The quick brown fox jumps over the lazy dog.
|
||||||
|
|
||||||
|
> **The quick brown fox** _jumps over the lazy dog._
|
||||||
|
|
||||||
|
## Monospaced
|
||||||
|
|
||||||
|
<samp>The quick brown fox jumps over the lazy dog.</samp>
|
||||||
|
|
||||||
|
## Underlined
|
||||||
|
|
||||||
|
<ins>The quick brown fox jumps over the lazy dog.</ins>
|
||||||
|
|
||||||
|
## Strike-through
|
||||||
|
|
||||||
|
~~The quick brown fox jumps over the lazy dog.~~
|
||||||
|
|
||||||
|
## sub and super
|
||||||
|
|
||||||
|
Subscript <sub>The quick brown fox jumps over the lazy dog.</sub>
|
||||||
|
Superscript <sup>The quick brown fox jumps over the lazy dog.</sup>
|
||||||
|
|
||||||
|
## Syntax Highlighting
|
||||||
|
|
||||||
|
A class method is an instance method of the class object. When a new class is created, an object of type `Class` is initialized and assigned to a global constant (Mobile in this case).
|
||||||
|
|
||||||
|
```
|
||||||
|
public static String monthNames[] = {"January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"};
|
||||||
|
```
|
||||||
|
|
||||||
|
```java
|
||||||
|
public static String monthNames[] = {"January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December"};
|
||||||
|
```
|
||||||
|
|
||||||
|
```css
|
||||||
|
button.mod-cta a {
|
||||||
|
color: inherit;
|
||||||
|
text-decoration: none;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Tables
|
||||||
|
|
||||||
|
| one | two | three |
|
||||||
|
| ---- | :--: | ----- |
|
||||||
|
| 9999 | 9999 | 9999 |
|
||||||
|
| 1 | 2 | 3 |
|
||||||
|
| 44 | 55 | 66 |
|
||||||
|
|
||||||
|
| Default | Left align | Center align | Right align |
|
||||||
|
| ---------- | :--------- | :----------: | ----------: |
|
||||||
|
| 9999999999 | 9999999999 | 9999999999 | 9999999999 |
|
||||||
|
| 999999999 | 999999999 | 999999999 | 999999999 |
|
||||||
|
| 99999999 | 99999999 | 99999999 | 99999999 |
|
||||||
|
| 9999999 | 9999999 | 9999999 | 9999999 |
|
||||||
|
|
||||||
|
| A | B | C |
|
||||||
|
| --- | --- | ----------------- |
|
||||||
|
| 1 | 2 | 3 <br/> 4 <br/> 5 |
|
||||||
|
|
||||||
|
## Links
|
||||||
|
|
||||||
|
[The-Ultimate-Markdown-Cheat-Sheet](https://github.com/lifeparticle/The-Ultimate-Markdown-Cheat-Sheet)
|
||||||
|
[The-Ultimate-Markdown-Cheat-Sheet][reference text]
|
||||||
|
[The-Ultimate-Markdown-Cheat-Sheet][1]
|
||||||
|
[Markdown-Cheat-Sheet]
|
||||||
|
|
||||||
|
[reference text]: https://github.com/lifeparticle/The-Ultimate-Markdown-Cheat-Sheet
|
||||||
|
[1]: https://github.com/lifeparticle/The-Ultimate-Markdown-Cheat-Sheet
|
||||||
|
[markdown-cheat-sheet]: https://github.com/lifeparticle/The-Ultimate-Markdown-Cheat-Sheet
|
||||||
|
|
||||||
|
[Example of a relative link](rl.md)
|
||||||
|
Visit https://github.com/
|
||||||
|
|
||||||
|
## Images
|
||||||
|
|
||||||
|
![alt text][image]
|
||||||
|
|
||||||
|
<img src="https://media.giphy.com/media/qLHzYjlA2FW8g/giphy.gif" />
|
||||||
|
|
||||||
|
<img src="https://img.shields.io/badge/theultimatemarkdowncheatsheet-brightgreen.svg" />
|
||||||
|
## Lists
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
1. One
|
||||||
|
2. Two
|
||||||
|
3. Three
|
||||||
|
|
||||||
|
## Multi-level Lists
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
1. First level
|
||||||
|
1. Second level
|
||||||
|
- Third level
|
||||||
|
- Fourth level
|
||||||
|
2. First level
|
||||||
|
1. Second level
|
||||||
|
3. First level
|
||||||
|
1. Second level
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
- 1
|
||||||
|
- 2
|
||||||
|
- 3
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
- First level
|
||||||
|
- Second level
|
||||||
|
- Third level
|
||||||
|
- Fourth level
|
||||||
|
- First level
|
||||||
|
- Second level
|
||||||
|
- First level
|
||||||
|
- Second level
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.
|
||||||
|
|
||||||
|
- [x] Fix Bug 223 ✅ 2022-08-08
|
||||||
|
- [x] Add Feature 33 ✅ 2022-08-08
|
||||||
|
- [x] Add unit tests ✅ 2022-08-08
|
||||||
|
|
||||||
|
## Horizontal Rules
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Miscellaneous
|
||||||
|
|
||||||
|
<!--
|
||||||
|
Lorem ipsum dolor sit amet
|
||||||
|
-->
|
||||||
|
|
||||||
|
- Asterisk
|
||||||
|
\ Backslash
|
||||||
|
` Backtick
|
||||||
|
{} Curly braces
|
||||||
|
. Dot
|
||||||
|
! Exclamation mark
|
||||||
|
|
||||||
|
## Hash symbol
|
||||||
|
|
||||||
|
- Hyphen symbol
|
||||||
|
() Parentheses
|
||||||
|
|
||||||
|
* Plus symbol
|
||||||
|
[] Square brackets
|
||||||
|
\_ Underscore
|
||||||
|
|
||||||
|
\* Asterisk
|
||||||
|
\\ Backslash
|
||||||
|
\` Backtick
|
||||||
|
\{} Curly braces
|
||||||
|
\. Dot
|
||||||
|
\! Exclamation mark
|
||||||
|
\# Hash symbol
|
||||||
|
\- Hyphen symbol
|
||||||
|
\() Parentheses
|
||||||
|
\+ Plus symbol
|
||||||
|
\[] Square brackets
|
||||||
|
\_ Underscore
|
||||||
|
|
||||||
|
:octocat:
|
||||||
|
|
||||||
|
@lifeparticle
|
||||||
|
|
||||||
|
\#
|
||||||
29
tests/fixtures/sample_vault/01 frontmatter/frontmatter 4.md
vendored
Normal file
29
tests/fixtures/sample_vault/01 frontmatter/frontmatter 4.md
vendored
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
---
|
||||||
|
area: frontmatter
|
||||||
|
date_created: 2022-12-22
|
||||||
|
date_modified: 2022-12-22
|
||||||
|
tags:
|
||||||
|
- food/fruit/apple
|
||||||
|
- food/fruit/pear
|
||||||
|
- dinner
|
||||||
|
- lunch
|
||||||
|
- breakfast
|
||||||
|
thoughts:
|
||||||
|
rating: 8
|
||||||
|
reviewable: false
|
||||||
|
levels:
|
||||||
|
level1:
|
||||||
|
- level1a
|
||||||
|
- level1b
|
||||||
|
level2:
|
||||||
|
- level2a
|
||||||
|
- level2b
|
||||||
|
author: John Doe
|
||||||
|
status: new
|
||||||
|
type: ["book", "article", "note"]
|
||||||
|
something_new_here: I-am-new
|
||||||
|
---
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?
|
||||||
18
tests/fixtures/sample_vault/02 inline/inline 1.md
vendored
Normal file
18
tests/fixtures/sample_vault/02 inline/inline 1.md
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
|
||||||
|
area:: frontmatter
|
||||||
|
date_created:: 2022-12-22
|
||||||
|
date_modified:: 2022-12-22
|
||||||
|
author:: John Doe
|
||||||
|
status:: new
|
||||||
|
type:: book
|
||||||
|
type:: article
|
||||||
|
#food/fruit/apple
|
||||||
|
#food/fruit/pear
|
||||||
|
#dinner #lunch #breakfast
|
||||||
|
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?
|
||||||
|
|
||||||
|
At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat.
|
||||||
18
tests/fixtures/sample_vault/02 inline/inline 2.md
vendored
Normal file
18
tests/fixtures/sample_vault/02 inline/inline 2.md
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
|
||||||
|
area:: frontmatter
|
||||||
|
date_created:: 2022-12-22
|
||||||
|
date_modified:: 2022-12-22
|
||||||
|
author:: John Doe
|
||||||
|
status:: new
|
||||||
|
type:: book
|
||||||
|
type:: article
|
||||||
|
#food/fruit/apple
|
||||||
|
#food/fruit/pear
|
||||||
|
#dinner #lunch #breakfast
|
||||||
|
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?
|
||||||
|
|
||||||
|
At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat.
|
||||||
18
tests/fixtures/sample_vault/02 inline/inline 3.md
vendored
Normal file
18
tests/fixtures/sample_vault/02 inline/inline 3.md
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
|
||||||
|
area:: frontmatter
|
||||||
|
date_created:: 2022-12-22
|
||||||
|
date_modified:: 2022-12-22
|
||||||
|
author:: John Doe
|
||||||
|
status:: new
|
||||||
|
type:: book
|
||||||
|
type:: article
|
||||||
|
#food/fruit/apple
|
||||||
|
#food/fruit/pear
|
||||||
|
#dinner #lunch #breakfast
|
||||||
|
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?
|
||||||
|
|
||||||
|
At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat.
|
||||||
18
tests/fixtures/sample_vault/02 inline/inline 4.md
vendored
Normal file
18
tests/fixtures/sample_vault/02 inline/inline 4.md
vendored
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
|
||||||
|
area:: frontmatter
|
||||||
|
date_created:: 2022-12-22
|
||||||
|
date_modified:: 2022-12-22
|
||||||
|
author:: John Doe
|
||||||
|
status:: new
|
||||||
|
type:: book
|
||||||
|
type:: article
|
||||||
|
#food/fruit/apple
|
||||||
|
#food/fruit/pear
|
||||||
|
#dinner #lunch #breakfast
|
||||||
|
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?
|
||||||
|
|
||||||
|
At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat.
|
||||||
39
tests/fixtures/sample_vault/03 mixed/mixed 1.md
vendored
Normal file
39
tests/fixtures/sample_vault/03 mixed/mixed 1.md
vendored
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
---
|
||||||
|
date_created: 2022-12-22
|
||||||
|
tags:
|
||||||
|
- food/fruit/apple
|
||||||
|
- dinner
|
||||||
|
- breakfast
|
||||||
|
- not_food
|
||||||
|
author: John Doe
|
||||||
|
nested_list:
|
||||||
|
nested_list_one:
|
||||||
|
- nested_list_one_a
|
||||||
|
- nested_list_one_b
|
||||||
|
type:
|
||||||
|
- article
|
||||||
|
- note
|
||||||
|
---
|
||||||
|
|
||||||
|
area:: mixed
|
||||||
|
date_modified:: 2022-12-22
|
||||||
|
status:: new
|
||||||
|
type:: book
|
||||||
|
inline_key:: inline_key_value
|
||||||
|
type:: [[article]]
|
||||||
|
tags:: from_inline_metadata
|
||||||
|
**bold_key**:: **bold** key value
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, [in_text_key:: in-text value] eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur? #inline_tag
|
||||||
|
|
||||||
|
At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, #inline_tag2 cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat.
|
||||||
|
|
||||||
|
#food/fruit/pear
|
||||||
|
#food/fruit/orange
|
||||||
|
#dinner #breakfast
|
||||||
|
#brunch
|
||||||
5
tests/fixtures/sample_vault/04 no metadata/no_metadata_1.md
vendored
Normal file
5
tests/fixtures/sample_vault/04 no metadata/no_metadata_1.md
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
.lLorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?
|
||||||
|
|
||||||
|
At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repella
|
||||||
8
tests/fixtures/sample_vault_config.toml
vendored
Normal file
8
tests/fixtures/sample_vault_config.toml
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
vault = "tests/fixtures/sample_vault"
|
||||||
|
|
||||||
|
# folders to ignore when parsing content
|
||||||
|
exclude_paths = [".git", ".obsidian", "ignore_folder"]
|
||||||
|
|
||||||
|
[metadata]
|
||||||
|
metadata_location = "frontmatter" # "frontmatter", "top", "bottom"
|
||||||
|
tags_location = "top" # "frontmatter", "top", "bottom"
|
||||||
39
tests/fixtures/test_vault/ignore_folder/file_to_ignore.md
vendored
Normal file
39
tests/fixtures/test_vault/ignore_folder/file_to_ignore.md
vendored
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
---
|
||||||
|
date_created: 2022-12-22
|
||||||
|
tags:
|
||||||
|
- shared_tag
|
||||||
|
- frontmatter_tag1
|
||||||
|
- frontmatter_tag2
|
||||||
|
- frontmatter_tag3
|
||||||
|
- ignored_file_tag1
|
||||||
|
author: author name
|
||||||
|
type: ["article", "note"]
|
||||||
|
---
|
||||||
|
#inline_tag_top1 #inline_tag_top2
|
||||||
|
#ignored_file_tag2
|
||||||
|
|
||||||
|
top_key1:: top_key1_value
|
||||||
|
top_key2:: top_key2_value
|
||||||
|
|
||||||
|
# Heading 1
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. #intext_tag1 Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla (#intext_tag2) pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est lab
|
||||||
|
|
||||||
|
```python
|
||||||
|
#ffffff
|
||||||
|
# This is sample text with tags and metadata
|
||||||
|
#in_codeblock_tag1
|
||||||
|
#ffffff;
|
||||||
|
codeblock_key:: some text
|
||||||
|
The quick brown fox jumped over the #in_codeblock_tag2
|
||||||
|
```
|
||||||
|
|
||||||
|
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab `this is #inline_code_tag1` illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? `this is #inline_code_tag2` Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pari
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
bottom_key1:: bottom_key1_value
|
||||||
|
bottom_key2:: bottom_key2_value
|
||||||
|
|
||||||
|
#inline_tag_bottom1
|
||||||
|
#inline_tag_bottom2
|
||||||
|
#shared_tag
|
||||||
3
tests/fixtures/test_vault/no_metadata.md
vendored
Normal file
3
tests/fixtures/test_vault/no_metadata.md
vendored
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla paria
|
||||||
44
tests/fixtures/test_vault/test1.md
vendored
Normal file
44
tests/fixtures/test_vault/test1.md
vendored
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
---
|
||||||
|
date_created: 2022-12-22
|
||||||
|
tags:
|
||||||
|
- shared_tag
|
||||||
|
- frontmatter_tag1
|
||||||
|
- frontmatter_tag2
|
||||||
|
-
|
||||||
|
- 📅/frontmatter_tag3
|
||||||
|
frontmatter_Key1: author name
|
||||||
|
frontmatter_Key2: ["article", "note"]
|
||||||
|
shared_key1: shared_key1_value
|
||||||
|
shared_key2: shared_key2_value1
|
||||||
|
---
|
||||||
|
|
||||||
|
#inline_tag_top1 #inline_tag_top2
|
||||||
|
|
||||||
|
top_key1:: top_key1_value
|
||||||
|
**top_key2:: top_key2_value**
|
||||||
|
top_key3:: [[top_key3_value_as_link]]
|
||||||
|
shared_key1:: shared_key1_value
|
||||||
|
shared_key2:: shared_key2_value2
|
||||||
|
emoji_📅_key:: emoji_📅_key_value
|
||||||
|
|
||||||
|
# Heading 1
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. #intext_tag1 Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu [intext_key:: intext_value] fugiat nulla (#intext_tag2) pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est lab
|
||||||
|
|
||||||
|
```python
|
||||||
|
#ffffff
|
||||||
|
# This is sample text with tags and metadata
|
||||||
|
#in_codeblock_tag1
|
||||||
|
#ffffff;
|
||||||
|
codeblock_key:: some text
|
||||||
|
in_codeblock_key:: in_codeblock_value
|
||||||
|
The quick brown fox jumped over the #in_codeblock_tag2
|
||||||
|
```
|
||||||
|
|
||||||
|
Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab `this is #inline_code_tag1` illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? `this is #inline_code_tag2` Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pari
|
||||||
|
|
||||||
|
bottom_key1:: bottom_key1_value
|
||||||
|
bottom_key2:: bottom_key2_value
|
||||||
|
|
||||||
|
#inline_tag_bottom1
|
||||||
|
#inline_tag_bottom2
|
||||||
|
#shared_tag
|
||||||
8
tests/fixtures/test_vault_config.toml
vendored
Normal file
8
tests/fixtures/test_vault_config.toml
vendored
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
vault = "tests/fixtures/test_vault"
|
||||||
|
|
||||||
|
# folders to ignore when parsing content
|
||||||
|
exclude_paths = [".git", ".obsidian", "ignore_folder"]
|
||||||
|
|
||||||
|
[metadata]
|
||||||
|
metadata_location = "frontmatter" # "frontmatter", "top", "bottom"
|
||||||
|
tags_location = "top" # "frontmatter", "top", "bottom"
|
||||||
32
tests/helpers.py
Normal file
32
tests/helpers.py
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
# type: ignore
|
||||||
|
"""Helper functions for tests."""
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
|
||||||
|
class Regex:
|
||||||
|
"""Assert that a given string meets some expectations.
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
from tests.helpers import Regex
|
||||||
|
|
||||||
|
assert caplog.text == Regex(r"^.*$", re.I)
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, pattern, flags=0):
|
||||||
|
self._regex = re.compile(pattern, flags)
|
||||||
|
|
||||||
|
def __eq__(self, actual):
|
||||||
|
"""Define equality.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
actual (str): String to be matched to the regex
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if the actual string matches the regex, False otherwise.
|
||||||
|
"""
|
||||||
|
return bool(self._regex.search(actual))
|
||||||
|
|
||||||
|
def __repr__(self):
|
||||||
|
"""Error printed on failed tests."""
|
||||||
|
return f"Regex: '{self._regex.pattern}'"
|
||||||
491
tests/metadata_test.py
Normal file
491
tests/metadata_test.py
Normal file
@@ -0,0 +1,491 @@
|
|||||||
|
# type: ignore
|
||||||
|
"""Test metadata.py."""
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from obsidian_metadata.models.metadata import (
|
||||||
|
Frontmatter,
|
||||||
|
InlineMetadata,
|
||||||
|
InlineTags,
|
||||||
|
VaultMetadata,
|
||||||
|
)
|
||||||
|
from tests.helpers import Regex
|
||||||
|
|
||||||
|
FILE_CONTENT: str = Path("tests/fixtures/test_vault/test1.md").read_text()
|
||||||
|
METADATA: dict[str, list[str]] = {
|
||||||
|
"frontmatter_Key1": ["author name"],
|
||||||
|
"frontmatter_Key2": ["note", "article"],
|
||||||
|
"shared_key1": ["shared_key1_value"],
|
||||||
|
"shared_key2": ["shared_key2_value"],
|
||||||
|
"tags": ["tag 2", "tag 1", "tag 3"],
|
||||||
|
"top_key1": ["top_key1_value"],
|
||||||
|
"top_key2": ["top_key2_value"],
|
||||||
|
"top_key3": ["top_key3_value"],
|
||||||
|
"intext_key": ["intext_key_value"],
|
||||||
|
}
|
||||||
|
FRONTMATTER_CONTENT: str = """
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- tag_1
|
||||||
|
- tag_2
|
||||||
|
-
|
||||||
|
- 📅/tag_3
|
||||||
|
frontmatter_Key1: "frontmatter_Key1_value"
|
||||||
|
frontmatter_Key2: ["note", "article"]
|
||||||
|
shared_key1: "shared_key1_value"
|
||||||
|
---
|
||||||
|
more content
|
||||||
|
|
||||||
|
---
|
||||||
|
horizontal: rule
|
||||||
|
---
|
||||||
|
"""
|
||||||
|
INLINE_CONTENT = """\
|
||||||
|
repeated_key:: repeated_key_value1
|
||||||
|
|
||||||
|
#inline_tag_top1,#inline_tag_top2
|
||||||
|
**bold_key1**:: bold_key1_value
|
||||||
|
**bold_key2:: bold_key2_value**
|
||||||
|
link_key:: [[link_key_value]]
|
||||||
|
tag_key:: #tag_key_value
|
||||||
|
emoji_📅_key:: emoji_📅_key_value
|
||||||
|
**#bold_tag**
|
||||||
|
|
||||||
|
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. [in_text_key1:: in_text_key1_value] Ut enim ad minim veniam, quis nostrud exercitation [in_text_key2:: in_text_key2_value] ullamco laboris nisi ut aliquip ex ea commodo consequat. #in_text_tag Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||||
|
|
||||||
|
```python
|
||||||
|
#ffffff
|
||||||
|
# This is sample text [no_key:: value]with tags and metadata
|
||||||
|
#in_codeblock_tag1
|
||||||
|
#ffffff;
|
||||||
|
in_codeblock_key:: in_codeblock_value
|
||||||
|
The quick brown fox jumped over the #in_codeblock_tag2
|
||||||
|
```
|
||||||
|
repeated_key:: repeated_key_value2
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def test_vault_metadata(capsys) -> None:
|
||||||
|
"""Test VaultMetadata class."""
|
||||||
|
vm = VaultMetadata()
|
||||||
|
assert vm.dict == {}
|
||||||
|
|
||||||
|
vm.add_metadata(METADATA)
|
||||||
|
assert vm.dict == {
|
||||||
|
"frontmatter_Key1": ["author name"],
|
||||||
|
"frontmatter_Key2": ["article", "note"],
|
||||||
|
"intext_key": ["intext_key_value"],
|
||||||
|
"shared_key1": ["shared_key1_value"],
|
||||||
|
"shared_key2": ["shared_key2_value"],
|
||||||
|
"tags": ["tag 1", "tag 2", "tag 3"],
|
||||||
|
"top_key1": ["top_key1_value"],
|
||||||
|
"top_key2": ["top_key2_value"],
|
||||||
|
"top_key3": ["top_key3_value"],
|
||||||
|
}
|
||||||
|
|
||||||
|
vm.print_keys()
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == Regex(r"frontmatter_Key1 +frontmatter_Key2 +intext_key")
|
||||||
|
|
||||||
|
vm.print_tags()
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == Regex(r"tag 1 +tag 2 +tag 3")
|
||||||
|
|
||||||
|
vm.print_metadata()
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == Regex(r"┃ Keys +┃ Values +┃")
|
||||||
|
assert captured.out == Regex(r"│ +│ tag 3 +│")
|
||||||
|
assert captured.out == Regex(r"│ frontmatter_Key1 +│ author name +│")
|
||||||
|
|
||||||
|
new_metadata = {"added_key": ["added_value"], "frontmatter_Key2": ["new_value"]}
|
||||||
|
vm.add_metadata(new_metadata)
|
||||||
|
assert vm.dict == {
|
||||||
|
"added_key": ["added_value"],
|
||||||
|
"frontmatter_Key1": ["author name"],
|
||||||
|
"frontmatter_Key2": ["article", "new_value", "note"],
|
||||||
|
"intext_key": ["intext_key_value"],
|
||||||
|
"shared_key1": ["shared_key1_value"],
|
||||||
|
"shared_key2": ["shared_key2_value"],
|
||||||
|
"tags": ["tag 1", "tag 2", "tag 3"],
|
||||||
|
"top_key1": ["top_key1_value"],
|
||||||
|
"top_key2": ["top_key2_value"],
|
||||||
|
"top_key3": ["top_key3_value"],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_vault_metadata_contains() -> None:
    """Exercise VaultMetadata.contains() with exact and regex lookups."""
    vault_meta = VaultMetadata()
    vault_meta.add_metadata(METADATA)
    assert vault_meta.dict == {
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "intext_key": ["intext_key_value"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }

    # Exact key and key/value matches.
    assert vault_meta.contains("frontmatter_Key1") is True
    assert vault_meta.contains("frontmatter_Key2", "article") is True
    assert vault_meta.contains("frontmatter_Key3") is False
    assert vault_meta.contains("frontmatter_Key2", "no value") is False

    # Regex matches against keys, then against a key's values.
    assert vault_meta.contains("1$", is_regex=True) is True
    assert vault_meta.contains("5$", is_regex=True) is False
    assert vault_meta.contains("tags", r"\d", is_regex=True) is True
    assert vault_meta.contains("tags", r"^\d", is_regex=True) is False
||||||
|
|
||||||
|
def test_vault_metadata_delete() -> None:
    """Exercise VaultMetadata.delete() for single values and whole keys."""
    vault_meta = VaultMetadata()
    vault_meta.add_metadata(METADATA)
    assert vault_meta.dict == {
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "intext_key": ["intext_key_value"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }

    # Unknown key or value: nothing deleted, False returned.
    assert vault_meta.delete("no key") is False
    assert vault_meta.delete("tags", "no value") is False
    # Deleting one value leaves the key's other values intact.
    assert vault_meta.delete("tags", "tag 2") is True
    assert vault_meta.dict["tags"] == ["tag 1", "tag 3"]
    # Deleting the key removes it entirely.
    assert vault_meta.delete("tags") is True
    assert "tags" not in vault_meta.dict
|
|
||||||
|
def test_vault_metadata_rename() -> None:
    """Exercise VaultMetadata.rename() for values and whole keys."""
    vault_meta = VaultMetadata()
    vault_meta.add_metadata(METADATA)
    assert vault_meta.dict == {
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "intext_key": ["intext_key_value"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }

    # Unknown key or value: nothing renamed, False returned.
    assert vault_meta.rename("no key", "new key") is False
    assert vault_meta.rename("tags", "no tag", "new key") is False
    # Renaming one value re-sorts the key's value list.
    assert vault_meta.rename("tags", "tag 2", "new tag") is True
    assert vault_meta.dict["tags"] == ["new tag", "tag 1", "tag 3"]
    # Renaming the key moves its values wholesale.
    assert vault_meta.rename("tags", "old_tags") is True
    assert vault_meta.dict["old_tags"] == ["new tag", "tag 1", "tag 3"]
    assert "tags" not in vault_meta.dict
|
|
||||||
|
def test_frontmatter_create() -> None:
    """Exercise Frontmatter construction from note content."""
    # Content without a frontmatter block parses to an empty dict.
    fm = Frontmatter(INLINE_CONTENT)
    assert fm.dict == {}

    # Content with a frontmatter block parses keys and keeps an original copy.
    fm = Frontmatter(FRONTMATTER_CONTENT)
    expected = {
        "frontmatter_Key1": ["frontmatter_Key1_value"],
        "frontmatter_Key2": ["article", "note"],
        "shared_key1": ["shared_key1_value"],
        "tags": ["tag_1", "tag_2", "📅/tag_3"],
    }
    assert fm.dict == expected
    assert fm.dict_original == expected
||||||
|
|
||||||
|
def test_frontmatter_contains() -> None:
    """Exercise Frontmatter.contains() with exact and regex lookups."""
    fm = Frontmatter(FRONTMATTER_CONTENT)

    # Exact key and key/value matches.
    assert fm.contains("frontmatter_Key1") is True
    assert fm.contains("frontmatter_Key2", "article") is True
    assert fm.contains("frontmatter_Key3") is False
    assert fm.contains("frontmatter_Key2", "no value") is False

    # Regex matches against keys, then against a key's values.
    assert fm.contains(r"\d$", is_regex=True) is True
    assert fm.contains(r"^\d", is_regex=True) is False
    assert fm.contains("key", r"_\d", is_regex=True) is False
    assert fm.contains("key", r"\w\d_", is_regex=True) is True
|
|
||||||
|
def test_frontmatter_rename() -> None:
    """Exercise Frontmatter.rename() and its change tracking."""
    fm = Frontmatter(FRONTMATTER_CONTENT)
    assert fm.dict == {
        "frontmatter_Key1": ["frontmatter_Key1_value"],
        "frontmatter_Key2": ["article", "note"],
        "shared_key1": ["shared_key1_value"],
        "tags": ["tag_1", "tag_2", "📅/tag_3"],
    }

    # Misses do not register as changes.
    assert fm.rename("no key", "new key") is False
    assert fm.rename("tags", "no tag", "new key") is False
    assert fm.has_changes() is False

    # Rename a value: the list is re-sorted.
    assert fm.rename("tags", "tag_2", "new tag") is True
    assert fm.dict["tags"] == ["new tag", "tag_1", "📅/tag_3"]

    # Rename the key: values move wholesale and the change is tracked.
    assert fm.rename("tags", "old_tags") is True
    assert fm.dict["old_tags"] == ["new tag", "tag_1", "📅/tag_3"]
    assert "tags" not in fm.dict
    assert fm.has_changes() is True
||||||
|
|
||||||
|
def test_frontmatter_delete() -> None:
    """Exercise Frontmatter.delete() with exact keys, values, and regex patterns."""
    fm = Frontmatter(FRONTMATTER_CONTENT)
    assert fm.dict == {
        "frontmatter_Key1": ["frontmatter_Key1_value"],
        "frontmatter_Key2": ["article", "note"],
        "shared_key1": ["shared_key1_value"],
        "tags": ["tag_1", "tag_2", "📅/tag_3"],
    }

    # Misses: unknown key, unknown value, non-matching regex. No change recorded.
    assert fm.delete("no key") is False
    assert fm.delete("tags", "no value") is False
    assert fm.delete(r"\d{3}") is False
    assert fm.has_changes() is False

    # Delete a single value, then the whole key.
    assert fm.delete("tags", "tag_2") is True
    assert fm.dict["tags"] == ["tag_1", "📅/tag_3"]
    assert fm.delete("tags") is True
    assert "tags" not in fm.dict
    assert fm.has_changes() is True

    # Regex deletes: all values of one key, then every key matching a pattern.
    assert fm.delete("shared_key1", r"\w+") is True
    assert fm.dict["shared_key1"] == []
    assert fm.delete(r"\w.tter") is True
    assert fm.dict == {"shared_key1": []}
|
|
||||||
|
def test_frontmatter_yaml_conversion():
    """Exercise Frontmatter.to_yaml() in insertion order and sorted-key order."""
    # Expected YAML with keys in parse order.
    expected_yaml: str = """\
tags:
- tag_1
- tag_2
- 📅/tag_3
frontmatter_Key1: frontmatter_Key1_value
frontmatter_Key2:
- article
- note
shared_key1: shared_key1_value
"""
    # Expected YAML with keys sorted alphabetically.
    expected_yaml_sorted: str = """\
frontmatter_Key1: frontmatter_Key1_value
frontmatter_Key2:
- article
- note
shared_key1: shared_key1_value
tags:
- tag_1
- tag_2
- 📅/tag_3
"""
    fm = Frontmatter(FRONTMATTER_CONTENT)
    assert fm.to_yaml() == expected_yaml
    assert fm.to_yaml(sort_keys=True) == expected_yaml_sorted
||||||
|
|
||||||
|
def test_inline_metadata_create() -> None:
    """Exercise InlineMetadata construction from note content."""
    # Frontmatter-only content yields no inline metadata.
    inline_meta = InlineMetadata(FRONTMATTER_CONTENT)
    assert inline_meta.dict == {}

    # Inline content parses `key:: value` pairs; the original copy matches.
    inline_meta = InlineMetadata(INLINE_CONTENT)
    expected = {
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }
    assert inline_meta.dict == expected
    assert inline_meta.dict_original == expected
|
|
||||||
|
def test_inline_contains() -> None:
    """Exercise InlineMetadata.contains() with exact and regex lookups."""
    inline_meta = InlineMetadata(INLINE_CONTENT)

    # Exact key and key/value matches.
    assert inline_meta.contains("bold_key1") is True
    assert inline_meta.contains("bold_key2", "bold_key2_value") is True
    assert inline_meta.contains("bold_key3") is False
    assert inline_meta.contains("bold_key2", "no value") is False

    # Regex matches against keys, then against a key's values.
    assert inline_meta.contains(r"\w{4}_key", is_regex=True) is True
    assert inline_meta.contains(r"^\d", is_regex=True) is False
    assert inline_meta.contains("1$", r"\d_value", is_regex=True) is True
    assert inline_meta.contains("key", r"^\d_value", is_regex=True) is False
|
|
||||||
|
def test_inline_metadata_rename() -> None:
    """Exercise InlineMetadata.rename() and its change tracking."""
    inline_meta = InlineMetadata(INLINE_CONTENT)
    assert inline_meta.dict == {
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }

    # Misses do not register as changes.
    assert inline_meta.rename("no key", "new key") is False
    assert inline_meta.rename("repeated_key", "no value", "new key") is False
    assert inline_meta.has_changes() is False

    # Rename a value: the list is re-sorted.
    assert inline_meta.rename("repeated_key", "repeated_key_value1", "new value") is True
    assert inline_meta.dict["repeated_key"] == ["new value", "repeated_key_value2"]

    # Rename the key: values move wholesale and the change is tracked.
    assert inline_meta.rename("repeated_key", "old_key") is True
    assert inline_meta.dict["old_key"] == ["new value", "repeated_key_value2"]
    assert "repeated_key" not in inline_meta.dict
    assert inline_meta.has_changes() is True
|
|
||||||
|
def test_inline_metadata_delete() -> None:
    """Exercise InlineMetadata.delete() with exact and regex arguments."""
    inline_meta = InlineMetadata(INLINE_CONTENT)
    assert inline_meta.dict == {
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }

    # Misses do not register as changes.
    assert inline_meta.delete("no key") is False
    assert inline_meta.delete("repeated_key", "no value") is False
    assert inline_meta.has_changes() is False

    # Delete one value, then the whole key.
    assert inline_meta.delete("repeated_key", "repeated_key_value1") is True
    assert inline_meta.dict["repeated_key"] == ["repeated_key_value2"]
    assert inline_meta.delete("repeated_key") is True
    assert "repeated_key" not in inline_meta.dict
    assert inline_meta.has_changes() is True

    # Regex key deletes remove every matching key.
    assert inline_meta.delete(r"\d{3}") is False
    assert inline_meta.delete(r"bold_key\d") is True
    assert inline_meta.dict == {
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "tag_key": ["tag_key_value"],
    }

    # Regex value deletes empty the key's value list but keep the key.
    assert inline_meta.delete("emoji_📅_key", ".*📅.*") is True
    assert inline_meta.dict == {
        "emoji_📅_key": [],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "tag_key": ["tag_key_value"],
    }
||||||
|
|
||||||
|
def test_inline_tags_create() -> None:
    """Exercise InlineTags construction from note content."""
    # Frontmatter-only content yields no inline tags.
    tag_data = InlineTags(FRONTMATTER_CONTENT)
    # NOTE(review): bare attribute access from the original test — it only
    # verifies the attribute exists; nothing is asserted about its value.
    tag_data.metadata_key
    assert tag_data.list == []

    # Inline content parses tags; the original copy matches.
    tag_data = InlineTags(INLINE_CONTENT)
    expected = [
        "bold_tag",
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "tag_key_value",
    ]
    assert tag_data.list == expected
    assert tag_data.list_original == expected
||||||
|
|
||||||
|
def test_inline_tags_contains() -> None:
    """Exercise InlineTags.contains() with exact and regex lookups."""
    tag_data = InlineTags(INLINE_CONTENT)

    # Exact matches.
    assert tag_data.contains("bold_tag") is True
    assert tag_data.contains("no tag") is False

    # Regex matches.
    assert tag_data.contains(r"\w_\w", is_regex=True) is True
    assert tag_data.contains(r"\d_\d", is_regex=True) is False
||||||
|
|
||||||
|
def test_inline_tags_rename() -> None:
    """Exercise InlineTags.rename() and its change tracking."""
    tag_data = InlineTags(INLINE_CONTENT)
    assert tag_data.list == [
        "bold_tag",
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "tag_key_value",
    ]

    # A miss does not register as a change.
    assert tag_data.rename("no tag", "new tag") is False
    assert tag_data.has_changes() is False

    # A successful rename re-sorts the list and is tracked.
    assert tag_data.rename("bold_tag", "new tag") is True
    assert tag_data.list == [
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "new tag",
        "tag_key_value",
    ]
    assert tag_data.has_changes() is True
|
|
||||||
|
def test_inline_tags_delete() -> None:
    """Exercise InlineTags.delete() with exact and regex arguments."""
    tag_data = InlineTags(INLINE_CONTENT)
    assert tag_data.list == [
        "bold_tag",
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "tag_key_value",
    ]

    # A miss does not register as a change.
    assert tag_data.delete("no tag") is False
    assert tag_data.has_changes() is False

    # Exact delete removes a single tag and is tracked.
    assert tag_data.delete("bold_tag") is True
    assert tag_data.list == [
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "tag_key_value",
    ]
    assert tag_data.has_changes() is True

    # Regex delete removes every matching tag.
    assert tag_data.delete(r"\d{3}") is False
    assert tag_data.delete(r"inline_tag_top\d") is True
    assert tag_data.list == ["in_text_tag", "tag_key_value"]
|
||||||
358
tests/notes_test.py
Normal file
358
tests/notes_test.py
Normal file
@@ -0,0 +1,358 @@
|
|||||||
|
# type: ignore
|
||||||
|
"""Test notes.py."""
|
||||||
|
|
||||||
|
import re
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
import typer
|
||||||
|
|
||||||
|
from obsidian_metadata.models.notes import Note
|
||||||
|
from tests.helpers import Regex
|
||||||
|
|
||||||
|
|
||||||
|
def test_note_not_exists() -> None:
    """Constructing a Note for a nonexistent path must raise typer.Exit.

    Fix: the original test placed several assertions after the `Note(...)`
    call inside the `pytest.raises` block; since construction raises first,
    those assertions could never execute (dead code giving false confidence).
    They are removed so the test pins exactly the documented failure mode.
    """
    with pytest.raises(typer.Exit):
        Note(note_path="nonexistent_file.md")
||||||
|
|
||||||
|
def test_note_create(sample_note) -> None:
    """Exercise Note construction: frontmatter, inline tags, and inline metadata."""
    target = Note(note_path=sample_note, dry_run=True)

    assert target.note_path == Path(sample_note)
    assert target.dry_run is True
    assert "Lorem ipsum dolor" in target.file_content

    # Frontmatter block parsed into a key -> list-of-values mapping.
    expected_frontmatter = {
        "date_created": ["2022-12-22"],
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value1"],
        "tags": [
            "frontmatter_tag1",
            "frontmatter_tag2",
            "shared_tag",
            "📅/frontmatter_tag3",
        ],
    }
    assert target.frontmatter.dict == expected_frontmatter

    # Inline tags collected from the body.
    assert target.inline_tags.list == [
        "inline_tag_bottom1",
        "inline_tag_bottom2",
        "inline_tag_top1",
        "inline_tag_top2",
        "intext_tag1",
        "intext_tag2",
        "shared_tag",
    ]

    # Inline `key:: value` metadata collected from the body.
    expected_inline = {
        "bottom_key1": ["bottom_key1_value"],
        "bottom_key2": ["bottom_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "intext_key": ["intext_value"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value2"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value_as_link"],
    }
    assert target.inline_metadata.dict == expected_inline

    # Both the working copy and the pristine original match the file on disk.
    # NOTE(review): `open()` without binding the handle mirrors the original
    # test; `read_text()` alone would suffice.
    with sample_note.open():
        content = sample_note.read_text()
    assert target.file_content == content
    assert target.original_file_content == content
||||||
|
|
||||||
|
def test_append(sample_note) -> None:
    """Exercise Note.append(): deduplicated by default, repeatable on request."""
    target = Note(note_path=sample_note)
    assert target.dry_run is False

    novel_text = "This is a test string."
    existing_text = "Lorem ipsum dolor sit"

    # A novel string is appended exactly once.
    target.append(string_to_append=novel_text)
    assert novel_text in target.file_content
    assert len(re.findall(re.escape(novel_text), target.file_content)) == 1

    # Appending the same string again without allow_multiple is a no-op.
    target.append(string_to_append=novel_text)
    assert novel_text in target.file_content
    assert len(re.findall(re.escape(novel_text), target.file_content)) == 1

    # allow_multiple=True permits a duplicate.
    target.append(string_to_append=novel_text, allow_multiple=True)
    assert novel_text in target.file_content
    assert len(re.findall(re.escape(novel_text), target.file_content)) == 2

    # A string already present in the note body is not appended again.
    target.append(string_to_append=existing_text)
    assert existing_text in target.file_content
    assert len(re.findall(re.escape(existing_text), target.file_content)) == 1

    # ...unless allow_multiple=True is given.
    target.append(string_to_append=existing_text, allow_multiple=True)
    assert existing_text in target.file_content
    assert len(re.findall(re.escape(existing_text), target.file_content)) == 2
|
|
||||||
|
def test_contains_inline_tag(sample_note) -> None:
    """Exercise Note.contains_inline_tag() with exact and regex lookups."""
    target = Note(note_path=sample_note)

    assert target.contains_inline_tag("intext_tag1") is True
    assert target.contains_inline_tag("nonexistent_tag") is False
    assert target.contains_inline_tag(r"\d$", is_regex=True) is True
    assert target.contains_inline_tag(r"^\d", is_regex=True) is False
||||||
|
|
||||||
|
def test_contains_metadata(sample_note) -> None:
    """Exercise Note.contains_metadata() across frontmatter and inline keys."""
    target = Note(note_path=sample_note)

    # Key-only lookups, exact then regex.
    assert target.contains_metadata("no key") is False
    assert target.contains_metadata("frontmatter_Key2") is True
    assert target.contains_metadata(r"^\d", is_regex=True) is False
    assert target.contains_metadata(r"^[\w_]+\d", is_regex=True) is True

    # Key/value lookups, exact then regex; inline keys count too.
    assert target.contains_metadata("frontmatter_Key2", "no value") is False
    assert target.contains_metadata("frontmatter_Key2", "article") is True
    assert target.contains_metadata("bottom_key1", "bottom_key1_value") is True
    assert target.contains_metadata(r"bottom_key\d$", r"bottom_key\d_value", is_regex=True) is True
||||||
|
|
||||||
|
def test_delete_inline_metadata(sample_note) -> None:
    """Exercise Note._delete_inline_metadata() against the file content."""
    target = Note(note_path=sample_note)

    # Unknown keys and frontmatter-only keys leave the body untouched.
    target._delete_inline_metadata("nonexistent_key")
    assert target.file_content == target.original_file_content
    target._delete_inline_metadata("frontmatter_Key1")
    assert target.file_content == target.original_file_content

    # Deleting an in-text key keeps the surrounding prose intact.
    target._delete_inline_metadata("intext_key")
    assert target.file_content == Regex(r"dolore eu fugiat", re.DOTALL)

    # Deleting a value removes only the value, leaving the `key::` marker.
    target._delete_inline_metadata("bottom_key2", "bottom_key2_value")
    assert target.file_content != Regex(r"bottom_key2_value")
    assert target.file_content == Regex(r"bottom_key2::")

    # Deleting a key removes the whole `key::` entry.
    target._delete_inline_metadata("bottom_key1")
    assert target.file_content != Regex(r"bottom_key1::")
||||||
|
|
||||||
|
def test_delete_inline_tag(sample_note) -> None:
    """Exercise Note.delete_inline_tag(), including a regex-style argument."""
    target = Note(note_path=sample_note)

    assert target.delete_inline_tag("not_a_tag") is False
    # "intext_tag[1]" is treated as a pattern: the character class matches "1".
    assert target.delete_inline_tag("intext_tag[1]") is True
    assert "intext_tag1" not in target.inline_tags.list
    # The surrounding prose survives the deletion.
    assert target.file_content == Regex("consequat. Duis")
||||||
|
|
||||||
|
def test_delete_metadata(sample_note) -> Note:
    """Exercise Note.delete_metadata() across frontmatter and inline entries."""
    target = Note(note_path=sample_note)

    # Misses return False and change nothing.
    assert target.delete_metadata("nonexistent_key") is False
    assert target.delete_metadata("frontmatter_Key1", "no value") is False

    # Delete a whole frontmatter key.
    assert target.delete_metadata("frontmatter_Key1") is True
    assert "frontmatter_Key1" not in target.frontmatter.dict

    # Delete a single frontmatter value.
    assert target.delete_metadata("frontmatter_Key2", "article") is True
    assert target.frontmatter.dict["frontmatter_Key2"] == ["note"]

    # Delete an inline value: the `key::` marker stays behind in the body.
    assert target.delete_metadata("bottom_key1", "bottom_key1_value") is True
    assert target.inline_metadata.dict["bottom_key1"] == []
    assert target.file_content == Regex(r"bottom_key1::\n")

    # Delete a whole inline key: the marker is removed from the body.
    assert target.delete_metadata("bottom_key2") is True
    assert "bottom_key2" not in target.inline_metadata.dict
    assert target.file_content != Regex(r"bottom_key2")
||||||
|
|
||||||
|
def test_has_changes(sample_note) -> None:
    """Exercise Note.has_changes() after each kind of mutation.

    Each mutation gets a fresh Note so the checks are independent.
    """
    mutations = (
        lambda n: n.append("This is a test string."),
        lambda n: n.delete_metadata("frontmatter_Key1"),
        lambda n: n.delete_metadata("bottom_key2"),
        lambda n: n.delete_inline_tag("intext_tag1"),
    )
    for mutate in mutations:
        target = Note(note_path=sample_note)
        assert target.has_changes() is False
        mutate(target)
        assert target.has_changes() is True
||||||
|
|
||||||
|
def test_print_note(sample_note, capsys) -> None:
    """Exercise Note.print_note(): the raw file is echoed to stdout."""
    target = Note(note_path=sample_note)
    target.print_note()
    captured = capsys.readouterr()
    # Code fence, frontmatter delimiter, and an inline tag must all appear.
    assert "```python" in captured.out
    assert "---" in captured.out
    assert "#shared_tag" in captured.out
||||||
|
|
||||||
|
def test_print_diff(sample_note, capsys) -> None:
    """Exercise Note.print_diff(): empty until the note is mutated."""
    target = Note(note_path=sample_note)

    # No changes yet: the diff is empty.
    target.print_diff()
    captured = capsys.readouterr()
    assert captured.out == ""

    # An append shows up as an added line.
    target.append("This is a test string.")
    target.print_diff()
    captured = capsys.readouterr()
    assert "+ This is a test string." in captured.out

    # A substitution shows up as a removed line plus an added line.
    target.sub("The quick brown fox", "The quick brown hedgehog")
    target.print_diff()
    captured = capsys.readouterr()
    assert "- The quick brown fox" in captured.out
    assert "+ The quick brown hedgehog" in captured.out
||||||
|
|
||||||
|
def test_sub(sample_note) -> None:
    """Exercise Note.sub() with regex and plain-text replacements."""
    target = Note(note_path=sample_note)

    # Regex substitution replaces every match.
    target.sub("#shared_tag", "#unshared_tags", is_regex=True)
    assert target.file_content != Regex(r"#shared_tag")
    assert target.file_content == Regex(r"#unshared_tags")

    # Plain-text substitution: removing " ut " joins the surrounding words.
    target.sub(" ut ", "")
    assert target.file_content != Regex(r" ut ")
    assert target.file_content == Regex(r"laboriosam, nisialiquid ex ea")
||||||
|
|
||||||
|
def test_rename_inline_tag(sample_note) -> None:
    """Exercise Note.rename_inline_tag() in both the tag list and file body."""
    target = Note(note_path=sample_note)

    # Unknown tags are not renamed.
    assert target.rename_inline_tag("no_note_tag", "intext_tag2") is False

    # A successful rename updates the sorted tag list...
    assert target.rename_inline_tag("intext_tag1", "intext_tag26") is True
    assert target.inline_tags.list == [
        "inline_tag_bottom1",
        "inline_tag_bottom2",
        "inline_tag_top1",
        "inline_tag_top2",
        "intext_tag2",
        "intext_tag26",
        "shared_tag",
    ]
    # ...and rewrites the tag in the file content.
    assert target.file_content == Regex(r"#intext_tag26")
    assert target.file_content != Regex(r"#intext_tag1")
||||||
|
|
||||||
|
def test_rename_inline_metadata(sample_note) -> None:
    """Exercise Note._rename_inline_metadata() against the file content."""
    target = Note(note_path=sample_note)

    # Unknown key or value: the body is untouched.
    target._rename_inline_metadata("nonexistent_key", "new_key")
    assert target.file_content == target.original_file_content
    target._rename_inline_metadata("bottom_key1", "no_value", "new_value")
    assert target.file_content == target.original_file_content

    # Rename a key: the `key::` marker changes in place.
    target._rename_inline_metadata("bottom_key1", "new_key")
    assert target.file_content != Regex(r"bottom_key1::")
    assert target.file_content == Regex(r"new_key::")

    # Rename a value, including for an emoji key.
    target._rename_inline_metadata("emoji_📅_key", "emoji_📅_key_value", "new_value")
    assert target.file_content != Regex(r"emoji_📅_key:: ?emoji_📅_key_value")
    assert target.file_content == Regex(r"emoji_📅_key:: ?new_value")
||||||
|
|
||||||
|
def test_rename_metadata(sample_note) -> None:
    """Exercise Note.rename_metadata() across frontmatter and inline entries."""
    target = Note(note_path=sample_note)

    # Misses return False.
    assert target.rename_metadata("nonexistent_key", "new_key") is False
    assert target.rename_metadata("frontmatter_Key1", "nonexistent_value", "article") is False

    # Rename a frontmatter key: the dict and the file both update.
    assert target.rename_metadata("frontmatter_Key1", "new_key") is True
    assert "frontmatter_Key1" not in target.frontmatter.dict
    assert "new_key" in target.frontmatter.dict
    assert target.frontmatter.dict["new_key"] == ["author name"]
    assert target.file_content == Regex(r"new_key: author name")

    # Rename a frontmatter value within a list key.
    assert target.rename_metadata("frontmatter_Key2", "article", "new_key") is True
    assert target.frontmatter.dict["frontmatter_Key2"] == ["new_key", "note"]
    assert target.file_content == Regex(r" - new_key")
    assert target.file_content != Regex(r" - article")

    # Rename an inline key: the `key::` marker changes in the body.
    assert target.rename_metadata("bottom_key1", "new_key") is True
    assert "bottom_key1" not in target.inline_metadata.dict
    assert "new_key" in target.inline_metadata.dict
    assert target.file_content == Regex(r"new_key:: bottom_key1_value")

    # Rename an inline value.
    assert target.rename_metadata("new_key", "bottom_key1_value", "new_value") is True
    assert target.inline_metadata.dict["new_key"] == ["new_value"]
    assert target.file_content == Regex(r"new_key:: new_value")
||||||
|
|
||||||
|
def test_replace_frontmatter(sample_note) -> None:
    """Exercise Note.replace_frontmatter(): the YAML block is rewritten in place."""
    target = Note(note_path=sample_note)

    # Rename a value, regenerate the frontmatter, and verify the exact block.
    target.rename_metadata("frontmatter_Key1", "author name", "some_new_key_here")
    target.replace_frontmatter()
    expected_block = """---
date_created: '2022-12-22'
tags:
- frontmatter_tag1
- frontmatter_tag2
- shared_tag
- 📅/frontmatter_tag3
frontmatter_Key1: some_new_key_here
frontmatter_Key2:
- article
- note
shared_key1: shared_key1_value
shared_key2: shared_key2_value1
---"""
    assert expected_block in target.file_content
    # The note body must survive the rewrite.
    assert "# Heading 1" in target.file_content
    assert "```python" in target.file_content

    # A note with no frontmatter gains a block once its dict is populated.
    bare_note = Note(note_path="tests/fixtures/test_vault/no_metadata.md")
    bare_note.replace_frontmatter()
    bare_note.frontmatter.dict = {"key1": "value1", "key2": "value2"}
    bare_note.replace_frontmatter()
    expected_block = """---
key1: value1
key2: value2
---"""
    assert expected_block in bare_note.file_content
    assert "Lorem ipsum dolor sit amet" in bare_note.file_content
||||||
|
|
||||||
|
def test_write(sample_note, tmp_path) -> None:
    """Exercise Note.write(): in-place save and save to a new path."""
    target = Note(note_path=sample_note)
    target.sub(pattern="Heading 1", replacement="Heading 2")

    # In-place write: re-reading the note shows the substitution.
    target.write()
    target = Note(note_path=sample_note)
    assert "Heading 2" in target.file_content
    assert "Heading 1" not in target.file_content

    # Write to an explicit path: the new file carries the same content.
    new_path = Path(tmp_path / "new_note.md")
    target.write(new_path)
    copied = Note(note_path=new_path)
    assert "Heading 2" in copied.file_content
    assert "Heading 1" not in copied.file_content
||||||
112
tests/patterns_test.py
Normal file
112
tests/patterns_test.py
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
# type: ignore
|
||||||
|
"""Tests for the regex module."""
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from obsidian_metadata.models.patterns import Patterns
|
||||||
|
|
||||||
|
TAG_CONTENT: str = "#1 #2 **#3** [[#4]] [[#5|test]] #6#notag #7_8 #9/10 #11-12 #13; #14, #15. #16: #17* #18(#19) #20[#21] #22\\ #23& #24# #25 **#26** #📅/tag"
|
||||||
|
INLINE_METADATA: str = """
|
||||||
|
**1:: 1**
|
||||||
|
2_2:: [[2_2]] | 2
|
||||||
|
asdfasdf [3:: 3] asdfasdf [7::7] asdf
|
||||||
|
[4:: 4] [5:: 5]
|
||||||
|
> 6:: 6
|
||||||
|
**8**:: **8**
|
||||||
|
10::
|
||||||
|
📅11:: 11/📅/11
|
||||||
|
emoji_📅_key:: 📅emoji_📅_key_value
|
||||||
|
"""
|
||||||
|
FRONTMATTER_CONTENT: str = """
|
||||||
|
---
|
||||||
|
tags:
|
||||||
|
- tag_1
|
||||||
|
- tag_2
|
||||||
|
-
|
||||||
|
- 📅/tag_3
|
||||||
|
frontmatter_Key1: "frontmatter_Key1_value"
|
||||||
|
frontmatter_Key2: ["note", "article"]
|
||||||
|
shared_key1: 'shared_key1_value'
|
||||||
|
---
|
||||||
|
more content
|
||||||
|
|
||||||
|
---
|
||||||
|
horizontal: rule
|
||||||
|
---
|
||||||
|
"""
|
||||||
|
CORRECT_FRONTMATTER_WITH_SEPARATORS: str = """---
|
||||||
|
tags:
|
||||||
|
- tag_1
|
||||||
|
- tag_2
|
||||||
|
-
|
||||||
|
- 📅/tag_3
|
||||||
|
frontmatter_Key1: "frontmatter_Key1_value"
|
||||||
|
frontmatter_Key2: ["note", "article"]
|
||||||
|
shared_key1: 'shared_key1_value'
|
||||||
|
---"""
|
||||||
|
CORRECT_FRONTMATTER_NO_SEPARATORS: str = """
|
||||||
|
tags:
|
||||||
|
- tag_1
|
||||||
|
- tag_2
|
||||||
|
-
|
||||||
|
- 📅/tag_3
|
||||||
|
frontmatter_Key1: "frontmatter_Key1_value"
|
||||||
|
frontmatter_Key2: ["note", "article"]
|
||||||
|
shared_key1: 'shared_key1_value'
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
def test_regex():
|
||||||
|
"""Test regexes."""
|
||||||
|
pattern = Patterns()
|
||||||
|
|
||||||
|
assert pattern.find_inline_tags.findall(TAG_CONTENT) == [
|
||||||
|
"1",
|
||||||
|
"2",
|
||||||
|
"3",
|
||||||
|
"4",
|
||||||
|
"5",
|
||||||
|
"6",
|
||||||
|
"7_8",
|
||||||
|
"9/10",
|
||||||
|
"11-12",
|
||||||
|
"13",
|
||||||
|
"14",
|
||||||
|
"15",
|
||||||
|
"16",
|
||||||
|
"17",
|
||||||
|
"18",
|
||||||
|
"19",
|
||||||
|
"20",
|
||||||
|
"21",
|
||||||
|
"22",
|
||||||
|
"23",
|
||||||
|
"24",
|
||||||
|
"25",
|
||||||
|
"26",
|
||||||
|
"📅/tag",
|
||||||
|
]
|
||||||
|
|
||||||
|
result = pattern.find_inline_metadata.findall(INLINE_METADATA)
|
||||||
|
assert result == [
|
||||||
|
("", "", "1", "1**"),
|
||||||
|
("", "", "2_2", "[[2_2]] | 2"),
|
||||||
|
("3", "3", "", ""),
|
||||||
|
("7", "7", "", ""),
|
||||||
|
("", "", "4", "4] [5:: 5]"),
|
||||||
|
("", "", "8**", "**8**"),
|
||||||
|
("", "", "11", "11/📅/11"),
|
||||||
|
("", "", "emoji_📅_key", "📅emoji_📅_key_value"),
|
||||||
|
]
|
||||||
|
|
||||||
|
found = pattern.frontmatt_block_with_separators.search(FRONTMATTER_CONTENT).group("frontmatter")
|
||||||
|
assert found == CORRECT_FRONTMATTER_WITH_SEPARATORS
|
||||||
|
|
||||||
|
found = pattern.frontmatt_block_no_separators.search(FRONTMATTER_CONTENT).group("frontmatter")
|
||||||
|
assert found == CORRECT_FRONTMATTER_NO_SEPARATORS
|
||||||
|
|
||||||
|
with pytest.raises(AttributeError):
|
||||||
|
pattern.frontmatt_block_no_separators.search(TAG_CONTENT).group("frontmatter")
|
||||||
|
|
||||||
|
assert pattern.validate_tag_text.search("test_tag") is None
|
||||||
|
assert pattern.validate_tag_text.search("#asdf").group(0) == "#"
|
||||||
116
tests/utilities_test.py
Normal file
116
tests/utilities_test.py
Normal file
@@ -0,0 +1,116 @@
|
|||||||
|
# type: ignore
|
||||||
|
"""Test the utilities module."""
|
||||||
|
|
||||||
|
|
||||||
|
from obsidian_metadata._utils import (
|
||||||
|
clean_dictionary,
|
||||||
|
dict_contains,
|
||||||
|
dict_values_to_lists_strings,
|
||||||
|
remove_markdown_sections,
|
||||||
|
vault_validation,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_dict_contains() -> None:
|
||||||
|
"""Test dict_contains."""
|
||||||
|
d = {"key1": ["value1", "value2"], "key2": ["value3", "value4"], "key3": ["value5", "value6"]}
|
||||||
|
|
||||||
|
assert dict_contains(d, "key1") is True
|
||||||
|
assert dict_contains(d, "key5") is False
|
||||||
|
assert dict_contains(d, "key1", "value1") is True
|
||||||
|
assert dict_contains(d, "key1", "value5") is False
|
||||||
|
assert dict_contains(d, "key[1-2]", is_regex=True) is True
|
||||||
|
assert dict_contains(d, "^1", is_regex=True) is False
|
||||||
|
assert dict_contains(d, r"key\d", r"value\d", is_regex=True) is True
|
||||||
|
assert dict_contains(d, "key1$", "^alue", is_regex=True) is False
|
||||||
|
assert dict_contains(d, r"key\d", "value5", is_regex=True) is True
|
||||||
|
|
||||||
|
|
||||||
|
def test_dict_values_to_lists_strings():
|
||||||
|
"""Test converting dictionary values to lists of strings."""
|
||||||
|
dictionary = {
|
||||||
|
"key1": "value1",
|
||||||
|
"key2": ["value2", "value3", None],
|
||||||
|
"key3": {"key4": "value4"},
|
||||||
|
"key5": {"key6": {"key7": "value7"}},
|
||||||
|
"key6": None,
|
||||||
|
"key8": [1, 3, None, 4],
|
||||||
|
"key9": [None, "", "None"],
|
||||||
|
"key10": "None",
|
||||||
|
"key11": "",
|
||||||
|
}
|
||||||
|
|
||||||
|
result = dict_values_to_lists_strings(dictionary)
|
||||||
|
assert result == {
|
||||||
|
"key1": ["value1"],
|
||||||
|
"key10": ["None"],
|
||||||
|
"key11": [""],
|
||||||
|
"key2": ["None", "value2", "value3"],
|
||||||
|
"key3": {"key4": ["value4"]},
|
||||||
|
"key5": {"key6": {"key7": ["value7"]}},
|
||||||
|
"key6": ["None"],
|
||||||
|
"key8": ["1", "3", "4", "None"],
|
||||||
|
"key9": ["", "None", "None"],
|
||||||
|
}
|
||||||
|
|
||||||
|
result = dict_values_to_lists_strings(dictionary, strip_null_values=True)
|
||||||
|
assert result == {
|
||||||
|
"key1": ["value1"],
|
||||||
|
"key10": [],
|
||||||
|
"key11": [],
|
||||||
|
"key2": ["value2", "value3"],
|
||||||
|
"key3": {"key4": ["value4"]},
|
||||||
|
"key5": {"key6": {"key7": ["value7"]}},
|
||||||
|
"key6": [],
|
||||||
|
"key8": ["1", "3", "4"],
|
||||||
|
"key9": ["", "None"],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_vault_validation():
|
||||||
|
"""Test vault validation."""
|
||||||
|
assert vault_validation("tests/") is True
|
||||||
|
assert "Path is not a directory" in vault_validation("pyproject.toml")
|
||||||
|
assert "Path does not exist" in vault_validation("tests/vault2")
|
||||||
|
|
||||||
|
|
||||||
|
def test_remove_markdown_sections():
|
||||||
|
"""Test removing markdown sections."""
|
||||||
|
text: str = """
|
||||||
|
---
|
||||||
|
key: value
|
||||||
|
---
|
||||||
|
|
||||||
|
Lorem ipsum `dolor sit` amet.
|
||||||
|
|
||||||
|
```bash
|
||||||
|
echo "Hello World"
|
||||||
|
```
|
||||||
|
---
|
||||||
|
dd
|
||||||
|
---
|
||||||
|
"""
|
||||||
|
result = remove_markdown_sections(
|
||||||
|
text,
|
||||||
|
strip_codeblocks=True,
|
||||||
|
strip_frontmatter=True,
|
||||||
|
strip_inlinecode=True,
|
||||||
|
)
|
||||||
|
assert "```bash" not in result
|
||||||
|
assert "`dolor sit`" not in result
|
||||||
|
assert "---\nkey: value" not in result
|
||||||
|
assert "`" not in result
|
||||||
|
|
||||||
|
result = remove_markdown_sections(text)
|
||||||
|
assert "```bash" in result
|
||||||
|
assert "`dolor sit`" in result
|
||||||
|
assert "---\nkey: value" in result
|
||||||
|
assert "`" in result
|
||||||
|
|
||||||
|
|
||||||
|
def test_clean_dictionary():
|
||||||
|
"""Test cleaning a dictionary."""
|
||||||
|
dictionary = {" *key* ": ["**value**", "[[value2]]", "#value3"]}
|
||||||
|
|
||||||
|
new_dict = clean_dictionary(dictionary)
|
||||||
|
assert new_dict == {"key": ["value", "value2", "value3"]}
|
||||||
248
tests/vault_test.py
Normal file
248
tests/vault_test.py
Normal file
@@ -0,0 +1,248 @@
|
|||||||
|
# type: ignore
|
||||||
|
"""Tests for the Vault module."""
|
||||||
|
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from obsidian_metadata._config import Config
|
||||||
|
from obsidian_metadata.models import Vault
|
||||||
|
from tests.helpers import Regex
|
||||||
|
|
||||||
|
|
||||||
|
def test_vault_creation(test_vault):
|
||||||
|
"""Test creating a Vault object."""
|
||||||
|
vault_path = test_vault
|
||||||
|
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
|
||||||
|
vault = Vault(config=config)
|
||||||
|
|
||||||
|
assert vault.vault_path == vault_path
|
||||||
|
assert vault.backup_path == Path(f"{vault_path}.bak")
|
||||||
|
assert vault.new_vault_path == Path(f"{vault_path}.new")
|
||||||
|
assert vault.dry_run is False
|
||||||
|
assert str(vault.exclude_paths[0]) == Regex(r".*\.git")
|
||||||
|
assert vault.num_notes() == 2
|
||||||
|
|
||||||
|
assert vault.metadata.dict == {
|
||||||
|
"Inline Tags": [
|
||||||
|
"inline_tag_bottom1",
|
||||||
|
"inline_tag_bottom2",
|
||||||
|
"inline_tag_top1",
|
||||||
|
"inline_tag_top2",
|
||||||
|
"intext_tag1",
|
||||||
|
"intext_tag2",
|
||||||
|
"shared_tag",
|
||||||
|
],
|
||||||
|
"bottom_key1": ["bottom_key1_value"],
|
||||||
|
"bottom_key2": ["bottom_key2_value"],
|
||||||
|
"date_created": ["2022-12-22"],
|
||||||
|
"emoji_📅_key": ["emoji_📅_key_value"],
|
||||||
|
"frontmatter_Key1": ["author name"],
|
||||||
|
"frontmatter_Key2": ["article", "note"],
|
||||||
|
"intext_key": ["intext_value"],
|
||||||
|
"shared_key1": ["shared_key1_value"],
|
||||||
|
"shared_key2": ["shared_key2_value1", "shared_key2_value2"],
|
||||||
|
"tags": [
|
||||||
|
"frontmatter_tag1",
|
||||||
|
"frontmatter_tag2",
|
||||||
|
"shared_tag",
|
||||||
|
"📅/frontmatter_tag3",
|
||||||
|
],
|
||||||
|
"top_key1": ["top_key1_value"],
|
||||||
|
"top_key2": ["top_key2_value"],
|
||||||
|
"top_key3": ["top_key3_value_as_link"],
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_get_filtered_notes(sample_vault) -> None:
|
||||||
|
"""Test filtering notes."""
|
||||||
|
vault_path = sample_vault
|
||||||
|
config = Config(config_path="tests/fixtures/sample_vault_config.toml", vault_path=vault_path)
|
||||||
|
vault = Vault(config=config, path_filter="front")
|
||||||
|
|
||||||
|
assert vault.num_notes() == 4
|
||||||
|
|
||||||
|
vault_path = sample_vault
|
||||||
|
config = Config(config_path="tests/fixtures/sample_vault_config.toml", vault_path=vault_path)
|
||||||
|
vault2 = Vault(config=config, path_filter="mixed")
|
||||||
|
|
||||||
|
assert vault2.num_notes() == 1
|
||||||
|
|
||||||
|
|
||||||
|
def test_backup(test_vault, capsys):
|
||||||
|
"""Test backing up the vault."""
|
||||||
|
vault_path = test_vault
|
||||||
|
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
|
||||||
|
vault = Vault(config=config, dry_run=False)
|
||||||
|
|
||||||
|
vault.backup()
|
||||||
|
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert Path(f"{vault_path}.bak").exists() is True
|
||||||
|
assert captured.out == Regex(r"SUCCESS +| backed up to")
|
||||||
|
|
||||||
|
vault.info()
|
||||||
|
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == Regex(r"Backup path +\│[\s ]+/[\d\w]+")
|
||||||
|
|
||||||
|
|
||||||
|
def test_backup_dryrun(test_vault, capsys):
|
||||||
|
"""Test backing up the vault."""
|
||||||
|
vault_path = test_vault
|
||||||
|
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
|
||||||
|
vault = Vault(config=config, dry_run=True)
|
||||||
|
|
||||||
|
print(f"vault.dry_run: {vault.dry_run}")
|
||||||
|
vault.backup()
|
||||||
|
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert vault.backup_path.exists() is False
|
||||||
|
assert captured.out == Regex(r"DRYRUN +| Backup up vault to")
|
||||||
|
|
||||||
|
|
||||||
|
def test_delete_backup(test_vault, capsys):
|
||||||
|
"""Test deleting the vault backup."""
|
||||||
|
vault_path = test_vault
|
||||||
|
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
|
||||||
|
vault = Vault(config=config, dry_run=False)
|
||||||
|
|
||||||
|
vault.backup()
|
||||||
|
vault.delete_backup()
|
||||||
|
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == Regex(r"Backup deleted")
|
||||||
|
assert vault.backup_path.exists() is False
|
||||||
|
|
||||||
|
vault.info()
|
||||||
|
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == Regex(r"Backup +\│ None")
|
||||||
|
|
||||||
|
|
||||||
|
def test_delete_backup_dryrun(test_vault, capsys):
|
||||||
|
"""Test deleting the vault backup."""
|
||||||
|
vault_path = test_vault
|
||||||
|
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
|
||||||
|
vault = Vault(config=config, dry_run=True)
|
||||||
|
|
||||||
|
Path.mkdir(vault.backup_path)
|
||||||
|
vault.delete_backup()
|
||||||
|
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == Regex(r"DRYRUN +| Delete backup")
|
||||||
|
assert vault.backup_path.exists() is True
|
||||||
|
|
||||||
|
|
||||||
|
def test_info(test_vault, capsys):
|
||||||
|
"""Test printing vault information."""
|
||||||
|
vault_path = test_vault
|
||||||
|
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
|
||||||
|
vault = Vault(config=config)
|
||||||
|
|
||||||
|
vault.info()
|
||||||
|
|
||||||
|
captured = capsys.readouterr()
|
||||||
|
assert captured.out == Regex(r"Vault +\│ /[\d\w]+")
|
||||||
|
assert captured.out == Regex(r"Notes being edited +\│ \d+")
|
||||||
|
assert captured.out == Regex(r"Backup +\│ None")
|
||||||
|
|
||||||
|
|
||||||
|
def test_contains_inline_tag(test_vault) -> None:
|
||||||
|
"""Test if the vault contains an inline tag."""
|
||||||
|
vault_path = test_vault
|
||||||
|
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
|
||||||
|
vault = Vault(config=config)
|
||||||
|
|
||||||
|
assert vault.contains_inline_tag("tag") is False
|
||||||
|
assert vault.contains_inline_tag("intext_tag2") is True
|
||||||
|
|
||||||
|
|
||||||
|
def test_contains_metadata(test_vault) -> None:
|
||||||
|
"""Test if the vault contains a metadata key."""
|
||||||
|
vault_path = test_vault
|
||||||
|
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
|
||||||
|
vault = Vault(config=config)
|
||||||
|
|
||||||
|
assert vault.contains_metadata("key") is False
|
||||||
|
assert vault.contains_metadata("top_key1") is True
|
||||||
|
assert vault.contains_metadata("top_key1", "no_value") is False
|
||||||
|
assert vault.contains_metadata("top_key1", "top_key1_value") is True
|
||||||
|
|
||||||
|
|
||||||
|
def test_delete_inline_tag(test_vault) -> None:
|
||||||
|
"""Test deleting an inline tag."""
|
||||||
|
vault_path = test_vault
|
||||||
|
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
|
||||||
|
vault = Vault(config=config)
|
||||||
|
|
||||||
|
assert vault.delete_inline_tag("no tag") is False
|
||||||
|
assert vault.delete_inline_tag("intext_tag2") is True
|
||||||
|
assert vault.metadata.dict["Inline Tags"] == [
|
||||||
|
"inline_tag_bottom1",
|
||||||
|
"inline_tag_bottom2",
|
||||||
|
"inline_tag_top1",
|
||||||
|
"inline_tag_top2",
|
||||||
|
"intext_tag1",
|
||||||
|
"shared_tag",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def test_delete_metadata(test_vault) -> None:
|
||||||
|
"""Test deleting a metadata key/value."""
|
||||||
|
vault_path = test_vault
|
||||||
|
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
|
||||||
|
vault = Vault(config=config)
|
||||||
|
|
||||||
|
assert vault.delete_metadata("no key") == 0
|
||||||
|
assert vault.delete_metadata("top_key1", "no_value") == 0
|
||||||
|
|
||||||
|
assert vault.delete_metadata("top_key1", "top_key1_value") == 1
|
||||||
|
assert vault.metadata.dict["top_key1"] == []
|
||||||
|
|
||||||
|
assert vault.delete_metadata("top_key2") == 1
|
||||||
|
assert "top_key2" not in vault.metadata.dict
|
||||||
|
|
||||||
|
|
||||||
|
def test_rename_inline_tag(test_vault) -> None:
|
||||||
|
"""Test renaming an inline tag."""
|
||||||
|
vault_path = test_vault
|
||||||
|
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
|
||||||
|
vault = Vault(config=config)
|
||||||
|
|
||||||
|
assert vault.rename_inline_tag("no tag", "new_tag") is False
|
||||||
|
assert vault.rename_inline_tag("intext_tag2", "new_tag") is True
|
||||||
|
assert vault.metadata.dict["Inline Tags"] == [
|
||||||
|
"inline_tag_bottom1",
|
||||||
|
"inline_tag_bottom2",
|
||||||
|
"inline_tag_top1",
|
||||||
|
"inline_tag_top2",
|
||||||
|
"intext_tag1",
|
||||||
|
"new_tag",
|
||||||
|
"shared_tag",
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
def test_rename_metadata(test_vault) -> None:
|
||||||
|
"""Test renaming a metadata key/value."""
|
||||||
|
vault_path = test_vault
|
||||||
|
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
|
||||||
|
vault = Vault(config=config)
|
||||||
|
|
||||||
|
assert vault.rename_metadata("no key", "new_key") is False
|
||||||
|
assert vault.rename_metadata("tags", "nonexistent_value", "new_vaule") is False
|
||||||
|
|
||||||
|
assert vault.rename_metadata("tags", "frontmatter_tag1", "new_vaule") is True
|
||||||
|
assert vault.metadata.dict["tags"] == [
|
||||||
|
"frontmatter_tag2",
|
||||||
|
"new_vaule",
|
||||||
|
"shared_tag",
|
||||||
|
"📅/frontmatter_tag3",
|
||||||
|
]
|
||||||
|
|
||||||
|
assert vault.rename_metadata("tags", "new_key") is True
|
||||||
|
assert "tags" not in vault.metadata.dict
|
||||||
|
assert vault.metadata.dict["new_key"] == [
|
||||||
|
"frontmatter_tag2",
|
||||||
|
"new_vaule",
|
||||||
|
"shared_tag",
|
||||||
|
"📅/frontmatter_tag3",
|
||||||
|
]
|
||||||
Reference in New Issue
Block a user