Mirror of https://github.com/natelandau/obsidian-metadata.git, synced 2025-11-16 00:43:48 -05:00
Compare commits
43 Commits
| SHA1 |
|---|
| 8968127c95 |
| 4bf1acb775 |
| 98fa996462 |
| fdb1b8b5bc |
| 08999cb055 |
| 4e053bda29 |
| fa568de369 |
| 696e19f3e2 |
| 7b762f1a11 |
| c1a40ed8a4 |
| 6f14076e33 |
| ca42823a2f |
| 36adfece51 |
| d636fb2672 |
| 593dbc3b55 |
| 009801a691 |
| 2493db5f23 |
| a2d69d034d |
| 556acc0d46 |
| 8cefca2639 |
| 82e1cba34a |
| 7f431353e1 |
| 4e49445b08 |
| 5f9c79a9c1 |
| 34e7c07dd9 |
| 32a838c8e4 |
| 000ac1a16c |
| 1eb2d30d47 |
| b6a3d115fd |
| 03e6ad59c4 |
| 0b744f65ee |
| bf869cfc15 |
| bd4b94aefa |
| 3932717c7e |
| 755151e2ed |
| 8f8174a902 |
| 3bbcf3a987 |
| 347dd4271f |
| 167997f527 |
| 0143967db8 |
| 446374b335 |
| 401d830942 |
| 7eb8ff5fa8 |
.github/workflows/automated-tests.yml (4 changes, vendored)

@@ -38,7 +38,7 @@ jobs:
        matrix:
            python-version: ["3.10", "3.11"]
    steps:
-       - uses: step-security/harden-runner@18bf8ad2ca49c14cbb28b91346d626ccfb00c518 # v2.1.0
+       - uses: step-security/harden-runner@c8454efe5d0bdefd25384362fe217428ca277d57 # v2.2.0
        with:
            egress-policy: block
            disable-sudo: true
@@ -67,7 +67,7 @@ jobs:
    - name: Lint with Mypy
      run: poetry run mypy src/
    - name: lint with ruff
-     run: poetry run ruff --extend-ignore=I001,D301,D401,PLR2004,PLR0913 src/
+     run: poetry run ruff --extend-ignore=I001,D301,D401 src/
    - name: check pyproject.toml
      run: poetry run poetry check
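The same bump repeats in every workflow in this set: step-security/harden-runner is pinned to a full commit SHA, with the human-readable tag kept in a trailing comment. A minimal sketch of the shared hardening pattern (the job and step layout here are illustrative, not copied from the repository):

```yaml
# Sketch of the hardening pattern shared by the workflows in this compare.
# Pinning to an immutable commit SHA means a moved or re-tagged release
# cannot silently change the action's code; the comment records the tag.
jobs:
  example:
    runs-on: ubuntu-latest
    steps:
      - uses: step-security/harden-runner@c8454efe5d0bdefd25384362fe217428ca277d57 # v2.2.0
        with:
          egress-policy: block # block outbound traffic not on the allow list
          disable-sudo: true   # some of these workflows also drop sudo
```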
.github/workflows/commit-linter.yml (9 changes, vendored)

@@ -2,11 +2,14 @@
name: Commit Linter

on:
-    pull_request:
-        types: [opened, reopened]
+    push:
+        branches:
+            - main
+    pull_request:
+        types:
+            - opened
+            - reopened
+            - synchronize

permissions: # added using https://github.com/step-security/secure-workflows
    contents: read
@@ -20,7 +23,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
        - name: Harden Runner
-         uses: step-security/harden-runner@18bf8ad2ca49c14cbb28b91346d626ccfb00c518 # v2.1.0
+         uses: step-security/harden-runner@c8454efe5d0bdefd25384362fe217428ca277d57 # v2.2.0
          with:
              egress-policy: block
              allowed-endpoints: >
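Reassembled from the added lines above, the linter's new trigger block runs on every push to main and on pull requests that are opened, reopened, or synchronized, so new commits pushed to an open PR are re-linted:

```yaml
on:
  push:
    branches:
      - main
  pull_request:
    types:
      - opened
      - reopened
      - synchronize
```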
.github/workflows/create-release.yml (2 changes, vendored)

@@ -22,7 +22,7 @@ jobs:
        matrix:
            python-version: ["3.11"]
    steps:
-       - uses: step-security/harden-runner@18bf8ad2ca49c14cbb28b91346d626ccfb00c518 # v2.1.0
+       - uses: step-security/harden-runner@c8454efe5d0bdefd25384362fe217428ca277d57 # v2.2.0
        with:
            egress-policy: block
            disable-sudo: true
.github/workflows/devcontainer-checker.yml (13 changes, vendored)

@@ -3,14 +3,17 @@ name: "Dev Container Checker"

on:
    workflow_dispatch:
-    pull_request:
-        types: [opened, reopened]
-    push:
-        paths:
-            - ".devcontainer/**"
-            - ".github/workflows/devcontainer-checker.yml"
-        branches:
-            - main
+    push:
+    pull_request:
+        types:
+            - opened
+            - reopened
+            - synchronize
+    paths:
+        - ".devcontainer/**"
+        - ".github/workflows/devcontainer-checker.yml"
@@ -24,7 +27,7 @@ jobs:
    runs-on: ubuntu-latest

    steps:
-       - uses: step-security/harden-runner@18bf8ad2ca49c14cbb28b91346d626ccfb00c518 # v2.1.0
+       - uses: step-security/harden-runner@c8454efe5d0bdefd25384362fe217428ca277d57 # v2.2.0
        with:
            egress-policy: block
            allowed-endpoints: >
@@ -55,7 +58,7 @@
        uses: actions/checkout@v3

    - name: Build and run dev container task
-     uses: devcontainers/ci@v0.2
+     uses: devcontainers/ci@v0.3
      with:
          runCmd: |
              poe lint
.github/workflows/labeler.yml (2 changes, vendored)

@@ -11,7 +11,7 @@ jobs:
    runs-on: ubuntu-latest
    steps:
        - name: Harden Runner
-         uses: step-security/harden-runner@18bf8ad2ca49c14cbb28b91346d626ccfb00c518 # v2.1.0
+         uses: step-security/harden-runner@c8454efe5d0bdefd25384362fe217428ca277d57 # v2.2.0
          with:
              egress-policy: block
              allowed-endpoints: >
.github/workflows/pr-linter.yml (2 changes, vendored)

@@ -22,7 +22,7 @@ jobs:

    steps:
        - name: Harden Runner
-         uses: step-security/harden-runner@18bf8ad2ca49c14cbb28b91346d626ccfb00c518 # v2.1.0
+         uses: step-security/harden-runner@c8454efe5d0bdefd25384362fe217428ca277d57 # v2.2.0
          with:
              egress-policy: block
              allowed-endpoints: >
.github/workflows/pypi-release.yml (2 changes, vendored)

@@ -18,7 +18,7 @@ jobs:
        matrix:
            python-version: ["3.11"]
    steps:
-       - uses: step-security/harden-runner@18bf8ad2ca49c14cbb28b91346d626ccfb00c518 # v2.1.0
+       - uses: step-security/harden-runner@c8454efe5d0bdefd25384362fe217428ca277d57 # v2.2.0
        with:
            egress-policy: block
            disable-sudo: true
.pre-commit-config.yaml

@@ -5,7 +5,7 @@ default_stages: [commit, manual]
fail_fast: true
repos:
    - repo: "https://github.com/commitizen-tools/commitizen"
-     rev: v2.40.0
+     rev: v2.42.1
      hooks:
          - id: commitizen
          - id: commitizen-branch
@@ -61,10 +61,10 @@ repos:
        entry: yamllint --strict --config-file .yamllint.yml

    - repo: "https://github.com/charliermarsh/ruff-pre-commit"
-     rev: "v0.0.240"
+     rev: "v0.0.257"
      hooks:
          - id: ruff
-           args: ["--extend-ignore", "I001,D301,D401,PLR2004,PLR0913"]
+           args: ["--extend-ignore", "I001,D301,D401"]
            exclude: tests/

    - repo: "https://github.com/jendrikseipp/vulture"
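Reassembled from the changed lines, the ruff hook entry now reads (indentation follows pre-commit's usual layout):

```yaml
- repo: "https://github.com/charliermarsh/ruff-pre-commit"
  rev: "v0.0.257"
  hooks:
    - id: ruff
      args: ["--extend-ignore", "I001,D301,D401"]
      exclude: tests/
```

Dropping PLR2004 (magic value used in comparison) and PLR0913 (too many arguments) from the ignore list here, and in automated-tests.yml above, means ruff now enforces both rules.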
CHANGELOG.md (75 changes)

@@ -1,3 +1,78 @@
+## v0.10.0 (2023-03-21)
+
+### Feat
+
+- add `--export-template` cli option
+
+### Fix
+
+- `--export-template` correctly exports all notes
+- `--export-csv` exports csv not json
+- **csv-import**: fail if `type` does not validate
+
+### Refactor
+
+- pave the way for non-regex key/value deletions
+- remove unused code
+- cleanup rename and delete from dict functions
+
+## v0.9.0 (2023-03-20)
+
+### Feat
+
+- bulk update metadata from a CSV file
+
+### Fix
+
+- find more instances of inline metadata
+- ensure frontmatter values are unique within a key
+- improve validation of bulk imports
+- improve logging to screen
+
+## v0.8.0 (2023-03-12)
+
+### Feat
+
+- move inline metadata to specific location in note (#27)
+
+### Fix
+
+- add `back` option to transpose menus
+
+## v0.7.0 (2023-03-11)
+
+### Feat
+
+- transpose metadata between frontmatter and inline
+- select insert location for new inline metadata
+
+### Fix
+
+- exit after committing changes
+- fix typo and sort order of options
+
+## v0.6.1 (2023-03-03)
+
+### Fix
+
+- improve error handling when frontmatter is malformed
+
+### Refactor
+
+- use single console instance
+
+## v0.6.0 (2023-02-06)
+
+### Feat
+
+- transpose metadata (#18)
+
+### Fix
+
+- **ui**: add separator to top of select lists
+- allow adding inline tags with same key different values (#17)
+- remove unnecessary question when viewing diffs
+
## v0.5.0 (2023-02-04)

### Feat
README.md (65 changes)

@@ -5,7 +5,7 @@
A script to make batch updates to metadata in an Obsidian vault. No changes are
made to the Vault until they are explicitly committed.

-[](https://asciinema.org/a/555789)
+[](https://asciinema.org/a/DQk0ufza1azwU3QFkE6XV33nm)

## Important Disclaimer

@@ -27,6 +27,7 @@ pip install obsidian-metadata
- `--dry-run`: Make no destructive changes
- `--export-csv`: Specify a path and create a CSV export of all metadata
- `--export-json`: Specify a path and create a JSON export of all metadata
+- `--export-template`: Specify a path and export all notes with their associated metadata to a CSV file for use as a bulk import template
- `--help`: Shows interactive help and exits
- `--log-file`: Specify a log file location
- `--log-to-file`: Will log to a file
@@ -43,13 +44,18 @@ Once installed, run `obsidian-metadata` in your terminal to enter an interactive
- Backup: Create a backup of the vault.
- Delete Backup: Delete a backup of the vault.

+**Export Metadata**
+
+- Export all metadata to a CSV organized by metadata type
+- Export all metadata to a CSV organized by note path
+- Export all metadata to a JSON file organized by metadata type
+
**Inspect Metadata**

- **View all metadata in the vault**
- View all **frontmatter**
- View all **inline metadata**
- View all **inline tags**
-- **Export all metadata to CSV or JSON file**

**Filter Notes in Scope**: Limit the scope of notes to be processed with one or more filters.

@@ -59,8 +65,12 @@ Once installed, run `obsidian-metadata` in your terminal to enter an interactive
- **List and clear filters**: List all current filters and clear one or all
- **List notes in scope**: List notes that will be processed.

+**Bulk Edit Metadata** from a CSV file (See the _[Make Bulk Updates](https://github.com/natelandau/obsidian-metadata#make-bulk-updates)_ section below)
+
**Add Metadata**: Add new metadata to your vault.

+When adding a new key to inline metadata, the `insert_location` value in the config file specifies where in the note it will be inserted.
+
- **Add new metadata to the frontmatter**
- **Add new inline metadata** - Set `insert_location` in the config to control where the new metadata is inserted. (Default: Bottom)
- **Add new inline tag** - Set `insert_location` in the config to control where the new tag is inserted. (Default: Bottom)
@@ -77,6 +87,20 @@ Once installed, run `obsidian-metadata` in your terminal to enter an interactive
- **Delete a value from a key**
- **Delete an inline tag**

+**Move Inline Metadata**: Move inline metadata to a specified location within a note
+
+- **Move to Top**: Move all inline metadata beneath the frontmatter
+- **Move to After Title**: Move all inline metadata beneath the first markdown header
+- **Move to Bottom**: Move all inline metadata to the bottom of the note
+
+**Transpose Metadata**: Move metadata from inline to frontmatter or the reverse.
+
+When transposing to inline metadata, the `insert_location` value in the config file specifies where in the note it will be inserted.
+
+- **Transpose all metadata** - Moves all frontmatter to inline metadata, or the reverse
+- **Transpose key** - Transposes a specific key and all its values
+- **Transpose value** - Transposes a specific key:value pair
+
**Review Changes**: Prior to committing changes, review all changes that will be made.

- **View a diff of the changes** that will be made
@@ -104,7 +128,7 @@ Below is an example with two vaults.

# Location to add metadata. One of:
# TOP: Directly after frontmatter.
-# AFTER_TITLE: After a header following frontmatter.
+# AFTER_TITLE: After the first header following frontmatter.
# BOTTOM: The bottom of the note
insert_location = "BOTTOM"

@@ -116,13 +140,46 @@ Below is an example with two vaults.

To bypass the configuration file and specify a vault to use at runtime use the `--vault-path` option.

+### Make Bulk Updates
+
+Bulk edits are supported by importing a CSV file containing the following columns. Column headers must be lowercase.
+
+1. `path` - Path to the note, relative to the vault root folder
+2. `type` - Type of metadata. One of `frontmatter`, `inline_metadata`, or `tag`
+3. `key` - The key to add (leave blank for a tag)
+4. `value` - The value to add to the key
+
+An example of a valid CSV file is
+
+```csv
+path,type,key,value
+folder 1/note1.md,frontmatter,fruits,apple
+folder 1/note1.md,frontmatter,fruits,banana
+folder 1/note1.md,inline_metadata,cars,toyota
+folder 1/note1.md,inline_metadata,cars,honda
+folder 1/note1.md,tag,,tag1
+folder 1/note1.md,tag,,tag2
+```
+
+How bulk imports work:
+
+- **Only notes which match the path in the CSV file are updated**
+- **Affected notes will have ALL of their metadata changed** to reflect the values in the CSV file
+- **Existing metadata in a matching note will be rewritten**. This may result in its location and/or formatting within the note being changed
+- Inline tags ignore any value added to the `key` column
+
+Create a CSV template for making bulk updates containing all your notes and their associated metadata by
+
+1. Using the `--export-template` cli command; or
+2. Selecting the `Metadata by note` option within the `Export Metadata` section of the app
+
# Contributing

## Setup: Once per project

There are two ways to contribute to this project.

-### 1. Containerized development (Recommended)
+### 1. Containerized development

1. Clone this repository. `git clone https://github.com/natelandau/obsidian-metadata`
2. Open the repository in Visual Studio Code
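For context on what the transpose and bulk-import features move around: frontmatter is the YAML block fenced by `---` at the top of a note, while inline metadata lives in the note body as `key:: value` fields. A sketch using the values from the CSV example above (the inline `::` syntax is the Dataview-style convention, which is an assumption here rather than something shown in this diff):

```yaml
# Frontmatter form, between the `---` fences at the top of note1.md:
fruits:
  - apple
  - banana
# Transposed to inline metadata, the same data would sit in the note body as:
#   fruits:: apple
#   fruits:: banana
```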
@@ -7,8 +7,8 @@ coverage:
        threshold: 5% # the leniency in hitting the target
    patch:
        default:
-         target: 50%
-         threshold: 5%
+           target: 50%
+           threshold: 5%
ignore:
    - tests/
poetry.lock (generated, 478 changes)

@@ -1,31 +1,20 @@
-# This file is automatically @generated by Poetry and should not be changed by hand.
-
-[[package]]
-name = "absolufy-imports"
-version = "0.3.1"
-description = "A tool to automatically replace relative imports with absolute ones."
-category = "dev"
-optional = false
-python-versions = ">=3.6.1"
-files = [
-    {file = "absolufy_imports-0.3.1-py2.py3-none-any.whl", hash = "sha256:49bf7c753a9282006d553ba99217f48f947e3eef09e18a700f8a82f75dc7fc5c"},
-    {file = "absolufy_imports-0.3.1.tar.gz", hash = "sha256:c90638a6c0b66826d1fb4880ddc20ef7701af34192c94faf40b95d32b59f9793"},
-]
+# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand.

[[package]]
name = "argcomplete"
-version = "2.0.0"
+version = "2.0.6"
description = "Bash tab completion for argparse"
category = "dev"
optional = false
python-versions = ">=3.6"
files = [
-    {file = "argcomplete-2.0.0-py2.py3-none-any.whl", hash = "sha256:cffa11ea77999bb0dd27bb25ff6dc142a6796142f68d45b1a26b11f58724561e"},
-    {file = "argcomplete-2.0.0.tar.gz", hash = "sha256:6372ad78c89d662035101418ae253668445b391755cfe94ea52f1b9d22425b20"},
+    {file = "argcomplete-2.0.6-py3-none-any.whl", hash = "sha256:6c2170b3e0ab54683cb28d319b65261bde1f11388be688b68118b7d281e34c94"},
+    {file = "argcomplete-2.0.6.tar.gz", hash = "sha256:dc33528d96727882b576b24bc89ed038f3c6abbb6855ff9bb6be23384afff9d6"},
]

[package.extras]
-test = ["coverage", "flake8", "pexpect", "wheel"]
+lint = ["flake8", "mypy"]
+test = ["coverage", "flake8", "mypy", "pexpect", "wheel"]
[[package]]
name = "attrs"
@@ -151,14 +140,14 @@ files = [

[[package]]
name = "commitizen"
-version = "2.40.0"
+version = "2.42.1"
description = "Python commitizen client tool"
category = "dev"
optional = false
python-versions = ">=3.6.2,<4.0.0"
files = [
-    {file = "commitizen-2.40.0-py3-none-any.whl", hash = "sha256:44b589869529c297d4ef594bb7560388d3367b3ae8af36b0664d2f51a28e8f87"},
-    {file = "commitizen-2.40.0.tar.gz", hash = "sha256:8f1a09589ffb87bb17df17261423e88299bd63432dbfc4e6fc6657fea23dddc0"},
+    {file = "commitizen-2.42.1-py3-none-any.whl", hash = "sha256:fad7d37cfae361a859b713d4ac591859d5ca03137dd52de4e1bd208f7f45d5dc"},
+    {file = "commitizen-2.42.1.tar.gz", hash = "sha256:eac18c7c65587061aac6829534907aeb208405b8230bfd35ec08503c228a7f17"},
]

[package.dependencies]
@@ -176,63 +165,63 @@ typing-extensions = ">=4.0.1,<5.0.0"

[[package]]
name = "coverage"
-version = "7.1.0"
+version = "7.2.2"
description = "Code coverage measurement for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "coverage-7.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3b946bbcd5a8231383450b195cfb58cb01cbe7f8949f5758566b881df4b33baf"},
-    {file = "coverage-7.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ec8e767f13be637d056f7e07e61d089e555f719b387a7070154ad80a0ff31801"},
-    {file = "coverage-7.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4a5a5879a939cb84959d86869132b00176197ca561c664fc21478c1eee60d75"},
-    {file = "coverage-7.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b643cb30821e7570c0aaf54feaf0bfb630b79059f85741843e9dc23f33aaca2c"},
-    {file = "coverage-7.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32df215215f3af2c1617a55dbdfb403b772d463d54d219985ac7cd3bf124cada"},
-    {file = "coverage-7.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:33d1ae9d4079e05ac4cc1ef9e20c648f5afabf1a92adfaf2ccf509c50b85717f"},
-    {file = "coverage-7.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:29571503c37f2ef2138a306d23e7270687c0efb9cab4bd8038d609b5c2393a3a"},
-    {file = "coverage-7.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:63ffd21aa133ff48c4dff7adcc46b7ec8b565491bfc371212122dd999812ea1c"},
-    {file = "coverage-7.1.0-cp310-cp310-win32.whl", hash = "sha256:4b14d5e09c656de5038a3f9bfe5228f53439282abcab87317c9f7f1acb280352"},
-    {file = "coverage-7.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:8361be1c2c073919500b6601220a6f2f98ea0b6d2fec5014c1d9cfa23dd07038"},
-    {file = "coverage-7.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:da9b41d4539eefd408c46725fb76ecba3a50a3367cafb7dea5f250d0653c1040"},
-    {file = "coverage-7.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c5b15ed7644ae4bee0ecf74fee95808dcc34ba6ace87e8dfbf5cb0dc20eab45a"},
-    {file = "coverage-7.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d12d076582507ea460ea2a89a8c85cb558f83406c8a41dd641d7be9a32e1274f"},
-    {file = "coverage-7.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2617759031dae1bf183c16cef8fcfb3de7617f394c813fa5e8e46e9b82d4222"},
-    {file = "coverage-7.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4e4881fa9e9667afcc742f0c244d9364d197490fbc91d12ac3b5de0bf2df146"},
-    {file = "coverage-7.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9d58885215094ab4a86a6aef044e42994a2bd76a446dc59b352622655ba6621b"},
-    {file = "coverage-7.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ffeeb38ee4a80a30a6877c5c4c359e5498eec095878f1581453202bfacc8fbc2"},
-    {file = "coverage-7.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3baf5f126f30781b5e93dbefcc8271cb2491647f8283f20ac54d12161dff080e"},
-    {file = "coverage-7.1.0-cp311-cp311-win32.whl", hash = "sha256:ded59300d6330be27bc6cf0b74b89ada58069ced87c48eaf9344e5e84b0072f7"},
-    {file = "coverage-7.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:6a43c7823cd7427b4ed763aa7fb63901ca8288591323b58c9cd6ec31ad910f3c"},
-    {file = "coverage-7.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7a726d742816cb3a8973c8c9a97539c734b3a309345236cd533c4883dda05b8d"},
-    {file = "coverage-7.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc7c85a150501286f8b56bd8ed3aa4093f4b88fb68c0843d21ff9656f0009d6a"},
-    {file = "coverage-7.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5b4198d85a3755d27e64c52f8c95d6333119e49fd001ae5798dac872c95e0f8"},
-    {file = "coverage-7.1.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddb726cb861c3117a553f940372a495fe1078249ff5f8a5478c0576c7be12050"},
-    {file = "coverage-7.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:51b236e764840a6df0661b67e50697aaa0e7d4124ca95e5058fa3d7cbc240b7c"},
-    {file = "coverage-7.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:7ee5c9bb51695f80878faaa5598040dd6c9e172ddcf490382e8aedb8ec3fec8d"},
-    {file = "coverage-7.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c31b75ae466c053a98bf26843563b3b3517b8f37da4d47b1c582fdc703112bc3"},
-    {file = "coverage-7.1.0-cp37-cp37m-win32.whl", hash = "sha256:3b155caf3760408d1cb903b21e6a97ad4e2bdad43cbc265e3ce0afb8e0057e73"},
-    {file = "coverage-7.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:2a60d6513781e87047c3e630b33b4d1e89f39836dac6e069ffee28c4786715f5"},
-    {file = "coverage-7.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f2cba5c6db29ce991029b5e4ac51eb36774458f0a3b8d3137241b32d1bb91f06"},
-    {file = "coverage-7.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beeb129cacea34490ffd4d6153af70509aa3cda20fdda2ea1a2be870dfec8d52"},
-    {file = "coverage-7.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c45948f613d5d18c9ec5eaa203ce06a653334cf1bd47c783a12d0dd4fd9c851"},
-    {file = "coverage-7.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef382417db92ba23dfb5864a3fc9be27ea4894e86620d342a116b243ade5d35d"},
-    {file = "coverage-7.1.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c7c0d0827e853315c9bbd43c1162c006dd808dbbe297db7ae66cd17b07830f0"},
-    {file = "coverage-7.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e5cdbb5cafcedea04924568d990e20ce7f1945a1dd54b560f879ee2d57226912"},
-    {file = "coverage-7.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9817733f0d3ea91bea80de0f79ef971ae94f81ca52f9b66500c6a2fea8e4b4f8"},
-    {file = "coverage-7.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:218fe982371ac7387304153ecd51205f14e9d731b34fb0568181abaf7b443ba0"},
-    {file = "coverage-7.1.0-cp38-cp38-win32.whl", hash = "sha256:04481245ef966fbd24ae9b9e537ce899ae584d521dfbe78f89cad003c38ca2ab"},
-    {file = "coverage-7.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8ae125d1134bf236acba8b83e74c603d1b30e207266121e76484562bc816344c"},
-    {file = "coverage-7.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2bf1d5f2084c3932b56b962a683074a3692bce7cabd3aa023c987a2a8e7612f6"},
-    {file = "coverage-7.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:98b85dd86514d889a2e3dd22ab3c18c9d0019e696478391d86708b805f4ea0fa"},
-    {file = "coverage-7.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38da2db80cc505a611938d8624801158e409928b136c8916cd2e203970dde4dc"},
-    {file = "coverage-7.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3164d31078fa9efe406e198aecd2a02d32a62fecbdef74f76dad6a46c7e48311"},
-    {file = "coverage-7.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db61a79c07331e88b9a9974815c075fbd812bc9dbc4dc44b366b5368a2936063"},
-    {file = "coverage-7.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9ccb092c9ede70b2517a57382a601619d20981f56f440eae7e4d7eaafd1d1d09"},
-    {file = "coverage-7.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:33ff26d0f6cc3ca8de13d14fde1ff8efe1456b53e3f0273e63cc8b3c84a063d8"},
-    {file = "coverage-7.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d47dd659a4ee952e90dc56c97d78132573dc5c7b09d61b416a9deef4ebe01a0c"},
-    {file = "coverage-7.1.0-cp39-cp39-win32.whl", hash = "sha256:d248cd4a92065a4d4543b8331660121b31c4148dd00a691bfb7a5cdc7483cfa4"},
-    {file = "coverage-7.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:7ed681b0f8e8bcbbffa58ba26fcf5dbc8f79e7997595bf071ed5430d8c08d6f3"},
-    {file = "coverage-7.1.0-pp37.pp38.pp39-none-any.whl", hash = "sha256:755e89e32376c850f826c425ece2c35a4fc266c081490eb0a841e7c1cb0d3bda"},
-    {file = "coverage-7.1.0.tar.gz", hash = "sha256:10188fe543560ec4874f974b5305cd1a8bdcfa885ee00ea3a03733464c4ca265"},
+    {file = "coverage-7.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c90e73bdecb7b0d1cea65a08cb41e9d672ac6d7995603d6465ed4914b98b9ad7"},
+    {file = "coverage-7.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e2926b8abedf750c2ecf5035c07515770944acf02e1c46ab08f6348d24c5f94d"},
+    {file = "coverage-7.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57b77b9099f172804e695a40ebaa374f79e4fb8b92f3e167f66facbf92e8e7f5"},
+    {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:efe1c0adad110bf0ad7fb59f833880e489a61e39d699d37249bdf42f80590169"},
+    {file = "coverage-7.2.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2199988e0bc8325d941b209f4fd1c6fa007024b1442c5576f1a32ca2e48941e6"},
+    {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:81f63e0fb74effd5be736cfe07d710307cc0a3ccb8f4741f7f053c057615a137"},
+    {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:186e0fc9cf497365036d51d4d2ab76113fb74f729bd25da0975daab2e107fd90"},
+    {file = "coverage-7.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:420f94a35e3e00a2b43ad5740f935358e24478354ce41c99407cddd283be00d2"},
+    {file = "coverage-7.2.2-cp310-cp310-win32.whl", hash = "sha256:38004671848b5745bb05d4d621526fca30cee164db42a1f185615f39dc997292"},
+    {file = "coverage-7.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:0ce383d5f56d0729d2dd40e53fe3afeb8f2237244b0975e1427bfb2cf0d32bab"},
+    {file = "coverage-7.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3eb55b7b26389dd4f8ae911ba9bc8c027411163839dea4c8b8be54c4ee9ae10b"},
+    {file = "coverage-7.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d2b96123a453a2d7f3995ddb9f28d01fd112319a7a4d5ca99796a7ff43f02af5"},
+    {file = "coverage-7.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:299bc75cb2a41e6741b5e470b8c9fb78d931edbd0cd009c58e5c84de57c06731"},
+    {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e1df45c23d4230e3d56d04414f9057eba501f78db60d4eeecfcb940501b08fd"},
+    {file = "coverage-7.2.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:006ed5582e9cbc8115d2e22d6d2144a0725db542f654d9d4fda86793832f873d"},
+    {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d683d230b5774816e7d784d7ed8444f2a40e7a450e5720d58af593cb0b94a212"},
+    {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8efb48fa743d1c1a65ee8787b5b552681610f06c40a40b7ef94a5b517d885c54"},
+    {file = "coverage-7.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c752d5264053a7cf2fe81c9e14f8a4fb261370a7bb344c2a011836a96fb3f57"},
+    {file = "coverage-7.2.2-cp311-cp311-win32.whl", hash = "sha256:55272f33da9a5d7cccd3774aeca7a01e500a614eaea2a77091e9be000ecd401d"},
+    {file = "coverage-7.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:92ebc1619650409da324d001b3a36f14f63644c7f0a588e331f3b0f67491f512"},
+    {file = "coverage-7.2.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5afdad4cc4cc199fdf3e18088812edcf8f4c5a3c8e6cb69127513ad4cb7471a9"},
+    {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0484d9dd1e6f481b24070c87561c8d7151bdd8b044c93ac99faafd01f695c78e"},
+    {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d530191aa9c66ab4f190be8ac8cc7cfd8f4f3217da379606f3dd4e3d83feba69"},
+    {file = "coverage-7.2.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ac0f522c3b6109c4b764ffec71bf04ebc0523e926ca7cbe6c5ac88f84faced0"},
+    {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:ba279aae162b20444881fc3ed4e4f934c1cf8620f3dab3b531480cf602c76b7f"},
+    {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:53d0fd4c17175aded9c633e319360d41a1f3c6e352ba94edcb0fa5167e2bad67"},
+    {file = "coverage-7.2.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c99cb7c26a3039a8a4ee3ca1efdde471e61b4837108847fb7d5be7789ed8fd9"},
+    {file = "coverage-7.2.2-cp37-cp37m-win32.whl", hash = "sha256:5cc0783844c84af2522e3a99b9b761a979a3ef10fb87fc4048d1ee174e18a7d8"},
+    {file = "coverage-7.2.2-cp37-cp37m-win_amd64.whl", hash = "sha256:817295f06eacdc8623dc4df7d8b49cea65925030d4e1e2a7c7218380c0072c25"},
+    {file = "coverage-7.2.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6146910231ece63facfc5984234ad1b06a36cecc9fd0c028e59ac7c9b18c38c6"},
+    {file = "coverage-7.2.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:387fb46cb8e53ba7304d80aadca5dca84a2fbf6fe3faf6951d8cf2d46485d1e5"},
+    {file = "coverage-7.2.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:046936ab032a2810dcaafd39cc4ef6dd295df1a7cbead08fe996d4765fca9fe4"},
+    {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e627dee428a176ffb13697a2c4318d3f60b2ccdde3acdc9b3f304206ec130ccd"},
+    {file = "coverage-7.2.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fa54fb483decc45f94011898727802309a109d89446a3c76387d016057d2c84"},
+    {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3668291b50b69a0c1ef9f462c7df2c235da3c4073f49543b01e7eb1dee7dd540"},
+    {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7c20b731211261dc9739bbe080c579a1835b0c2d9b274e5fcd903c3a7821cf88"},
+    {file = "coverage-7.2.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5764e1f7471cb8f64b8cda0554f3d4c4085ae4b417bfeab236799863703e5de2"},
+    {file = "coverage-7.2.2-cp38-cp38-win32.whl", hash = "sha256:4f01911c010122f49a3e9bdc730eccc66f9b72bd410a3a9d3cb8448bb50d65d3"},
+    {file = "coverage-7.2.2-cp38-cp38-win_amd64.whl", hash = "sha256:c448b5c9e3df5448a362208b8d4b9ed85305528313fca1b479f14f9fe0d873b8"},
+    {file = "coverage-7.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bfe7085783cda55e53510482fa7b5efc761fad1abe4d653b32710eb548ebdd2d"},
+    {file = "coverage-7.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9d22e94e6dc86de981b1b684b342bec5e331401599ce652900ec59db52940005"},
+    {file = "coverage-7.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507e4720791977934bba016101579b8c500fb21c5fa3cd4cf256477331ddd988"},
+    {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc4803779f0e4b06a2361f666e76f5c2e3715e8e379889d02251ec911befd149"},
+    {file = "coverage-7.2.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db8c2c5ace167fd25ab5dd732714c51d4633f58bac21fb0ff63b0349f62755a8"},
+    {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4f68ee32d7c4164f1e2c8797535a6d0a3733355f5861e0f667e37df2d4b07140"},
+    {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d52f0a114b6a58305b11a5cdecd42b2e7f1ec77eb20e2b33969d702feafdd016"},
+    {file = "coverage-7.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:797aad79e7b6182cb49c08cc5d2f7aa7b2128133b0926060d0a8889ac43843be"},
+    {file = "coverage-7.2.2-cp39-cp39-win32.whl", hash = "sha256:db45eec1dfccdadb179b0f9ca616872c6f700d23945ecc8f21bb105d74b1c5fc"},
+    {file = "coverage-7.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:8dbe2647bf58d2c5a6c5bcc685f23b5f371909a5624e9f5cd51436d6a9f6c6ef"},
+    {file = "coverage-7.2.2-pp37.pp38.pp39-none-any.whl", hash = "sha256:872d6ce1f5be73f05bea4df498c140b9e7ee5418bfa2cc8204e7f9b817caa968"},
+    {file = "coverage-7.2.2.tar.gz", hash = "sha256:36dd42da34fe94ed98c39887b86db9d06777b1c8f860520e21126a75507024f2"},
]

[package.extras]
@@ -264,14 +253,14 @@ files = [

[[package]]
name = "exceptiongroup"
-version = "1.1.0"
+version = "1.1.1"
description = "Backport of PEP 654 (exception groups)"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"},
-    {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"},
+    {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"},
+    {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"},
]

[package.extras]
@@ -294,47 +283,30 @@ testing = ["pre-commit"]

[[package]]
name = "filelock"
-version = "3.9.0"
+version = "3.10.0"
description = "A platform independent file lock."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"},
-    {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"},
+    {file = "filelock-3.10.0-py3-none-any.whl", hash = "sha256:e90b34656470756edf8b19656785c5fea73afa1953f3e1b0d645cef11cab3182"},
+    {file = "filelock-3.10.0.tar.gz", hash = "sha256:3199fd0d3faea8b911be52b663dfccceb84c95949dd13179aa21436d1a79c4ce"},
]

[package.extras]
-docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
-testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
-
-[[package]]
-name = "flake8"
-version = "6.0.0"
-description = "the modular source code checker: pep8 pyflakes and co"
-category = "dev"
-optional = false
-python-versions = ">=3.8.1"
-files = [
-    {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"},
-    {file = "flake8-6.0.0.tar.gz", hash = "sha256:c61007e76655af75e6785a931f452915b371dc48f56efd765247c8fe68f2b181"},
-]
-
-[package.dependencies]
-mccabe = ">=0.7.0,<0.8.0"
-pycodestyle = ">=2.10.0,<2.11.0"
-pyflakes = ">=3.0.0,<3.1.0"
+docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.2.1)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]

[[package]]
name = "identify"
-version = "2.5.17"
+version = "2.5.21"
description = "File identification library for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "identify-2.5.17-py2.py3-none-any.whl", hash = "sha256:7d526dd1283555aafcc91539acc061d8f6f59adb0a7bba462735b0a318bff7ed"},
-    {file = "identify-2.5.17.tar.gz", hash = "sha256:93cc61a861052de9d4c541a7acb7e3dcc9c11b398a2144f6e52ae5285f5f4f06"},
+    {file = "identify-2.5.21-py2.py3-none-any.whl", hash = "sha256:69edcaffa8e91ae0f77d397af60f148b6b45a8044b2cc6d99cafa5b04793ff00"},
+    {file = "identify-2.5.21.tar.gz", hash = "sha256:7671a05ef9cfaf8ff63b15d45a91a1147a03aaccb2976d4e9bd047cbbc508471"},
]

[package.extras]
@@ -417,24 +389,24 @@ dev = ["Sphinx (>=4.1.1)", "black (>=19.10b0)", "colorama (>=0.3.4)", "docutils

[[package]]
name = "markdown-it-py"
-version = "2.1.0"
+version = "2.2.0"
description = "Python port of markdown-it. Markdown parsing, done right!"
category = "main"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "markdown-it-py-2.1.0.tar.gz", hash = "sha256:cf7e59fed14b5ae17c0006eff14a2d9a00ed5f3a846148153899a0224e2c07da"},
-    {file = "markdown_it_py-2.1.0-py3-none-any.whl", hash = "sha256:93de681e5c021a432c63147656fe21790bc01231e0cd2da73626f1aa3ac0fe27"},
+    {file = "markdown-it-py-2.2.0.tar.gz", hash = "sha256:7c9a5e412688bc771c67432cbfebcdd686c93ce6484913dccf06cb5a0bea35a1"},
+    {file = "markdown_it_py-2.2.0-py3-none-any.whl", hash = "sha256:5a35f8d1870171d9acc47b99612dc146129b631baf04970128b568f190d0cc30"},
]

[package.dependencies]
mdurl = ">=0.1,<1.0"

[package.extras]
-benchmarking = ["psutil", "pytest", "pytest-benchmark (>=3.2,<4.0)"]
-code-style = ["pre-commit (==2.6)"]
-compare = ["commonmark (>=0.9.1,<0.10.0)", "markdown (>=3.3.6,<3.4.0)", "mistletoe (>=0.8.1,<0.9.0)", "mistune (>=2.0.2,<2.1.0)", "panflute (>=2.1.3,<2.2.0)"]
-linkify = ["linkify-it-py (>=1.0,<2.0)"]
+benchmarking = ["psutil", "pytest", "pytest-benchmark"]
+code-style = ["pre-commit (>=3.0,<4.0)"]
+compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
+linkify = ["linkify-it-py (>=1,<3)"]
plugins = ["mdit-py-plugins"]
profiling = ["gprof2dot"]
rtd = ["attrs", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
@@ -500,18 +472,6 @@ files = [
    {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"},
]

-[[package]]
-name = "mccabe"
-version = "0.7.0"
-description = "McCabe checker, plugin for flake8"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"},
-    {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"},
-]
-
[[package]]
name = "mdurl"
version = "0.1.2"
@@ -526,46 +486,42 @@ files = [

[[package]]
name = "mypy"
-version = "0.991"
+version = "1.1.1"
description = "Optional static typing for Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "mypy-0.991-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7d17e0a9707d0772f4a7b878f04b4fd11f6f5bcb9b3813975a9b13c9332153ab"},
-    {file = "mypy-0.991-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0714258640194d75677e86c786e80ccf294972cc76885d3ebbb560f11db0003d"},
-    {file = "mypy-0.991-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0c8f3be99e8a8bd403caa8c03be619544bc2c77a7093685dcf308c6b109426c6"},
-    {file = "mypy-0.991-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc9ec663ed6c8f15f4ae9d3c04c989b744436c16d26580eaa760ae9dd5d662eb"},
-    {file = "mypy-0.991-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4307270436fd7694b41f913eb09210faff27ea4979ecbcd849e57d2da2f65305"},
-    {file = "mypy-0.991-cp310-cp310-win_amd64.whl", hash = "sha256:901c2c269c616e6cb0998b33d4adbb4a6af0ac4ce5cd078afd7bc95830e62c1c"},
-    {file = "mypy-0.991-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:d13674f3fb73805ba0c45eb6c0c3053d218aa1f7abead6e446d474529aafc372"},
-    {file = "mypy-0.991-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1c8cd4fb70e8584ca1ed5805cbc7c017a3d1a29fb450621089ffed3e99d1857f"},
-    {file = "mypy-0.991-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:209ee89fbb0deed518605edddd234af80506aec932ad28d73c08f1400ef80a33"},
-    {file = "mypy-0.991-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:37bd02ebf9d10e05b00d71302d2c2e6ca333e6c2a8584a98c00e038db8121f05"},
-    {file = "mypy-0.991-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:26efb2fcc6b67e4d5a55561f39176821d2adf88f2745ddc72751b7890f3194ad"},
-    {file = "mypy-0.991-cp311-cp311-win_amd64.whl", hash = "sha256:3a700330b567114b673cf8ee7388e949f843b356a73b5ab22dd7cff4742a5297"},
-    {file = "mypy-0.991-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1f7d1a520373e2272b10796c3ff721ea1a0712288cafaa95931e66aa15798813"},
-    {file = "mypy-0.991-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:641411733b127c3e0dab94c45af15fea99e4468f99ac88b39efb1ad677da5711"},
-    {file = "mypy-0.991-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3d80e36b7d7a9259b740be6d8d906221789b0d836201af4234093cae89ced0cd"},
-    {file = "mypy-0.991-cp37-cp37m-win_amd64.whl", hash = "sha256:e62ebaad93be3ad1a828a11e90f0e76f15449371ffeecca4a0a0b9adc99abcef"},
-    {file = "mypy-0.991-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b86ce2c1866a748c0f6faca5232059f881cda6dda2a893b9a8373353cfe3715a"},
-    {file = "mypy-0.991-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ac6e503823143464538efda0e8e356d871557ef60ccd38f8824a4257acc18d93"},
-    {file = "mypy-0.991-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0cca5adf694af539aeaa6ac633a7afe9bbd760df9d31be55ab780b77ab5ae8bf"},
-    {file = "mypy-0.991-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a12c56bf73cdab116df96e4ff39610b92a348cc99a1307e1da3c3768bbb5b135"},
-    {file = "mypy-0.991-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:652b651d42f155033a1967739788c436491b577b6a44e4c39fb340d0ee7f0d70"},
-    {file = "mypy-0.991-cp38-cp38-win_amd64.whl", hash = "sha256:4175593dc25d9da12f7de8de873a33f9b2b8bdb4e827a7cae952e5b1a342e243"},
-    {file = "mypy-0.991-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:98e781cd35c0acf33eb0295e8b9c55cdbef64fcb35f6d3aa2186f289bed6e80d"},
-    {file = "mypy-0.991-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6d7464bac72a85cb3491c7e92b5b62f3dcccb8af26826257760a552a5e244aa5"},
-    {file = "mypy-0.991-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c9166b3f81a10cdf9b49f2d594b21b31adadb3d5e9db9b834866c3258b695be3"},
-    {file = "mypy-0.991-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8472f736a5bfb159a5e36740847808f6f5b659960115ff29c7cecec1741c648"},
-    {file = "mypy-0.991-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e80e758243b97b618cdf22004beb09e8a2de1af481382e4d84bc52152d1c476"},
-    {file = "mypy-0.991-cp39-cp39-win_amd64.whl", hash = "sha256:74e259b5c19f70d35fcc1ad3d56499065c601dfe94ff67ae48b85596b9ec1461"},
-    {file = "mypy-0.991-py3-none-any.whl", hash = "sha256:de32edc9b0a7e67c2775e574cb061a537660e51210fbf6006b0b36ea695ae9bb"},
-    {file = "mypy-0.991.tar.gz", hash = "sha256:3c0165ba8f354a6d9881809ef29f1a9318a236a6d81c690094c5df32107bde06"},
+    {file = "mypy-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39c7119335be05630611ee798cc982623b9e8f0cff04a0b48dfc26100e0b97af"},
+    {file = "mypy-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61bf08362e93b6b12fad3eab68c4ea903a077b87c90ac06c11e3d7a09b56b9c1"},
+    {file = "mypy-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbb19c9f662e41e474e0cff502b7064a7edc6764f5262b6cd91d698163196799"},
+    {file = "mypy-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:315ac73cc1cce4771c27d426b7ea558fb4e2836f89cb0296cbe056894e3a1f78"},
+    {file = "mypy-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5cb14ff9919b7df3538590fc4d4c49a0f84392237cbf5f7a816b4161c061829e"},
+    {file = "mypy-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:26cdd6a22b9b40b2fd71881a8a4f34b4d7914c679f154f43385ca878a8297389"},
+    {file = "mypy-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b5f81b40d94c785f288948c16e1f2da37203c6006546c5d947aab6f90aefef2"},
+    {file = "mypy-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b437be1c02712a605591e1ed1d858aba681757a1e55fe678a15c2244cd68a5"},
+    {file = "mypy-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d809f88734f44a0d44959d795b1e6f64b2bbe0ea4d9cc4776aa588bb4229fc1c"},
+    {file = "mypy-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:a380c041db500e1410bb5b16b3c1c35e61e773a5c3517926b81dfdab7582be54"},
+    {file = "mypy-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b7c7b708fe9a871a96626d61912e3f4ddd365bf7f39128362bc50cbd74a634d5"},
+    {file = "mypy-1.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1c10fa12df1232c936830839e2e935d090fc9ee315744ac33b8a32216b93707"},
+    {file = "mypy-1.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0a28a76785bf57655a8ea5eb0540a15b0e781c807b5aa798bd463779988fa1d5"},
+    {file = "mypy-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ef6a01e563ec6a4940784c574d33f6ac1943864634517984471642908b30b6f7"},
+    {file = "mypy-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d64c28e03ce40d5303450f547e07418c64c241669ab20610f273c9e6290b4b0b"},
+    {file = "mypy-1.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64cc3afb3e9e71a79d06e3ed24bb508a6d66f782aff7e56f628bf35ba2e0ba51"},
+    {file = "mypy-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce61663faf7a8e5ec6f456857bfbcec2901fbdb3ad958b778403f63b9e606a1b"},
+    {file = "mypy-1.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b0c373d071593deefbcdd87ec8db91ea13bd8f1328d44947e88beae21e8d5e9"},
+    {file = "mypy-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:2888ce4fe5aae5a673386fa232473014056967f3904f5abfcf6367b5af1f612a"},
+    {file = "mypy-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:19ba15f9627a5723e522d007fe708007bae52b93faab00f95d72f03e1afa9598"},
+    {file = "mypy-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:59bbd71e5c58eed2e992ce6523180e03c221dcd92b52f0e792f291d67b15a71c"},
+    {file = "mypy-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9401e33814cec6aec8c03a9548e9385e0e228fc1b8b0a37b9ea21038e64cdd8a"},
+    {file = "mypy-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b398d8b1f4fba0e3c6463e02f8ad3346f71956b92287af22c9b12c3ec965a9f"},
+    {file = "mypy-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:69b35d1dcb5707382810765ed34da9db47e7f95b3528334a3c999b0c90fe523f"},
+    {file = "mypy-1.1.1-py3-none-any.whl", hash = "sha256:4e4e8b362cdf99ba00c2b218036002bdcdf1e0de085cdb296a49df03fb31dfc4"},
+    {file = "mypy-1.1.1.tar.gz", hash = "sha256:ae9ceae0f5b9059f33dbc62dea087e942c0ccab4b7a003719cb70f9b8abfa32f"},
]

[package.dependencies]
-mypy-extensions = ">=0.4.3"
+mypy-extensions = ">=1.0.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
typing-extensions = ">=3.10"
@@ -628,26 +584,26 @@ files = [

[[package]]
name = "pathspec"
-version = "0.11.0"
+version = "0.11.1"
description = "Utility library for gitignore style pattern matching of file paths."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "pathspec-0.11.0-py3-none-any.whl", hash = "sha256:3a66eb970cbac598f9e5ccb5b2cf58930cd8e3ed86d393d541eaf2d8b1705229"},
-    {file = "pathspec-0.11.0.tar.gz", hash = "sha256:64d338d4e0914e91c1792321e6907b5a593f1ab1851de7fc269557a21b30ebbc"},
+    {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"},
+    {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"},
]

[[package]]
name = "pdoc"
-version = "12.3.1"
+version = "13.0.1"
description = "API Documentation for Python Projects"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "pdoc-12.3.1-py3-none-any.whl", hash = "sha256:c3f24f31286e634de9c76fa6e67bd5c0c5e74360b41dc91e6b82499831eb52d8"},
-    {file = "pdoc-12.3.1.tar.gz", hash = "sha256:453236f225feddb8a9071428f1982a78d74b9b3da4bc4433aedb64dbd0cc87ab"},
+    {file = "pdoc-13.0.1-py3-none-any.whl", hash = "sha256:16a24914280ed318896ad798674e2b0d11832297fdea95632fa472e3d171e247"},
+    {file = "pdoc-13.0.1.tar.gz", hash = "sha256:4d84056847728203b8789ca8a8d0c8003f25002b3caef3365f6f21a1e4228a1b"},
]

[package.dependencies]
@@ -656,38 +612,23 @@ MarkupSafe = "*"
pygments = ">=2.12.0"

[package.extras]
-dev = ["black", "hypothesis", "mypy", "pytest", "pytest-cov", "pytest-timeout", "ruff", "tox", "types-pygments"]
-
-[[package]]
-name = "pep8-naming"
-version = "0.13.3"
-description = "Check PEP-8 naming conventions, plugin for flake8"
-category = "dev"
-optional = false
-python-versions = ">=3.7"
-files = [
-    {file = "pep8-naming-0.13.3.tar.gz", hash = "sha256:1705f046dfcd851378aac3be1cd1551c7c1e5ff363bacad707d43007877fa971"},
-    {file = "pep8_naming-0.13.3-py3-none-any.whl", hash = "sha256:1a86b8c71a03337c97181917e2b472f0f5e4ccb06844a0d6f0a33522549e7a80"},
-]
-
-[package.dependencies]
-flake8 = ">=5.0.0"
+dev = ["black", "hypothesis", "mypy", "pygments (>=2.14.0)", "pytest", "pytest-cov", "pytest-timeout", "ruff", "tox", "types-pygments"]

[[package]]
name = "platformdirs"
-version = "2.6.2"
+version = "3.1.1"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"},
-    {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"},
+    {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"},
+    {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"},
]

[package.extras]
-docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
+docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]

[[package]]
name = "pluggy"
@@ -738,14 +679,14 @@ files = [

[[package]]
name = "pre-commit"
-version = "3.0.4"
+version = "3.2.0"
description = "A framework for managing and maintaining multi-language pre-commit hooks."
category = "dev"
optional = false
python-versions = ">=3.8"
files = [
-    {file = "pre_commit-3.0.4-py2.py3-none-any.whl", hash = "sha256:9e3255edb0c9e7fe9b4f328cb3dc86069f8fdc38026f1bf521018a05eaf4d67b"},
-    {file = "pre_commit-3.0.4.tar.gz", hash = "sha256:bc4687478d55578c4ac37272fe96df66f73d9b5cf81be6f28627d4e712e752d5"},
+    {file = "pre_commit-3.2.0-py2.py3-none-any.whl", hash = "sha256:f712d3688102e13c8e66b7d7dbd8934a6dda157e58635d89f7d6fecdca39ce8a"},
+    {file = "pre_commit-3.2.0.tar.gz", hash = "sha256:818f0d998059934d0f81bb3667e3ccdc32da6ed7ccaac33e43dc231561ddaaa9"},
]

[package.dependencies]
@@ -757,14 +698,14 @@ virtualenv = ">=20.10.0"

[[package]]
name = "prompt-toolkit"
-version = "3.0.36"
+version = "3.0.38"
description = "Library for building powerful interactive command lines in Python"
category = "main"
optional = false
-python-versions = ">=3.6.2"
+python-versions = ">=3.7.0"
files = [
-    {file = "prompt_toolkit-3.0.36-py3-none-any.whl", hash = "sha256:aa64ad242a462c5ff0363a7b9cfe696c20d55d9fc60c11fd8e632d064804d305"},
-    {file = "prompt_toolkit-3.0.36.tar.gz", hash = "sha256:3e163f254bef5a03b146397d7c1963bd3e2812f0964bb9a24e6ec761fd28db63"},
+    {file = "prompt_toolkit-3.0.38-py3-none-any.whl", hash = "sha256:45ea77a2f7c60418850331366c81cf6b5b9cf4c7fd34616f733c5427e6abbb1f"},
+    {file = "prompt_toolkit-3.0.38.tar.gz", hash = "sha256:23ac5d50538a9a38c8bde05fecb47d0b403ecd0662857a86f886f798563d5b9b"},
]

[package.dependencies]
@@ -782,30 +723,6 @@ files = [
    {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},
]

-[[package]]
-name = "pycodestyle"
-version = "2.10.0"
-description = "Python style guide checker"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "pycodestyle-2.10.0-py2.py3-none-any.whl", hash = "sha256:8a4eaf0d0495c7395bdab3589ac2db602797d76207242c17d470186815706610"},
-    {file = "pycodestyle-2.10.0.tar.gz", hash = "sha256:347187bdb476329d98f695c213d7295a846d1152ff4fe9bacb8a9590b8ee7053"},
-]
-
-[[package]]
-name = "pyflakes"
-version = "3.0.1"
-description = "passive checker of Python programs"
-category = "dev"
-optional = false
-python-versions = ">=3.6"
-files = [
-    {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"},
-    {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"},
-]
-
[[package]]
name = "pygments"
version = "2.14.0"
@@ -821,31 +738,16 @@
[package.extras]
plugins = ["importlib-metadata"]

-[[package]]
-name = "pysnooper"
-version = "1.1.1"
-description = "A poor man's debugger for Python."
-category = "dev"
-optional = false
-python-versions = "*"
-files = [
-    {file = "PySnooper-1.1.1-py2.py3-none-any.whl", hash = "sha256:378f13d731a3e04d3d0350e5f295bdd0f1b49fc8a8b8bf2067fe1e5290bd20be"},
-    {file = "PySnooper-1.1.1.tar.gz", hash = "sha256:d17dc91cca1593c10230dce45e46b1d3ff0f8910f0c38e941edf6ba1260b3820"},
-]
-
-[package.extras]
-tests = ["pytest"]
-
[[package]]
name = "pytest"
-version = "7.2.1"
+version = "7.2.2"
description = "pytest: simple powerful testing with Python"
category = "dev"
optional = false
python-versions = ">=3.7"
files = [
-    {file = "pytest-7.2.1-py3-none-any.whl", hash = "sha256:c7c6ca206e93355074ae32f7403e8ea12163b1163c976fee7d4d84027c162be5"},
-    {file = "pytest-7.2.1.tar.gz", hash = "sha256:d45e0952f3727241918b8fd0f376f5ff6b301cc0777c6f9a556935c92d8a7d42"},
+    {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"},
+    {file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"},
]

[package.dependencies]
@@ -914,14 +816,14 @@ test = ["pytest-adaptavist (>=5.1.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "pytest-xdist"
|
||||
version = "3.1.0"
|
||||
version = "3.2.1"
|
||||
description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pytest-xdist-3.1.0.tar.gz", hash = "sha256:40fdb8f3544921c5dfcd486ac080ce22870e71d82ced6d2e78fa97c2addd480c"},
|
||||
{file = "pytest_xdist-3.1.0-py3-none-any.whl", hash = "sha256:70a76f191d8a1d2d6be69fc440cdf85f3e4c03c08b520fd5dc5d338d6cf07d89"},
|
||||
{file = "pytest-xdist-3.2.1.tar.gz", hash = "sha256:1849bd98d8b242b948e472db7478e090bf3361912a8fed87992ed94085f54727"},
|
||||
{file = "pytest_xdist-3.2.1-py3-none-any.whl", hash = "sha256:37290d161638a20b672401deef1cba812d110ac27e35d213f091d15b8beb40c9"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -948,6 +850,13 @@ files = [
|
||||
{file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"},
|
||||
{file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"},
|
||||
{file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"},
|
||||
{file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"},
|
||||
{file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"},
|
||||
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"},
|
||||
{file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"},
|
||||
@@ -1094,19 +1003,19 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "rich"
|
||||
version = "13.3.1"
|
||||
version = "13.3.2"
|
||||
description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.7.0"
|
||||
files = [
|
||||
{file = "rich-13.3.1-py3-none-any.whl", hash = "sha256:8aa57747f3fc3e977684f0176a88e789be314a99f99b43b75d1e9cb5dc6db9e9"},
|
||||
{file = "rich-13.3.1.tar.gz", hash = "sha256:125d96d20c92b946b983d0d392b84ff945461e5a06d3867e9f9e575f8697b67f"},
|
||||
{file = "rich-13.3.2-py3-none-any.whl", hash = "sha256:a104f37270bf677148d8acb07d33be1569eeee87e2d1beb286a4e9113caf6f2f"},
|
||||
{file = "rich-13.3.2.tar.gz", hash = "sha256:91954fe80cfb7985727a467ca98a7618e5dd15178cc2da10f553b36a93859001"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
markdown-it-py = ">=2.1.0,<3.0.0"
|
||||
pygments = ">=2.14.0,<3.0.0"
|
||||
markdown-it-py = ">=2.2.0,<3.0.0"
|
||||
pygments = ">=2.13.0,<3.0.0"
|
||||
|
||||
[package.extras]
|
||||
jupyter = ["ipywidgets (>=7.5.1,<9)"]
|
||||
@@ -1146,6 +1055,9 @@ files = [
|
||||
{file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"},
|
||||
{file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"},
|
||||
{file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_12_6_arm64.whl", hash = "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5"},
|
||||
{file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"},
|
||||
{file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"},
|
||||
{file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"},
|
||||
{file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"},
|
||||
{file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"},
|
||||
{file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"},
|
||||
@@ -1175,40 +1087,41 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.0.240"
|
||||
version = "0.0.257"
|
||||
description = "An extremely fast Python linter, written in Rust."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "ruff-0.0.240-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:222dd5a5f7cf2f155d7bb77ac484b9afd6f8aaecd963a91c8dbb93355ef42fd2"},
|
||||
{file = "ruff-0.0.240-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:2c956a037671b5ab81546346f3e7f0b3f0e13d0b2e5a3e88c1b2227a1e9aae82"},
|
||||
{file = "ruff-0.0.240-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b43c73fc165f8c7de7c095208d05653744aee6fb0a71680449c2ff1cf59183ea"},
|
||||
{file = "ruff-0.0.240-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f58f1122001150d70909885ccf43d869237be814d4cfc74bb60b3883635e440a"},
|
||||
{file = "ruff-0.0.240-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b427050336b8967755e305f506e84e550591fa47766b5b0cb0c8bcb5c8ca9e7"},
|
||||
{file = "ruff-0.0.240-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:0fe8cc47c4c3423548a074e163388f943a14b1e349be88e5dc4cd43df81b6344"},
|
||||
{file = "ruff-0.0.240-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2f40f07d030e7a8cbe365a62fe8543e146b9bcd2a31f5625c2beaccad0d1b8c1"},
|
||||
{file = "ruff-0.0.240-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c222ad12e4bf795e3cec64d56178af1bfbc5d97929a0abf685564937e52c9862"},
|
||||
{file = "ruff-0.0.240-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a26eb3cd68527bcae2543027a0a674d37d03f239f6f025049149115c9775438d"},
|
||||
{file = "ruff-0.0.240-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4591c9104b6898cbd0df57f6b6f8e2907b08fa85ff5196750f0a7b370ae9f78e"},
|
||||
{file = "ruff-0.0.240-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7fed973319ca0a8c2e5c80732217b9b1ec069305839f480907469791e596b150"},
|
||||
{file = "ruff-0.0.240-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4ce049d1fedb1b785fef29403d26e6109b77287b51afd10b74edc986f609c4af"},
|
||||
{file = "ruff-0.0.240-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5127cfaec1f78bd7104174eeacee85dea64796905812b448efd60f504cfa5eec"},
|
||||
{file = "ruff-0.0.240-py3-none-win32.whl", hash = "sha256:071e01a980ffd638a5ce7960ce662fa9b434962f78e7c575478c64e5f147aac8"},
|
||||
{file = "ruff-0.0.240-py3-none-win_amd64.whl", hash = "sha256:d0b1ac5d1d882db25ca4b7dff8aa813ecc7912bdde4ad8f59f2d922b1996cbc7"},
|
||||
{file = "ruff-0.0.240.tar.gz", hash = "sha256:0f1a0b04ce6f3d59894c64f3c3a5a0a35ff4803b8dc51e962d7de42fdb0f5eb1"},
|
||||
{file = "ruff-0.0.257-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:7280640690c1d0046b20e0eb924319a89d8e22925d7d232180ce31196e7478f8"},
|
||||
{file = "ruff-0.0.257-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:4582b73da61ab410ffda35b2987a6eacb33f18263e1c91810f0b9779ec4f41a9"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5acae9878f1136893e266348acdb9d30dfae23c296d3012043816432a5abdd51"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d9f0912d045eee15e8e02e335c16d7a7f9fb6821aa5eb1628eeb5bbfa3d88908"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a9542c34ee5298b31be6c6ba304f14b672dcf104846ee65adb2466d3e325870"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3464f1ad4cea6c4b9325da13ae306bd22bf15d226e18d19c52db191b1f4355ac"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a54bfd559e558ee0df2a2f3756423fe6a9de7307bc290d807c3cdf351cb4c24"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3438fd38446e1a0915316f4085405c9feca20fe00a4b614995ab7034dbfaa7ff"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:358cc2b547bd6451dcf2427b22a9c29a2d9c34e66576c693a6381c5f2ed3011d"},
|
||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:783390f1e94a168c79d7004426dae3e4ae2999cc85f7d00fdd86c62262b71854"},
|
||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:aaa3b5b6929c63a854b6bcea7a229453b455ab26337100b2905fae4523ca5667"},
|
||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4ecd7a84db4816df2dcd0f11c5365a9a2cf4fa70a19b3ac161b7b0bfa592959d"},
|
||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3db8d77d5651a2c0d307102d717627a025d4488d406f54c2764b21cfbe11d822"},
|
||||
{file = "ruff-0.0.257-py3-none-win32.whl", hash = "sha256:d2c8755fa4f6c5e5ec032ad341ca3beeecd16786e12c3f26e6b0cc40418ae998"},
|
||||
{file = "ruff-0.0.257-py3-none-win_amd64.whl", hash = "sha256:3cec07d6fecb1ebbc45ea8eeb1047b929caa2f7dfb8dd4b0e1869ff789326da5"},
|
||||
{file = "ruff-0.0.257-py3-none-win_arm64.whl", hash = "sha256:352f1bdb9b433b3b389aee512ffb0b82226ae1e25b3d92e4eaf0e7be6b1b6f6a"},
|
||||
{file = "ruff-0.0.257.tar.gz", hash = "sha256:fedfd06a37ddc17449203c3e38fc83fb68de7f20b5daa0ee4e60d3599b38bab0"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "setuptools"
|
||||
version = "67.1.0"
|
||||
version = "67.6.0"
|
||||
description = "Easily download, build, install, upgrade, and uninstall Python packages"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "setuptools-67.1.0-py3-none-any.whl", hash = "sha256:a7687c12b444eaac951ea87a9627c4f904ac757e7abdc5aac32833234af90378"},
|
||||
{file = "setuptools-67.1.0.tar.gz", hash = "sha256:e261cdf010c11a41cb5cb5f1bf3338a7433832029f559a6a7614bd42a967c300"},
|
||||
{file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"},
|
||||
{file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
@@ -1216,6 +1129,18 @@ docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-g
|
||||
testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"]
|
||||
testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"]
|
||||
|
||||
[[package]]
|
||||
name = "sh"
|
||||
version = "2.0.3"
|
||||
description = "Python subprocess replacement"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.8.1,<4.0"
|
||||
files = [
|
||||
{file = "sh-2.0.3-py3-none-any.whl", hash = "sha256:351f8968a2ed99755665fef62f038d60b5245999d73c2f1b1705f48b22e2a853"},
|
||||
{file = "sh-2.0.3.tar.gz", hash = "sha256:800efeda403b63879b0a5625f65a0021fd1ea61ed181954da0346372a7b2a341"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "shellingham"
|
||||
version = "1.5.0.post1"
|
||||
@@ -1296,19 +1221,22 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "typeguard"
|
||||
version = "2.13.3"
|
||||
version = "3.0.1"
|
||||
description = "Run-time type checker for Python"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.5.3"
|
||||
python-versions = ">=3.7.4"
|
||||
files = [
|
||||
{file = "typeguard-2.13.3-py3-none-any.whl", hash = "sha256:5e3e3be01e887e7eafae5af63d1f36c849aaa94e3a0112097312aabfa16284f1"},
|
||||
{file = "typeguard-2.13.3.tar.gz", hash = "sha256:00edaa8da3a133674796cf5ea87d9f4b4c367d77476e185e80251cc13dfbb8c4"},
|
||||
{file = "typeguard-3.0.1-py3-none-any.whl", hash = "sha256:15628045c830abf68533247afd2cb04683b5ce6f4e30d5401a5ef6f5182280de"},
|
||||
{file = "typeguard-3.0.1.tar.gz", hash = "sha256:beb0e67c5dc76eea4a6d00a6606d444d899589908362960769d0c4a1d32bca70"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
typing-extensions = {version = ">=4.4.0", markers = "python_version < \"3.11\""}
|
||||
|
||||
[package.extras]
|
||||
doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
|
||||
test = ["mypy", "pytest", "typing-extensions"]
|
||||
doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"]
|
||||
test = ["mypy (>=0.991)", "pytest (>=7)"]
|
||||
|
||||
[[package]]
|
||||
name = "typer"
|
||||
@@ -1333,48 +1261,48 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.
|
||||
|
||||
[[package]]
|
||||
name = "types-python-dateutil"
|
||||
version = "2.8.19.6"
|
||||
version = "2.8.19.10"
|
||||
description = "Typing stubs for python-dateutil"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = "*"
|
||||
files = [
|
||||
{file = "types-python-dateutil-2.8.19.6.tar.gz", hash = "sha256:4a6f4cc19ce4ba1a08670871e297bf3802f55d4f129e6aa2443f540b6cf803d2"},
|
||||
{file = "types_python_dateutil-2.8.19.6-py3-none-any.whl", hash = "sha256:cfb7d31021c6bce6f3362c69af6e3abb48fe3e08854f02487e844ff910deec2a"},
|
||||
{file = "types-python-dateutil-2.8.19.10.tar.gz", hash = "sha256:c640f2eb71b4b94a9d3bfda4c04250d29a24e51b8bad6e12fddec0cf6e96f7a3"},
|
||||
{file = "types_python_dateutil-2.8.19.10-py3-none-any.whl", hash = "sha256:fbecd02c19cac383bf4a16248d45ffcff17c93a04c0794be5f95d42c6aa5de39"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "typing-extensions"
|
||||
version = "4.4.0"
|
||||
version = "4.5.0"
|
||||
description = "Backported and Experimental Type Hints for Python 3.7+"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"},
|
||||
{file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"},
|
||||
{file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"},
|
||||
{file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "virtualenv"
|
||||
version = "20.17.1"
|
||||
version = "20.21.0"
|
||||
description = "Virtual Python Environment builder"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "virtualenv-20.17.1-py3-none-any.whl", hash = "sha256:ce3b1684d6e1a20a3e5ed36795a97dfc6af29bc3970ca8dab93e11ac6094b3c4"},
|
||||
{file = "virtualenv-20.17.1.tar.gz", hash = "sha256:f8b927684efc6f1cc206c9db297a570ab9ad0e51c16fa9e45487d36d1905c058"},
|
||||
{file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"},
|
||||
{file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
distlib = ">=0.3.6,<1"
|
||||
filelock = ">=3.4.1,<4"
|
||||
platformdirs = ">=2.4,<3"
|
||||
platformdirs = ">=2.4,<4"
|
||||
|
||||
[package.extras]
|
||||
docs = ["proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-argparse (>=0.3.2)", "sphinx-rtd-theme (>=1)", "towncrier (>=22.8)"]
|
||||
testing = ["coverage (>=6.2)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=21.3)", "pytest (>=7.0.1)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.6.1)", "pytest-randomly (>=3.10.3)", "pytest-timeout (>=2.1)"]
|
||||
docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"]
|
||||
test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "vulture"
|
||||
@@ -1421,4 +1349,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.10"
|
||||
content-hash = "5bb4866827da1d2e417218c8120be075f1a61a25f015d97183feb63098c64afa"
|
||||
content-hash = "8fa62f96cc77eac773497573dcbdd5666173cbec56374fea73a814f3fb7f5338"
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
[virtualenvs]
|
||||
in-project = true
|
||||
in-project = true
|
||||
|
||||
216
pyproject.toml
@@ -11,7 +11,7 @@
name = "obsidian-metadata"
readme = "README.md"
repository = "https://github.com/natelandau/obsidian-metadata"
version = "0.5.0"
version = "0.10.0"

[tool.poetry.scripts] # https://python-poetry.org/docs/pyproject/#scripts
obsidian-metadata = "obsidian_metadata.cli:app"
@@ -21,94 +21,44 @@
python = "^3.10"
questionary = "^1.10.0"
regex = "^2022.10.31"
rich = "^13.2.0"
rich = "^13.3.2"
ruamel-yaml = "^0.17.21"
shellingham = "^1.4.0"
shellingham = "^1.5.0.post1"
tomlkit = "^0.11.6"
typer = "^0.7.0"

[tool.poetry.group.test.dependencies]
pytest = "^7.2.0"
pytest = "^7.2.2"
pytest-clarity = "^1.0.1"
pytest-mock = "^3.10.0"
pytest-pretty-terminal = "^1.1.0"
pytest-xdist = "^3.1.0"
pytest-xdist = "^3.2.1"

[tool.poetry.group.dev.dependencies]
absolufy-imports = "^0.3.1"
black = "^23.1.0"
commitizen = "^2.40.0"
coverage = "^7.1.0"
commitizen = "^2.42.1"
coverage = "^7.2.2"
interrogate = "^1.5.0"
mypy = "^0.991"
pdoc = "^12.3.1"
pep8-naming = "^0.13.3"
poethepoet = "^0.18.0"
pre-commit = "^3.0.4"
pysnooper = "^1.1.1"
ruff = "^0.0.240"
typeguard = "^2.13.3"
types-python-dateutil = "^2.8.19.5"
mypy = "^1.1.1"
pdoc = "^13.0.1"
poethepoet = "^0.18.1"
pre-commit = "^3.2.0"
ruff = "^0.0.257"
sh = "2.0.3"
typeguard = "^3.0.1"
types-python-dateutil = "^2.8.19.10"
vulture = "^2.7"

[tool.ruff] # https://github.com/charliermarsh/ruff
fix = true
ignore = [
    "B006",
    "B008",
    "D107",
    "D203",
    "D204",
    "D213",
    "D215",
    "D404",
    "D406",
    "D407",
    "D408",
    "D409",
    "D413",
    "E501",
    "N805",
    "PGH001",
    "PGH003",
    "UP007",
]
ignore-init-module-imports = true
[tool.black]
line-length = 100
per-file-ignores = { "cli.py" = [
    "PLR0913",
], "tests/*.py" = [
    "E999",
    "PLR2004",
] }
select = [
    "A",
    "B",
    "BLE",
    "C4",
    "C90",
    "D",
    "E",
    "ERA",
    "F",
    "I",
    "N",
    "PGH",
    "PLC",
    "PLE",
    "PLR",
    "PLW",
    "RET",
    "RUF",
    "SIM",
    "TID",
    "UP",
    "W",
    "YTT",
]
src = ["src", "tests"]
target-version = "py310"
unfixable = ["ERA001", "F401", "F841", "UP007"]

[tool.commitizen]
bump_message = "bump(release): v$current_version → v$new_version"
changelog_incremental = true
tag_format = "v$version"
update_changelog_on_bump = true
version = "0.10.0"
version_files = ["pyproject.toml:version", "src/obsidian_metadata/__version__.py:__version__"]

[tool.coverage.report] # https://coverage.readthedocs.io/en/latest/config.html#report
exclude_lines = [
@@ -141,20 +91,6 @@
[tool.coverage.xml]
output = "reports/coverage.xml"

[tool.black]
line-length = 100

[tool.commitizen]
bump_message = "bump(release): v$current_version → v$new_version"
changelog_incremental = true
tag_format = "v$version"
update_changelog_on_bump = true
version = "0.5.0"
version_files = [
    "pyproject.toml:version",
    "src/obsidian_metadata/__version__.py:__version__",
]

[tool.interrogate]
exclude = ["build", "docs", "tests"]
fail-under = 90
@@ -188,6 +124,108 @@
testpaths = ["src", "tests"]
xfail_strict = true

[tool.ruff] # https://github.com/charliermarsh/ruff
exclude = [
    ".bzr",
    ".direnv",
    ".eggs",
    ".git",
    ".hg",
    ".mypy_cache",
    ".nox",
    ".pants.d",
    ".pytype",
    ".ruff_cache",
    ".svn",
    ".tox",
    ".venv",
    "__pypackages__",
    "_build",
    "buck-out",
    "build",
    "dist",
    "node_modules",
    "venv",
]
# Avoiding flagging (and removing) `V101` from any `# noqa`
# directives, despite Ruff's lack of support for `vulture`.
external = ["V101"]
fix = true
ignore = [
    "B006",
    "B008",
    "D107",
    "D203",
    "D204",
    "D213",
    "D215",
    "D404",
    "D406",
    "D407",
    "D408",
    "D409",
    "D413",
    "E501",
    "N805",
    "PGH001",
    "PGH003",
    "UP007",
]
ignore-init-module-imports = true
line-length = 100
per-file-ignores = { "cli.py" = [
    "PLR0912",
    "PLR0913",
], "tests/*.py" = [
    "PLR0913",
    "PLR2004",
    "S101",
] }
select = [
    "A",   # flake8-builtins
    "ARG", # flake8-unused-arguments
    "B",   # flake8-bugbear
    "BLE", # flake8-blind-exception
    "C40", # flake8-comprehensions
    "C90", # McCabe
    "D",   # pydocstyle
    "E",   # pycodestyle Errors
    "ERA", # flake8-eradicate
    "EXE", # flake8-executable
    "F",   # pyflakes
    "I",   # iSort
    "N",   # Pep8-naming
    "PGH", # pygrep-hooks
    "PLC", # pylint Convention
    "PLE", # pylint Error
    "PLR", # pylint Refactor
    "PLW", # pylint Warning
    "PT",  # flake8-pytest-style
    "PTH", # flake8-use-pathlib
    "Q",   # flake8-quotes
    "RET", # flake8-return
    "RUF", # Ruff-specific rules
    "S",   # flake8-bandit
    "SIM", # flake8-simplify
    "TID", # flake8-tidy-imports
    "UP",  # pyupgrade
    "W",   # pycodestyle Warnings
    "YTT", # flake8-2020
]
src = ["src", "tests"]
target-version = "py310"
unfixable = ["ERA001", "F401", "F841", "UP007"]

[tool.ruff.mccabe]
# Unlike Flake8, default to a complexity level of 10.
max-complexity = 10

[tool.ruff.pydocstyle]
convention = "google"

[tool.ruff.pylint]
max-args = 6

[tool.vulture] # https://pypi.org/project/vulture/
# exclude = ["file*.py", "dir/"]
# ignore_decorators = ["@app.route", "@require_*"]
@@ -213,7 +251,7 @@
help = "Lint this package"

[[tool.poe.tasks.lint.sequence]]
shell = "ruff --extend-ignore=I001,D301,D401,PLR2004,PLR0913 src/"
shell = "ruff src/ --no-fix"

[[tool.poe.tasks.lint.sequence]]
shell = "black --check src/ tests/"
150
scripts/update_dependencies.py
Executable file
@@ -0,0 +1,150 @@
#!/usr/bin/env python
"""Script to update the pyproject.toml file with the latest versions of the dependencies."""
from pathlib import Path
from textwrap import wrap

try:
    import tomllib
except ModuleNotFoundError:  # pragma: no cover
    import tomli as tomllib  # type: ignore [no-redef]

import sh
from rich.console import Console

console = Console()


def dryrun(msg: str) -> None:
    """Print a dry run message without using logging.

    Args:
        msg: Message to print
    """
    console.print(f"[cyan]DRYRUN | {msg}[/cyan]")


def success(msg: str) -> None:
    """Print a success message without using logging.

    Args:
        msg: Message to print
    """
    console.print(f"[green]SUCCESS | {msg}[/green]")


def warning(msg: str) -> None:
    """Print a warning message without using logging.

    Args:
        msg: Message to print
    """
    console.print(f"[yellow]WARNING | {msg}[/yellow]")


def error(msg: str) -> None:
    """Print an error message without using logging.

    Args:
        msg: Message to print
    """
    console.print(f"[red]ERROR | {msg}[/red]")


def notice(msg: str) -> None:
    """Print a notice message without using logging.

    Args:
        msg: Message to print
    """
    console.print(f"[bold]NOTICE | {msg}[/bold]")


def info(msg: str) -> None:
    """Print an info message without using logging.

    Args:
        msg: Message to print
    """
    console.print(f"INFO | {msg}")


def usage(msg: str, width: int = 80) -> None:
    """Print a usage message without using logging.

    Args:
        msg: Message to print
        width (optional): Width of the message
    """
    for _n, line in enumerate(wrap(msg, width=width)):
        if _n == 0:
            console.print(f"[dim]USAGE | {line}")
        else:
            console.print(f"[dim] | {line}")


def debug(msg: str) -> None:
    """Print a debug message without using logging.

    Args:
        msg: Message to print
    """
    console.print(f"[blue]DEBUG | {msg}[/blue]")


def dim(msg: str) -> None:
    """Print a message in dimmed color.

    Args:
        msg: Message to print
    """
    console.print(f"[dim]{msg}[/dim]")


# Load the pyproject.toml file
pyproject = Path(__file__).parents[1] / "pyproject.toml"

if not pyproject.exists():
    console.print("pyproject.toml file not found")
    raise SystemExit(1)

with pyproject.open("rb") as f:
    try:
        data = tomllib.load(f)
    except tomllib.TOMLDecodeError as e:
        raise SystemExit(1) from e


# Get the latest versions of all dependencies
info("Getting latest versions of dependencies...")
packages: dict = {}
for line in sh.poetry("--no-ansi", "show", "--outdated").splitlines():
    package, current, latest = line.split()[:3]
    packages[package] = {"current_version": current, "new_version": latest}

if not packages:
    success("All dependencies are up to date")
    raise SystemExit(0)


dependencies = data["tool"]["poetry"]["dependencies"]
groups = data["tool"]["poetry"]["group"]

for p in dependencies:
    if p in packages:
        notice(
            f"Updating {p} from {packages[p]['current_version']} to {packages[p]['new_version']}"
        )
        sh.poetry("add", f"{p}@latest", _fg=True)


for group in groups:
    for p in groups[group]["dependencies"]:
        if p in packages:
            notice(
                f"Updating {p} from {packages[p]['current_version']} to {packages[p]['new_version']}"
            )
            sh.poetry("add", f"{p}@latest", "--group", group, _fg=True)

sh.poetry("update", _fg=True)
success("All dependencies are up to date")
raise SystemExit(0)
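
The parsing loop above assumes each line of `poetry --no-ansi show --outdated` is whitespace-delimited, with the package name, current version, and latest version in the first three columns. A minimal sketch of that step with an assumed sample line (not output captured from this repository):

    line = "rich 13.3.1 13.3.2 Render rich text to the terminal"
    package, current, latest = line.split()[:3]
    packages = {package: {"current_version": current, "new_version": latest}}
    # packages == {"rich": {"current_version": "13.3.1", "new_version": "13.3.2"}}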

@@ -1,2 +1,2 @@
"""obsidian-metadata version."""
__version__ = "0.5.0"
__version__ = "0.10.0"

@@ -91,7 +91,7 @@ class Config:
    def _load_config(self) -> dict[str, Any]:
        """Load the configuration file."""
        try:
            with open(self.config_path, encoding="utf-8") as fp:
            with self.config_path.open(encoding="utf-8") as fp:
                return tomlkit.load(fp)
        except tomlkit.exceptions.TOMLKitError as e:
            alerts.error(f"Could not parse '{self.config_path}'")
@@ -122,9 +122,9 @@ class Config:
    # Folders within the vault to ignore when indexing metadata
    exclude_paths = [".git", ".obsidian"]

    # Location to add metadata. One of:
    # Location to add new metadata. One of:
    #    TOP: Directly after frontmatter.
    #    AFTER_TITLE: After a header following frontmatter.
    #    AFTER_TITLE: After the first header following frontmatter.
    #    BOTTOM: The bottom of the note
    insert_location = "BOTTOM"
    """
@@ -164,6 +164,7 @@ class VaultConfig:
        yield "config", self.config
        yield "path", self.path
        yield "exclude_paths", self.exclude_paths
        yield "insert_location", self.insert_location

    def _validate_vault_path(self, vault_path: Path | None) -> Path:
        """Validate the vault path."""

@@ -5,11 +5,15 @@ from obsidian_metadata._utils.alerts import LoggerManager
from obsidian_metadata._utils.utilities import (
    clean_dictionary,
    clear_screen,
    delete_from_dict,
    dict_contains,
    dict_keys_to_lower,
    dict_values_to_lists_strings,
    docstring_parameter,
    merge_dictionaries,
    remove_markdown_sections,
    rename_in_dict,
    validate_csv_bulk_imports,
    version_callback,
)

@@ -17,12 +21,15 @@ __all__ = [
    "alerts",
    "clean_dictionary",
    "clear_screen",
    "delete_from_dict",
    "dict_contains",
    "dict_keys_to_lower",
    "dict_values_to_lists_strings",
    "docstring_parameter",
    "LoggerManager",
    "merge_dictionaries",
    "rename_in_dict",
    "remove_markdown_sections",
    "vault_validation",
    "validate_csv_bulk_imports",
    "version_callback",
]

@@ -7,7 +7,8 @@ from textwrap import wrap
import rich.repr
import typer
from loguru import logger
from rich import print

from obsidian_metadata._utils.console import console


class LogLevel(Enum):
@@ -38,7 +39,7 @@ def dryrun(msg: str) -> None:
    Args:
        msg: Message to print
    """
    print(f"[cyan]DRYRUN | {msg}[/cyan]")
    console.print(f"[cyan]DRYRUN | {msg}[/cyan]")


def success(msg: str) -> None:
@@ -47,7 +48,7 @@ def success(msg: str) -> None:
    Args:
        msg: Message to print
    """
    print(f"[green]SUCCESS | {msg}[/green]")
    console.print(f"[green]SUCCESS | {msg}[/green]")


def warning(msg: str) -> None:
@@ -56,7 +57,7 @@ def warning(msg: str) -> None:
    Args:
        msg: Message to print
    """
    print(f"[yellow]WARNING | {msg}[/yellow]")
    console.print(f"[yellow]WARNING | {msg}[/yellow]")


def error(msg: str) -> None:
@@ -65,7 +66,7 @@ def error(msg: str) -> None:
    Args:
        msg: Message to print
    """
    print(f"[red]ERROR | {msg}[/red]")
    console.print(f"[red]ERROR | {msg}[/red]")


def notice(msg: str) -> None:
@@ -74,7 +75,7 @@ def notice(msg: str) -> None:
    Args:
        msg: Message to print
    """
    print(f"[bold]NOTICE | {msg}[/bold]")
    console.print(f"[bold]NOTICE | {msg}[/bold]")


def info(msg: str) -> None:
@@ -83,21 +84,24 @@ def info(msg: str) -> None:
    Args:
        msg: Message to print
    """
    print(f"INFO | {msg}")
    console.print(f"INFO | {msg}")


def usage(msg: str, width: int = 80) -> None:
def usage(msg: str, width: int = None) -> None:
    """Print a usage message without using logging.

    Args:
        msg: Message to print
        width (optional): Width of the message
    """
    if width is None:
        width = console.width - 15

    for _n, line in enumerate(wrap(msg, width=width)):
        if _n == 0:
            print(f"[dim]USAGE | {line}")
            console.print(f"[dim]USAGE | {line}")
        else:
            print(f"[dim] | {line}")
            console.print(f"[dim] | {line}")


def debug(msg: str) -> None:
@@ -106,7 +110,7 @@ def debug(msg: str) -> None:
    Args:
        msg: Message to print
    """
    print(f"[blue]DEBUG | {msg}[/blue]")
    console.print(f"[blue]DEBUG | {msg}[/blue]")


def dim(msg: str) -> None:
@@ -115,7 +119,7 @@ def dim(msg: str) -> None:
    Args:
        msg: Message to print
    """
    print(f"[dim]{msg}[/dim]")
    console.print(f"[dim]{msg}[/dim]")


def _log_formatter(record: dict) -> str:
@@ -125,9 +129,12 @@ def _log_formatter(record: dict) -> str:
        or record["level"].name == "SUCCESS"
        or record["level"].name == "WARNING"
    ):
        return "<level>{level: <8}</level> | <level>{message}</level>\n{exception}"
        return "<level><normal>{level: <8} | {message}</normal></level>\n{exception}"

    return "<level>{level: <8}</level> | <level>{message}</level> <fg #c5c5c5>({name}:{function}:{line})</fg #c5c5c5>\n{exception}"
    if record["level"].name == "TRACE" or record["level"].name == "DEBUG":
        return "<level><normal>{level: <8} | {message}</normal></level> <fg #c5c5c5>({name}:{function}:{line})</fg #c5c5c5>\n{exception}"

    return "<level>{level: <8} | {message}</level> <fg #c5c5c5>({name}:{function}:{line})</fg #c5c5c5>\n{exception}"


@rich.repr.auto
@@ -171,7 +178,7 @@ class LoggerManager:
        self.log_level = log_level

        if self.log_file == Path("/logs") and self.log_to_file:  # pragma: no cover
            print("No log file specified")
            console.print("No log file specified")
            raise typer.Exit(1)

        if self.verbosity >= VerboseLevel.TRACE.value:
@@ -239,7 +246,7 @@ class LoggerManager:
        """
        if self.log_level <= LogLevel.TRACE.value:
            if msg:
                print(msg)
                console.print(msg)
            return True
        return False

@@ -254,7 +261,7 @@ class LoggerManager:
        """
        if self.log_level <= LogLevel.DEBUG.value:
            if msg:
                print(msg)
                console.print(msg)
            return True
        return False

@@ -269,7 +276,7 @@ class LoggerManager:
        """
        if self.log_level <= LogLevel.INFO.value:
            if msg:
                print(msg)
                console.print(msg)
            return True
        return False

@@ -284,6 +291,6 @@ class LoggerManager:
        """
        if self.log_level <= LogLevel.WARNING.value:
            if msg:
                print(msg)
                console.print(msg)
            return True
        return False  # pragma: no cover
4
src/obsidian_metadata/_utils/console.py
Normal file
@@ -0,0 +1,4 @@
"""Rich console object for the application."""
from rich.console import Console

console = Console()
@@ -1,11 +1,15 @@
"""Utility functions."""
import copy
import csv
import re
from os import name, system
from pathlib import Path
from typing import Any

import typer

from obsidian_metadata.__version__ import __version__
from obsidian_metadata._utils.console import console


def clean_dictionary(dictionary: dict[str, Any]) -> dict[str, Any]:
@@ -34,7 +38,7 @@ def clear_screen() -> None:  # pragma: no cover
def dict_contains(
    dictionary: dict[str, list[str]], key: str, value: str = None, is_regex: bool = False
) -> bool:
    """Check if a dictionary contains a key.
    """Check if a dictionary contains a key or if a specified key contains a value.

    Args:
        dictionary (dict): Dictionary to check
@@ -62,12 +66,27 @@
    return key in dictionary and value in dictionary[key]


def dict_values_to_lists_strings(dictionary: dict, strip_null_values: bool = False) -> dict:
def dict_keys_to_lower(dictionary: dict) -> dict:
    """Convert all keys in a dictionary to lowercase.

    Args:
        dictionary (dict): Dictionary to convert

    Returns:
        dict: Dictionary with all keys converted to lowercase
    """
    return {key.lower(): value for key, value in dictionary.items()}
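    # Usage sketch (values assumed):
    # dict_keys_to_lower({"Tags": ["a"], "AREA": 1}) -> {"tags": ["a"], "area": 1}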


def dict_values_to_lists_strings(
    dictionary: dict,
    strip_null_values: bool = False,
) -> dict:
    """Convert all values in a dictionary to lists of strings.

    Args:
        dictionary (dict): Dictionary to convert
        strip_null (bool): Whether to strip null values
        strip_null_values (bool): Whether to strip null values

    Returns:
        dict: Dictionary with all values converted to lists of strings
@@ -82,7 +101,7 @@ def dict_values_to_lists_strings(dictionary: dict, strip_null_values: bool = Fal
            new_dict[key] = sorted([str(item) for item in value if item is not None])
        elif isinstance(value, dict):
            new_dict[key] = dict_values_to_lists_strings(value)  # type: ignore[assignment]
        elif value is None or value == "None" or value == "":
        elif value is None or value == "None" or not value:
            new_dict[key] = []
        else:
            new_dict[key] = [str(value)]
@@ -100,6 +119,48 @@ def dict_values_to_lists_strings(dictionary: dict, strip_null_values: bool = Fal
    return new_dict
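    # Usage sketch (values assumed), following the branches shown above:
    # dict_values_to_lists_strings({"a": 1, "b": None, "c": [2, 1]})
    # -> {"a": ["1"], "b": [], "c": ["1", "2"]}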


def delete_from_dict(  # noqa: C901
    dictionary: dict, key: str, value: str = None, is_regex: bool = False
) -> dict:
    """Delete a key or a value from a dictionary.

    Args:
        dictionary (dict): Dictionary to delete from
        is_regex (bool, optional): Whether the key is a regex. Defaults to False.
        key (str): Key to delete
        value (str, optional): Value to delete. Defaults to None.

    Returns:
        dict: Dictionary without the key
    """
    dictionary = copy.deepcopy(dictionary)

    if value is None:
        if is_regex:
            return {k: v for k, v in dictionary.items() if not re.search(key, str(k))}

        return {k: v for k, v in dictionary.items() if k != key}

    if is_regex:
        keys_to_delete = []
        for _key in dictionary:
            if re.search(key, str(_key)):
                if isinstance(dictionary[_key], list):
                    dictionary[_key] = [v for v in dictionary[_key] if not re.search(value, v)]
                elif isinstance(dictionary[_key], str) and re.search(value, dictionary[_key]):
                    keys_to_delete.append(_key)

        for key in keys_to_delete:
            dictionary.pop(key)

    elif key in dictionary and isinstance(dictionary[key], list):
        dictionary[key] = [v for v in dictionary[key] if v != value]
    elif key in dictionary and dictionary[key] == value:
        dictionary.pop(key)

    return dictionary
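    # Usage sketch (values assumed): with d = {"tags": ["a", "b"], "area": "x"},
    # delete_from_dict(d, key="tags", value="a")    -> {"tags": ["b"], "area": "x"}
    # delete_from_dict(d, key="^ta", is_regex=True) -> {"area": "x"}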


def docstring_parameter(*sub: Any) -> Any:
    """Replace variables within docstrings.

@@ -122,7 +183,7 @@ def docstring_parameter(*sub: Any) -> Any:


def merge_dictionaries(dict1: dict, dict2: dict) -> dict:
    """Merge two dictionaries.
    """Merge two dictionaries. When the values are lists, they are merged and sorted.

    Args:
        dict1 (dict): First dictionary.
@@ -149,6 +210,31 @@ def merge_dictionaries(dict1: dict, dict2: dict) -> dict:
    return dict(sorted(dict1.items()))
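    # Per the docstring above (behavior hedged; the merge body is elided from
    # this hunk): merge_dictionaries({"k": ["b"]}, {"j": ["c"], "k": ["a"]})
    # would yield {"j": ["c"], "k": ["a", "b"]} -- list values merged and
    # sorted, with keys sorted by the return statement shown.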


def rename_in_dict(
    dictionary: dict[str, list[str]], key: str, value_1: str, value_2: str = None
) -> dict:
    """Rename a key or a value in a dictionary whose values are lists of strings.

    Args:
        dictionary (dict): Dictionary to rename in.
        key (str): Key to check.
        value_1 (str): With `value_2`, the value to rename. If `value_2` is None, the new name for `key`.
        value_2 (str, Optional): New value.

    Returns:
        dict: Dictionary with renamed key or value
    """
    dictionary = copy.deepcopy(dictionary)

    if value_2 is None:
        if key in dictionary and value_1 not in dictionary:
            dictionary[value_1] = dictionary.pop(key)
    elif key in dictionary and value_1 in dictionary[key]:
        dictionary[key] = sorted({value_2 if x == value_1 else x for x in dictionary[key]})

    return dictionary
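    # Usage sketch (values assumed), per the two branches above:
    # rename_in_dict({"tag": ["a"]}, key="tag", value_1="tags")
    #   -> {"tags": ["a"]}  (value_2 omitted: the key itself is renamed)
    # rename_in_dict({"tags": ["a", "b"]}, key="tags", value_1="a", value_2="c")
    #   -> {"tags": ["b", "c"]}  (value "a" renamed to "c", list re-sorted)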


def remove_markdown_sections(
    text: str,
    strip_codeblocks: bool = False,
@@ -175,11 +261,62 @@
    if strip_frontmatter:
        text = re.sub(r"^\s*---.*?---", "", text, flags=re.DOTALL)

    return text  # noqa: RET504
    return text


def validate_csv_bulk_imports(csv_path: Path, note_paths: list) -> dict[str, list[dict[str, str]]]:
    """Validate the bulk import CSV file.

    Args:
        csv_path (Path): Path to the CSV file to validate
        note_paths (list): List of paths to all notes in vault

    Returns:
        dict: Validated dictionary
    """
    csv_dict: dict[str, Any] = {}
    with csv_path.expanduser().open("r") as csv_file:
        csv_reader = csv.DictReader(csv_file, delimiter=",")
        row_num = 0
        for row in csv_reader:
            if row_num == 0:
                if "path" not in row:
                    raise typer.BadParameter("Missing 'path' column in CSV file")
                if "type" not in row:
                    raise typer.BadParameter("Missing 'type' column in CSV file")
                if "key" not in row:
                    raise typer.BadParameter("Missing 'key' column in CSV file")
                if "value" not in row:
                    raise typer.BadParameter("Missing 'value' column in CSV file")
            row_num += 1

            if row_num > 0 and row["type"] not in ["tag", "frontmatter", "inline_metadata"]:
                raise typer.BadParameter(
                    f"Invalid type '{row['type']}' in CSV file. Must be one of 'tag', 'frontmatter', 'inline_metadata'"
                )

            if row["path"] not in csv_dict:
                csv_dict[row["path"]] = []

            csv_dict[row["path"]].append(
                {"type": row["type"], "key": row["key"], "value": row["value"]}
            )

    if row_num == 0 or row_num == 1:
        raise typer.BadParameter("Empty CSV file")

    paths_to_remove = [x for x in csv_dict if x not in note_paths]

    for _path in paths_to_remove:
        raise typer.BadParameter(
            f"'{_path}' in CSV does not exist in vault. Ensure all paths are relative to the vault root."
        )

    return csv_dict
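    # A hedged example of the CSV shape this validator accepts (paths are
    # illustrative and must exist in the vault):
    #
    #   path,type,key,value
    #   folder/note.md,frontmatter,author,Jane Doe
    #   folder/note.md,tag,,my_tag
    #   folder/note.md,inline_metadata,status,draft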
|
||||
|
||||
|
||||
def version_callback(value: bool) -> None:
|
||||
"""Print version and exit."""
|
||||
if value:
|
||||
print(f"{__package__.split('.')[0]}: v{__version__}")
|
||||
console.print(f"{__package__.split('.')[0]}: v{__version__}")
|
||||
raise typer.Exit()
|
||||
|
||||
@@ -5,7 +5,6 @@ from typing import Optional
|
||||
|
||||
import questionary
|
||||
import typer
|
||||
from rich import print
|
||||
|
||||
from obsidian_metadata._config import Config
|
||||
from obsidian_metadata._utils import (
|
||||
@@ -14,6 +13,7 @@ from obsidian_metadata._utils import (
|
||||
docstring_parameter,
|
||||
version_callback,
|
||||
)
|
||||
from obsidian_metadata._utils.console import console
|
||||
from obsidian_metadata.models import Application
|
||||
|
||||
app = typer.Typer(add_completion=False, no_args_is_help=True, rich_markup_mode="rich")
|
||||
@@ -34,14 +34,21 @@ def main(
|
||||
),
|
||||
export_csv: Path = typer.Option(
|
||||
None,
|
||||
help="Exports all metadata to a specified CSV file and exits. (Will overwrite any existing file)",
|
||||
help="Exports all metadata to a specified CSV file and exits.",
|
||||
show_default=False,
|
||||
dir_okay=False,
|
||||
file_okay=True,
|
||||
),
|
||||
export_json: Path = typer.Option(
|
||||
None,
|
||||
help="Exports all metadata to a specified JSON file and exits. (Will overwrite any existing file)",
|
||||
help="Exports all metadata to a specified JSON file and exits.",
|
||||
show_default=False,
|
||||
dir_okay=False,
|
||||
file_okay=True,
|
||||
),
|
||||
export_template: Path = typer.Option(
|
||||
None,
|
||||
help="Exports all notes and their metadata to a specified CSV file and exits. Use to create a template for batch updates.",
|
||||
show_default=False,
|
||||
dir_okay=False,
|
||||
file_okay=True,
|
||||
@@ -79,7 +86,7 @@ def main(
|
||||
help="""Set verbosity level (0=WARN, 1=INFO, 2=DEBUG, 3=TRACE)""",
|
||||
count=True,
|
||||
),
|
||||
version: Optional[bool] = typer.Option(
|
||||
version: Optional[bool] = typer.Option( # noqa: ARG001
|
||||
None, "--version", help="Print version and exit", callback=version_callback, is_eager=True
|
||||
),
|
||||
) -> None:
|
||||
@@ -91,57 +98,7 @@ def main(
|
||||
[bold underline]Configuration:[/]
|
||||
Configuration is specified in a configuration file. On First run, this file will be created at [tan]~/.{0}.env[/]. Any options specified on the command line will override the configuration file.
|
||||
|
||||
[bold underline]Usage:[/]
|
||||
[tan]Obsidian-metadata[/] provides a menu of sub-commands.
|
||||
|
||||
[bold underline]Vault Actions[/]
|
||||
Create or delete a backup of your vault.
|
||||
• Backup: Create a backup of the vault.
|
||||
• Delete Backup: Delete a backup of the vault.
|
||||
|
||||
[bold underline]Inspect Metadata[/]
|
||||
Inspect the metadata in your vault.
|
||||
• View all metadata in the vault
|
||||
• View all frontmatter
|
||||
• View all inline metadata
|
||||
• View all inline tags
|
||||
• Export all metadata to CSV or JSON file
|
||||
|
||||
[bold underline]Filter Notes in Scope[/]
|
||||
Limit the scope of notes to be processed with one or more filters.
|
||||
• Path filter (regex): Limit scope based on the path or filename
|
||||
• Metadata filter: Limit scope based on a key or key/value pair
|
||||
• Tag filter: Limit scope based on an in-text tag
|
||||
• List and clear filters: List all current filters and clear one or all
|
||||
• List notes in scope: List notes that will be processed.
|
||||
|
||||
[bold underline]Add Metadata[/]
|
||||
Add new metadata to your vault.
|
||||
• Add new metadata to the frontmatter
|
||||
• Add new inline metadata - Set `insert_location` in the config to
|
||||
control where the new metadata is inserted. (Default: Bottom)
|
||||
• Add new inline tag - Set `insert_location` in the config to
|
||||
control where the new tag is inserted. (Default: Bottom)
|
||||
|
||||
[bold underline]Rename Metadata[/]
|
||||
Rename either a key and all associated values, a specific value within a key. or an in-text tag.
|
||||
• Rename a key
|
||||
• Rename a value
|
||||
• rename an inline tag
|
||||
|
||||
[bold underline]Delete Metadata[/]
|
||||
Delete either a key and all associated values, or a specific value.
|
||||
• Delete a key and associated values
|
||||
• Delete a value from a key
|
||||
• Delete an inline tag
|
||||
|
||||
[bold underline]Review Changes[/]
|
||||
Prior to committing changes, review all changes that will be made.
|
||||
• View a diff of the changes that will be made
|
||||
|
||||
[bold underline]Commit Changes[/]
|
||||
Write the changes to disk. This step is not undoable.
|
||||
• Commit changes to the vault
|
||||
Full usage information is available at https://github.com/natelandau/obsidian-metadata
|
||||
|
||||
"""
|
||||
# Instantiate logger
|
||||
@@ -163,7 +120,7 @@ def main(
|
||||
|_| |_|\___|\__\__,_|\__,_|\__,_|\__\__,_|
|
||||
"""
|
||||
clear_screen()
|
||||
print(banner)
|
||||
console.print(banner)
|
||||
|
||||
config: Config = Config(config_path=config_file, vault_path=vault_path)
|
||||
if len(config.vaults) == 0:
|
||||
@@ -192,6 +149,10 @@ def main(
|
||||
path = Path(export_json).expanduser().resolve()
|
||||
application.noninteractive_export_csv(path)
|
||||
raise typer.Exit(code=0)
|
||||
if export_template is not None:
|
||||
path = Path(export_template).expanduser().resolve()
|
||||
application.noninteractive_export_template(path)
|
||||
raise typer.Exit(code=0)
|
||||
|
||||
application.application_main()
|
||||
|
||||
|
||||
@@ -1,21 +1,20 @@
|
||||
"""Questions for the cli."""
|
||||
|
||||
|
||||
from typing import Any
|
||||
from pathlib import Path
|
||||
import questionary
|
||||
from rich import print
|
||||
from rich import box
|
||||
from rich.console import Console
|
||||
from rich.table import Table
|
||||
from obsidian_metadata._config import VaultConfig
|
||||
from obsidian_metadata._utils.alerts import logger as log
|
||||
from obsidian_metadata.models import Patterns, Vault, VaultFilter
|
||||
from obsidian_metadata._utils import alerts
|
||||
from obsidian_metadata.models.questions import Questions
|
||||
from obsidian_metadata.models.enums import MetadataType
|
||||
from typing import Any
|
||||
|
||||
PATTERNS = Patterns()
|
||||
import questionary
|
||||
import typer
|
||||
from rich import box
|
||||
from rich.table import Table
|
||||
|
||||
from obsidian_metadata._config import VaultConfig
|
||||
from obsidian_metadata._utils import alerts, validate_csv_bulk_imports
|
||||
from obsidian_metadata._utils.console import console
|
||||
from obsidian_metadata.models import InsertLocation, Vault, VaultFilter
|
||||
from obsidian_metadata.models.enums import MetadataType
|
||||
from obsidian_metadata.models.questions import Questions
|
||||
|
||||
|
||||
class Application:
|
||||
@@ -34,7 +33,6 @@ class Application:
|
||||
|
||||
def _load_vault(self) -> None:
|
||||
"""Load the vault."""
|
||||
|
||||
if len(self.filters) == 0:
|
||||
self.vault: Vault = Vault(config=self.config, dry_run=self.dry_run)
|
||||
else:
|
||||
@@ -52,11 +50,15 @@ class Application:
|
||||
while True:
|
||||
self.vault.info()
|
||||
|
||||
match self.questions.ask_application_main(): # noqa: E999
|
||||
match self.questions.ask_application_main():
|
||||
case "vault_actions":
|
||||
self.application_vault()
|
||||
case "export_metadata":
|
||||
self.application_export_metadata()
|
||||
case "inspect_metadata":
|
||||
self.application_inspect_metadata()
|
||||
case "import_from_csv":
|
||||
self.application_import_csv()
|
||||
case "filter_notes":
|
||||
self.application_filter()
|
||||
case "add_metadata":
|
||||
@@ -65,6 +67,8 @@ class Application:
|
||||
self.application_rename_metadata()
|
||||
case "delete_metadata":
|
||||
self.application_delete_metadata()
|
||||
case "reorganize_metadata":
|
||||
self.application_reorganize_metadata()
|
||||
case "review_changes":
|
||||
self.review_changes()
|
||||
case "commit_changes":
|
||||
@@ -72,8 +76,7 @@ class Application:
|
||||
case _:
|
||||
break
|
||||
|
||||
print("Done!")
|
||||
return
|
||||
console.print("Done!")
|
||||
|
||||
def application_add_metadata(self) -> None:
|
||||
"""Add metadata."""
|
||||
@@ -98,7 +101,7 @@ class Application:
|
||||
area=area, key=key, value=value, location=self.vault.insert_location
|
||||
)
|
||||
if num_changed == 0: # pragma: no cover
|
||||
alerts.warning(f"No notes were changed")
|
||||
alerts.warning("No notes were changed")
|
||||
return
|
||||
|
||||
alerts.success(f"Added metadata to {num_changed} notes")
|
||||
@@ -113,18 +116,67 @@
                 )

                 if num_changed == 0:  # pragma: no cover
-                    alerts.warning(f"No notes were changed")
+                    alerts.warning("No notes were changed")
                     return

                 alerts.success(f"Added metadata to {num_changed} notes")
             case _:  # pragma: no cover
                 return

-    def application_filter(self) -> None:
+    def application_delete_metadata(self) -> None:
+        """Delete metadata."""
+        alerts.usage("Delete either a key and all associated values, or a specific value.")
+
+        choices = [
+            questionary.Separator(),
+            {"name": "Delete inline tag", "value": "delete_tag"},
+            {"name": "Delete key", "value": "delete_key"},
+            {"name": "Delete value", "value": "delete_value"},
+            questionary.Separator(),
+            {"name": "Back", "value": "back"},
+        ]
+        match self.questions.ask_selection(
+            choices=choices, question="Select a metadata type to delete"
+        ):
+            case "delete_key":
+                self.delete_key()
+            case "delete_value":
+                self.delete_value()
+            case "delete_tag":
+                self.delete_tag()
+            case _:  # pragma: no cover
+                return
+
+    def application_rename_metadata(self) -> None:
+        """Rename metadata."""
+        alerts.usage("Select the type of metadata to rename.")
+
+        choices = [
+            questionary.Separator(),
+            {"name": "Rename inline tag", "value": "rename_tag"},
+            {"name": "Rename key", "value": "rename_key"},
+            {"name": "Rename value", "value": "rename_value"},
+            questionary.Separator(),
+            {"name": "Back", "value": "back"},
+        ]
+        match self.questions.ask_selection(
+            choices=choices, question="Select a metadata type to rename"
+        ):
+            case "rename_key":
+                self.rename_key()
+            case "rename_value":
+                self.rename_value()
+            case "rename_tag":
+                self.rename_tag()
+            case _:  # pragma: no cover
+                return
+
+    def application_filter(self) -> None:  # noqa: C901,PLR0911,PLR0912
         """Filter notes."""
         alerts.usage("Limit the scope of notes to be processed with one or more filters.")

         choices = [
             questionary.Separator(),
             {"name": "Apply new regex path filter", "value": "apply_path_filter"},
             {"name": "Apply new metadata filter", "value": "apply_metadata_filter"},
             {"name": "Apply new in-text tag filter", "value": "apply_tag_filter"},
@@ -137,7 +189,7 @@
         match self.questions.ask_selection(choices=choices, question="Select an action"):
             case "apply_path_filter":
                 path = self.questions.ask_filter_path()
-                if path is None or path == "":  # pragma: no cover
+                if path is None or not path:  # pragma: no cover
                     return

                 self.filters.append(VaultFilter(path_filter=path))
@@ -154,15 +206,15 @@
                 )
                 if value is None:  # pragma: no cover
                     return
-                if value == "":
+                if not value:
                     self.filters.append(VaultFilter(key_filter=key))
                 else:
                     self.filters.append(VaultFilter(key_filter=key, value_filter=value))
                 self._load_vault()

             case "apply_tag_filter":
-                tag = self.questions.ask_existing_inline_tag()
-                if tag is None or tag == "":
+                tag = self.questions.ask_existing_tag()
+                if tag is None or not tag:
                     return

                 self.filters.append(VaultFilter(tag_filter=tag))
@@ -173,7 +225,7 @@
                     alerts.notice("No filters have been applied")
                     return

-                print("")
+                console.print("")
                 table = Table(
                     "Opt",
                     "Filter",
@@ -182,34 +234,34 @@
                     show_header=False,
                     box=box.HORIZONTALS,
                 )
-                for _n, filter in enumerate(self.filters, start=1):
-                    if filter.path_filter is not None:
+                for _n, _filter in enumerate(self.filters, start=1):
+                    if _filter.path_filter is not None:
                         table.add_row(
                             str(_n),
-                            f"Path regex: [tan bold]{filter.path_filter}",
+                            f"Path regex: [tan bold]{_filter.path_filter}",
                             end_section=bool(_n == len(self.filters)),
                         )
-                    elif filter.tag_filter is not None:
+                    elif _filter.tag_filter is not None:
                         table.add_row(
                             str(_n),
-                            f"Tag filter: [tan bold]{filter.tag_filter}",
+                            f"Tag filter: [tan bold]{_filter.tag_filter}",
                             end_section=bool(_n == len(self.filters)),
                         )
-                    elif filter.key_filter is not None and filter.value_filter is None:
+                    elif _filter.key_filter is not None and _filter.value_filter is None:
                         table.add_row(
                             str(_n),
-                            f"Key filter: [tan bold]{filter.key_filter}",
+                            f"Key filter: [tan bold]{_filter.key_filter}",
                             end_section=bool(_n == len(self.filters)),
                         )
-                    elif filter.key_filter is not None and filter.value_filter is not None:
+                    elif _filter.key_filter is not None and _filter.value_filter is not None:
                         table.add_row(
                             str(_n),
-                            f"Key/Value : [tan bold]{filter.key_filter}={filter.value_filter}",
+                            f"Key/Value : [tan bold]{_filter.key_filter}={_filter.value_filter}",
                             end_section=bool(_n == len(self.filters)),
                         )
                 table.add_row(f"{len(self.filters) + 1}", "Clear All")
                 table.add_row(f"{len(self.filters) + 2}", "Return to Main Menu")
-                Console().print(table)
+                console.print(table)

                 num = self.questions.ask_number(
                     question="Enter the number of the filter to clear"
@@ -231,6 +283,76 @@
             case _:
                 return

+    def application_import_csv(self) -> None:
+        """Import CSV for bulk changes to metadata."""
+        alerts.usage(
+            "Import CSV to make build changes to metadata. The CSV must have the following columns: path, type, key, value. Where type is one of 'frontmatter', 'inline_metadata', or 'tag'. Note: this will not create new notes."
+        )
+
+        path = self.questions.ask_path(question="Enter path to a CSV file", valid_file=True)
+
+        if path is None:
+            return
+
+        csv_path = Path(path).expanduser()
+
+        if "csv" not in csv_path.suffix.lower():
+            alerts.error("File must be a CSV file")
+            return
+
+        note_paths = [
+            str(n.note_path.relative_to(self.vault.vault_path)) for n in self.vault.all_notes
+        ]
+
+        dict_from_csv = validate_csv_bulk_imports(csv_path, note_paths)
+        num_changed = self.vault.update_from_dict(dict_from_csv)
+
+        if num_changed == 0:
+            alerts.warning("No notes were changed")
+            return
+
+        alerts.success(f"Rewrote metadata for {num_changed} notes.")
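The usage text above fixes the importer's CSV contract: columns `path`, `type`, `key`, `value`, with `type` being one of `frontmatter`, `inline_metadata`, or `tag`. A sketch of building such a file with the standard library (the file name and rows are illustrative, not from the project):

    import csv
    from pathlib import Path

    # Two example rows: set a frontmatter key and add an in-text tag.
    rows = [
        {"path": "inbox/note.md", "type": "frontmatter", "key": "status", "value": "done"},
        {"path": "inbox/note.md", "type": "tag", "key": "", "value": "reviewed"},
    ]
    with Path("bulk_changes.csv").open("w", newline="") as f:
        writer = csv.DictWriter(f, fieldnames=["path", "type", "key", "value"])
        writer.writeheader()
        writer.writerows(rows)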
+    def application_export_metadata(self) -> None:
+        """Export metadata to various formats."""
+        alerts.usage(
+            "Export the metadata in your vault. Note, uncommitted changes will be reflected in these files. The notes csv export can be used as template for importing bulk changes"
+        )
+        choices = [
+            questionary.Separator(),
+            {"name": "Metadata by type to CSV", "value": "export_csv"},
+            {"name": "Metadata by type to JSON", "value": "export_json"},
+            {
+                "name": "Metadata by note to CSV [Bulk import template]",
+                "value": "export_notes_csv",
+            },
+            questionary.Separator(),
+            {"name": "Back", "value": "back"},
+        ]
+        while True:
+            match self.questions.ask_selection(choices=choices, question="Export format"):
+                case "export_csv":
+                    path = self.questions.ask_path(question="Enter a path for the CSV file")
+                    if path is None:
+                        return
+                    self.vault.export_metadata(path=path, export_format="csv")
+                    alerts.success(f"CSV written to {path}")
+                case "export_json":
+                    path = self.questions.ask_path(question="Enter a path for the JSON file")
+                    if path is None:
+                        return
+                    self.vault.export_metadata(path=path, export_format="json")
+                    alerts.success(f"JSON written to {path}")
+                case "export_notes_csv":
+                    path = self.questions.ask_path(question="Enter a path for the CSV file")
+                    if path is None:
+                        return
+                    self.vault.export_notes_to_csv(path=path)
+                    alerts.success(f"CSV written to {path}")
+                    return
+                case _:
+                    return
     def application_inspect_metadata(self) -> None:
         """View metadata."""
         alerts.usage(
@@ -238,59 +360,85 @@
         )

         choices = [
-            {"name": "View all metadata", "value": "all_metadata"},
-            {"name": "View all frontmatter", "value": "all_frontmatter"},
-            {"name": "View all inline_metadata", "value": "all_inline"},
-            {"name": "View all keys", "value": "all_keys"},
-            {"name": "View all inline tags", "value": "all_tags"},
-            questionary.Separator(),
-            {"name": "Write all metadata to CSV", "value": "export_csv"},
-            {"name": "Write all metadata to JSON file", "value": "export_json"},
+            {"name": "View all frontmatter", "value": "all_frontmatter"},
+            {"name": "View all inline metadata", "value": "all_inline"},
+            {"name": "View all inline tags", "value": "all_tags"},
+            {"name": "View all keys", "value": "all_keys"},
+            {"name": "View all metadata", "value": "all_metadata"},
             questionary.Separator(),
             {"name": "Back", "value": "back"},
         ]
         while True:
-            match self.questions.ask_selection(choices=choices, question="Select a vault action"):
+            match self.questions.ask_selection(choices=choices, question="Select an action"):
                 case "all_metadata":
-                    print("")
+                    console.print("")
                     self.vault.metadata.print_metadata(area=MetadataType.ALL)
-                    print("")
+                    console.print("")
                 case "all_frontmatter":
-                    print("")
+                    console.print("")
                     self.vault.metadata.print_metadata(area=MetadataType.FRONTMATTER)
-                    print("")
+                    console.print("")
                 case "all_inline":
-                    print("")
+                    console.print("")
                     self.vault.metadata.print_metadata(area=MetadataType.INLINE)
-                    print("")
+                    console.print("")
                 case "all_keys":
-                    print("")
+                    console.print("")
                     self.vault.metadata.print_metadata(area=MetadataType.KEYS)
-                    print("")
+                    console.print("")
                 case "all_tags":
-                    print("")
+                    console.print("")
                     self.vault.metadata.print_metadata(area=MetadataType.TAGS)
-                    print("")
-                case "export_csv":
-                    path = self.questions.ask_path(question="Enter a path for the CSV file")
-                    if path is None:
-                        return
-                    self.vault.export_metadata(path=path, format="csv")
-                    alerts.success(f"Metadata written to {path}")
-                case "export_json":
-                    path = self.questions.ask_path(question="Enter a path for the JSON file")
-                    if path is None:
-                        return
-                    self.vault.export_metadata(path=path, format="json")
-                    alerts.success(f"Metadata written to {path}")
+                    console.print("")
                 case _:
                     return
+    def application_reorganize_metadata(self) -> None:
+        """Reorganize metadata.
+
+        This portion of the application deals with moving metadata between types (inline to frontmatter, etc.) and moving the location of inline metadata within a note.
+
+        """
+        alerts.usage("Move metadata within notes.")
+        alerts.usage(" 1. Transpose frontmatter to inline or vice versa.")
+        alerts.usage(" 2. Move the location of inline metadata within a note.")
+
+        choices = [
+            questionary.Separator(),
+            {"name": "Move inline metadata to top of note", "value": "move_to_top"},
+            {
+                "name": "Move inline metadata beneath the first header",
+                "value": "move_to_after_header",
+            },
+            {"name": "Move inline metadata to bottom of the note", "value": "move_to_bottom"},
+            {"name": "Transpose frontmatter to inline", "value": "frontmatter_to_inline"},
+            {"name": "Transpose inline to frontmatter", "value": "inline_to_frontmatter"},
+            questionary.Separator(),
+            {"name": "Back", "value": "back"},
+        ]
+        match self.questions.ask_selection(
+            choices=choices, question="Select metadata to transpose"
+        ):
+            case "frontmatter_to_inline":
+                self.transpose_metadata(begin=MetadataType.FRONTMATTER, end=MetadataType.INLINE)
+            case "inline_to_frontmatter":
+                self.transpose_metadata(begin=MetadataType.INLINE, end=MetadataType.FRONTMATTER)
+            case "move_to_top":
+                self.move_inline_metadata(location=InsertLocation.TOP)
+            case "move_to_after_header":
+                self.move_inline_metadata(location=InsertLocation.AFTER_TITLE)
+            case "move_to_bottom":
+                self.move_inline_metadata(location=InsertLocation.BOTTOM)
+            case _:  # pragma: no cover
+                return

     def application_vault(self) -> None:
         """Vault actions."""
         alerts.usage("Create or delete a backup of your vault.")

         choices = [
             questionary.Separator(),
             {"name": "Backup vault", "value": "backup_vault"},
             {"name": "Delete vault backup", "value": "delete_backup"},
             questionary.Separator(),
@@ -306,51 +454,6 @@
             case _:
                 return
-    def application_delete_metadata(self) -> None:
-        alerts.usage("Delete either a key and all associated values, or a specific value.")
-
-        choices = [
-            {"name": "Delete key", "value": "delete_key"},
-            {"name": "Delete value", "value": "delete_value"},
-            {"name": "Delete inline tag", "value": "delete_inline_tag"},
-            questionary.Separator(),
-            {"name": "Back", "value": "back"},
-        ]
-        match self.questions.ask_selection(
-            choices=choices, question="Select a metadata type to delete"
-        ):
-            case "delete_key":
-                self.delete_key()
-            case "delete_value":
-                self.delete_value()
-            case "delete_inline_tag":
-                self.delete_inline_tag()
-            case _:  # pragma: no cover
-                return
-
-    def application_rename_metadata(self) -> None:
-        """Rename metadata."""
-        alerts.usage("Select the type of metadata to rename.")
-
-        choices = [
-            {"name": "Rename key", "value": "rename_key"},
-            {"name": "Rename value", "value": "rename_value"},
-            {"name": "Rename inline tag", "value": "rename_inline_tag"},
-            questionary.Separator(),
-            {"name": "Back", "value": "back"},
-        ]
-        match self.questions.ask_selection(
-            choices=choices, question="Select a metadata type to rename"
-        ):
-            case "rename_key":
-                self.rename_key()
-            case "rename_value":
-                self.rename_value()
-            case "rename_inline_tag":
-                self.rename_inline_tag()
-            case _:  # pragma: no cover
-                return
     def commit_changes(self) -> bool:
         """Write all changes to disk.

@@ -360,7 +463,7 @@
         changed_notes = self.vault.get_changed_notes()

         if len(changed_notes) == 0:
-            print("\n")
+            console.print("\n")
             alerts.notice("No changes to commit.\n")
             return False

@@ -375,17 +478,17 @@

         if not self.dry_run:
             alerts.success(f"{len(changed_notes)} changes committed to disk. Exiting")
-            return True
+            raise typer.Exit(0)

         return True

-    def delete_inline_tag(self) -> None:
+    def delete_tag(self) -> None:
         """Delete an inline tag."""
-        tag = self.questions.ask_existing_inline_tag(question="Which tag would you like to delete?")
+        tag = self.questions.ask_existing_tag(question="Which tag would you like to delete?")

-        num_changed = self.vault.delete_inline_tag(tag)
+        num_changed = self.vault.delete_tag(tag)
         if num_changed == 0:
-            alerts.warning(f"No notes were changed")
+            alerts.warning("No notes were changed")
             return

         alerts.success(f"Deleted inline tag: {tag} in {num_changed} notes")
@@ -399,7 +502,9 @@
         if key_to_delete is None:  # pragma: no cover
             return

-        num_changed = self.vault.delete_metadata(key_to_delete)
+        num_changed = self.vault.delete_metadata(
+            key=key_to_delete, area=MetadataType.ALL, is_regex=True
+        )
         if num_changed == 0:
             alerts.warning(f"No notes found with a key matching: [reverse]{key_to_delete}[/]")
             return
@@ -421,7 +526,9 @@
         if value is None:  # pragma: no cover
             return

-        num_changed = self.vault.delete_metadata(key, value)
+        num_changed = self.vault.delete_metadata(
+            key=key, value=value, area=MetadataType.ALL, is_regex=True
+        )
         if num_changed == 0:
             alerts.warning(f"No notes found matching: {key}: {value}")
             return
@@ -432,21 +539,39 @@

         return

+    def move_inline_metadata(self, location: InsertLocation) -> None:
+        """Move inline metadata to the selected location."""
+        num_changed = self.vault.move_inline_metadata(location)
+        if num_changed == 0:
+            alerts.warning("No notes were changed")
+            return
+
+        alerts.success(f"Moved inline metadata to {location.value} in {num_changed} notes")
     def noninteractive_export_csv(self, path: Path) -> None:
         """Export the vault metadata to CSV."""
         self._load_vault()
-        self.vault.export_metadata(format="json", path=str(path))
+        self.vault.export_metadata(export_format="csv", path=str(path))
         alerts.success(f"Exported metadata to {path}")

     def noninteractive_export_json(self, path: Path) -> None:
         """Export the vault metadata to JSON."""
         self._load_vault()
-        self.vault.export_metadata(format="json", path=str(path))
+        self.vault.export_metadata(export_format="json", path=str(path))
         alerts.success(f"Exported metadata to {path}")

+    def noninteractive_export_template(self, path: Path) -> None:
+        """Export the vault metadata to CSV."""
+        self._load_vault()
+        with console.status(
+            "Preparing export... [dim](Can take a while for large vaults)[/]",
+            spinner="bouncingBall",
+        ):
+            self.vault.export_notes_to_csv(path=str(path))
+        alerts.success(f"Exported metadata to {path}")

     def rename_key(self) -> None:
-        """Renames a key in the vault."""
+        """Rename a key in the vault."""
         original_key = self.questions.ask_existing_key(
             question="Which key would you like to rename?"
         )
@@ -459,17 +584,16 @@

         num_changed = self.vault.rename_metadata(original_key, new_key)
         if num_changed == 0:
-            alerts.warning(f"No notes were changed")
+            alerts.warning("No notes were changed")
             return

         alerts.success(
             f"Renamed [reverse]{original_key}[/] to [reverse]{new_key}[/] in {num_changed} notes"
         )

-    def rename_inline_tag(self) -> None:
+    def rename_tag(self) -> None:
         """Rename an inline tag."""
-        original_tag = self.questions.ask_existing_inline_tag(question="Which tag to rename?")
+        original_tag = self.questions.ask_existing_tag(question="Which tag to rename?")
         if original_tag is None:  # pragma: no cover
             return

@@ -477,9 +601,9 @@
         if new_tag is None:  # pragma: no cover
             return

-        num_changed = self.vault.rename_inline_tag(original_tag, new_tag)
+        num_changed = self.vault.rename_tag(original_tag, new_tag)
         if num_changed == 0:
-            alerts.warning(f"No notes were changed")
+            alerts.warning("No notes were changed")
             return

         alerts.success(
@@ -504,7 +628,7 @@

         num_changes = self.vault.rename_metadata(key, value, new_value)
         if num_changes == 0:
-            alerts.warning(f"No notes were changed")
+            alerts.warning("No notes were changed")
             return

         alerts.success(f"Renamed '{key}:{value}' to '{key}:{new_value}' in {num_changes} notes")
@@ -517,14 +641,9 @@
             alerts.info("No changes to review.")
             return

-        print(f"\nFound {len(changed_notes)} changed notes in the vault.\n")
-        answer = self.questions.ask_confirm(
-            question="View diffs of individual files?", default=False
-        )
-        if not answer:  # pragma: no cover
-            return
-
-        choices: list[dict[str, Any] | questionary.Separator] = [questionary.Separator()]
+        alerts.info(f"Found {len(changed_notes)} changed notes in the vault")
+        choices: list[dict[str, Any] | questionary.Separator] = []
+        choices.append(questionary.Separator())
         for n, note in enumerate(changed_notes, start=1):
             _selection = {
                 "name": f"{n}: {note.note_path.relative_to(self.vault.vault_path)}",
@@ -538,8 +657,82 @@
         while True:
             note_to_review = self.questions.ask_selection(
                 choices=choices,
-                question="Select a new to view the diff",
+                question="Select an updated note to view the diff",
             )
             if note_to_review is None or note_to_review == "return":
                 break
             changed_notes[note_to_review].print_diff()

+    def transpose_metadata(self, begin: MetadataType, end: MetadataType) -> None:  # noqa: PLR0911
+        """Transpose metadata from one format to another.
+
+        Args:
+            begin: The format to transpose from.
+            end: The format to transpose to.
+        """
+        choices = [
+            {"name": f"Transpose all {begin.value} to {end.value}", "value": "transpose_all"},
+            {"name": "Transpose a key", "value": "transpose_key"},
+            {"name": "Transpose a value", "value": "transpose_value"},
+            questionary.Separator(),
+            {"name": "Back", "value": "back"},
+        ]
+        match self.questions.ask_selection(choices=choices, question="Select an action to perform"):
+            case "transpose_all":
+                num_changed = self.vault.transpose_metadata(
+                    begin=begin,
+                    end=end,
+                    location=self.vault.insert_location,
+                )
+
+                if num_changed == 0:
+                    alerts.warning("No notes were changed")
+                    return
+
+                alerts.success(f"Transposed {begin.value} to {end.value} in {num_changed} notes")
+            case "transpose_key":
+                key = self.questions.ask_existing_key(question="Which key to transpose?")
+                if key is None:  # pragma: no cover
+                    return
+
+                num_changed = self.vault.transpose_metadata(
+                    begin=begin,
+                    end=end,
+                    key=key,
+                    location=self.vault.insert_location,
+                )
+
+                if num_changed == 0:
+                    alerts.warning("No notes were changed")
+                    return
+
+                alerts.success(
+                    f"Transposed key: `{key}` from {begin.value} to {end.value} in {num_changed} notes"
+                )
+            case "transpose_value":
+                key = self.questions.ask_existing_key(question="Which key contains the value?")
+                if key is None:  # pragma: no cover
+                    return
+
+                questions2 = Questions(vault=self.vault, key=key)
+                value = questions2.ask_existing_value(question="Which value to transpose?")
+                if value is None:  # pragma: no cover
+                    return
+
+                num_changed = self.vault.transpose_metadata(
+                    begin=begin,
+                    end=end,
+                    key=key,
+                    value=value,
+                    location=self.vault.insert_location,
+                )
+
+                if num_changed == 0:
+                    alerts.warning("No notes were changed")
+                    return
+
+                alerts.success(
+                    f"Transposed key: `{key}:{value}` from {begin.value} to {end.value} in {num_changed} notes"
+                )
+            case _:
+                return
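The three `vault.transpose_metadata` calls above differ only in how narrowly they scope the move: everything, one key, or one key/value pair. A standalone sketch of that scoping logic over plain dictionaries, assuming the dict-of-lists shape used throughout these models (the real implementation lives in the Vault and Note objects):

    def transpose(src: dict[str, list[str]], dest: dict[str, list[str]],
                  key: str | None = None, value: str | None = None) -> None:
        """Move all keys, one key, or one key/value pair from src to dest."""
        for k in list(src):
            if key is not None and k != key:
                continue
            values = [v for v in src[k] if value is None or v == value]
            if not values:
                continue
            dest.setdefault(k, []).extend(values)
            remaining = [v for v in src[k] if v not in values]
            if remaining:
                src[k] = remaining
            else:
                del src[k]

    fm = {"area": ["work", "home"], "status": ["done"]}
    inline: dict[str, list[str]] = {}
    transpose(fm, inline, key="area", value="work")
    print(fm)      # {'area': ['home'], 'status': ['done']}
    print(inline)  # {'area': ['work']}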
@@ -23,5 +23,5 @@ class InsertLocation(Enum):
     """

     TOP = "Top"
-    AFTER_TITLE = "Header"
+    AFTER_TITLE = "After title"
     BOTTOM = "Bottom"
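The enum change matters because the value string is user-facing: it is interpolated directly into success messages such as `Moved inline metadata to {location.value} in N notes` (see `move_inline_metadata` above), where "Header" read poorly. Illustration:

    from enum import Enum

    class InsertLocation(Enum):
        TOP = "Top"
        AFTER_TITLE = "After title"
        BOTTOM = "Bottom"

    print(f"Moved inline metadata to {InsertLocation.AFTER_TITLE.value} in 3 notes")
    # -> Moved inline metadata to After title in 3 notes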
@@ -1,23 +1,23 @@
 """Work with metadata items."""

+import copy
 import re
 from io import StringIO

-from rich import print
 from rich.columns import Columns
-from rich.console import Console
 from rich.table import Table
 from ruamel.yaml import YAML

 from obsidian_metadata._utils.alerts import logger as log
 from obsidian_metadata._utils import alerts
+from obsidian_metadata._utils import (
+    clean_dictionary,
+    delete_from_dict,
+    dict_contains,
+    dict_values_to_lists_strings,
+    merge_dictionaries,
+    remove_markdown_sections,
+    rename_in_dict,
+)
+from obsidian_metadata._utils.console import console
 from obsidian_metadata.models import Patterns  # isort: ignore
 from obsidian_metadata.models.enums import MetadataType

@@ -26,7 +26,14 @@ INLINE_TAG_KEY: str = "inline_tag"


 class VaultMetadata:
-    """Representation of all Metadata in the Vault."""
+    """Representation of all Metadata in the Vault.
+
+    Attributes:
+        dict (dict): Dictionary of all frontmatter and inline metadata. Does not include tags.
+        frontmatter (dict): Dictionary of all frontmatter metadata.
+        inline_metadata (dict): Dictionary of all inline metadata.
+        tags (list): List of all tags.
+    """

     def __init__(self) -> None:
         self.dict: dict[str, list[str]] = {}
@@ -49,15 +56,13 @@
         """
         if isinstance(metadata, dict):
             new_metadata = clean_dictionary(metadata)
-            self.dict = merge_dictionaries(self.dict.copy(), new_metadata.copy())
+            self.dict = merge_dictionaries(self.dict, new_metadata)

             if area == MetadataType.FRONTMATTER:
-                self.frontmatter = merge_dictionaries(self.frontmatter.copy(), new_metadata.copy())
+                self.frontmatter = merge_dictionaries(self.frontmatter, new_metadata)

             if area == MetadataType.INLINE:
-                self.inline_metadata = merge_dictionaries(
-                    self.inline_metadata.copy(), new_metadata.copy()
-                )
+                self.inline_metadata = merge_dictionaries(self.inline_metadata, new_metadata)

         if area == MetadataType.TAGS and isinstance(metadata, list):
             self.tags.extend(metadata)
@@ -84,15 +89,9 @@
         if area != MetadataType.TAGS and key is None:
             raise ValueError("Key must be provided when checking for a key's existence.")

-        match area:  # noqa: E999
+        match area:
             case MetadataType.ALL:
-                if dict_contains(self.dict, key, value, is_regex):
-                    return True
-                if key is None and value is not None:
-                    if is_regex:
-                        return any(re.search(value, tag) for tag in self.tags)
-                    return value in self.tags
+                return dict_contains(self.dict, key, value, is_regex)
             case MetadataType.FRONTMATTER:
                 return dict_contains(self.frontmatter, key, value, is_regex)
             case MetadataType.INLINE:
@@ -106,10 +105,8 @@
                     return any(re.search(value, tag) for tag in self.tags)
                 return value in self.tags

-        return False
-
     def delete(self, key: str, value_to_delete: str = None) -> bool:
-        """Delete a key or a key's value from the metadata. Regex is supported to allow deleting more than one key or value.
+        """Delete a key or a value from the VaultMetadata dict object. Regex is supported to allow deleting more than one key or value.

         Args:
             key (str): Key to check.
@@ -118,17 +115,12 @@
         Returns:
             bool: True if a value was deleted
         """
-        new_dict = self.dict.copy()
-
-        if value_to_delete is None:
-            for _k in list(new_dict):
-                if re.search(key, _k):
-                    del new_dict[_k]
-        else:
-            for _k, _v in new_dict.items():
-                if re.search(key, _k):
-                    new_values = [x for x in _v if not re.search(value_to_delete, x)]
-                    new_dict[_k] = sorted(new_values)
+        new_dict = delete_from_dict(
+            dictionary=self.dict,
+            key=key,
+            value=value_to_delete,
+            is_regex=True,
+        )

         if new_dict != self.dict:
             self.dict = dict(new_dict)
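The deleted loops spell out what the new `delete_from_dict` helper has to do: drop whole keys when no value is given, otherwise filter matching values out of matching keys. A standalone sketch consistent with these call sites; the real helper in `obsidian_metadata._utils` may differ in detail:

    import re

    def delete_from_dict(dictionary: dict, key: str, value: str = None, is_regex: bool = False) -> dict:
        """Return a copy of `dictionary` with a key, or a value under it, removed."""
        matches = (lambda p, s: re.search(p, s)) if is_regex else (lambda p, s: p == s)
        new_dict = {k: list(v) for k, v in dictionary.items()}
        if value is None:
            return {k: v for k, v in new_dict.items() if not matches(key, k)}
        for k, v in new_dict.items():
            if matches(key, k):
                new_dict[k] = sorted(x for x in v if not matches(value, x))
        return new_dict

    print(delete_from_dict({"area": ["work", "home"]}, key="area", value="work", is_regex=True))
    # -> {'area': ['home']}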
@@ -142,28 +134,24 @@
         Args:
             area (MetadataType): Type of metadata to print
         """
-        dict_to_print: dict[str, list[str]] = None
-        list_to_print: list[str] = None
+        dict_to_print = None
+        list_to_print = None
         match area:
             case MetadataType.INLINE:
-                dict_to_print = self.inline_metadata.copy()
+                dict_to_print = self.inline_metadata
                 header = "All inline metadata"
             case MetadataType.FRONTMATTER:
-                dict_to_print = self.frontmatter.copy()
+                dict_to_print = self.frontmatter
                 header = "All frontmatter"
             case MetadataType.TAGS:
-                list_to_print = []
-                for tag in self.tags:
-                    list_to_print.append(f"#{tag}")
+                list_to_print = [f"#{x}" for x in self.tags]
                 header = "All inline tags"
             case MetadataType.KEYS:
                 list_to_print = sorted(self.dict.keys())
                 header = "All Keys"
             case MetadataType.ALL:
-                dict_to_print = self.dict.copy()
-                list_to_print = []
-                for tag in self.tags:
-                    list_to_print.append(f"#{tag}")
+                dict_to_print = self.dict
+                list_to_print = [f"#{x}" for x in self.tags]
                 header = "All metadata"

         if dict_to_print is not None:
@@ -175,7 +163,7 @@
                     "\n".join(sorted(value)) if isinstance(value, list) else value
                 )
                 table.add_row(f"[bold]{key}[/]", str(values))
-            Console().print(table)
+            console.print(table)

         if list_to_print is not None:
             columns = Columns(
@@ -184,7 +172,7 @@
                 expand=True,
                 title=header if area != MetadataType.ALL else "All inline tags",
             )
-            print(columns)
+            console.print(columns)
     def rename(self, key: str, value_1: str, value_2: str = None) -> bool:
         """Replace a value in the frontmatter.

@@ -193,19 +181,14 @@
             key (str): Key to check.
             value_1 (str): `With value_2` this is the value to rename. If `value_2` is None this is the renamed key
             value_2 (str, Optional): New value.
-            bypass_check (bool, optional): Bypass the check if the key exists. Defaults to False.

         Returns:
             bool: True if a value was renamed
         """
-        if value_2 is None:
-            if key in self.dict and value_1 not in self.dict:
-                self.dict[value_1] = self.dict.pop(key)
-                return True
-            return False
+        new_dict = rename_in_dict(dictionary=self.dict, key=key, value_1=value_1, value_2=value_2)

-        if key in self.dict and value_1 in self.dict[key]:
-            self.dict[key] = sorted({value_2 if x == value_1 else x for x in self.dict[key]})
+        if new_dict != self.dict:
+            self.dict = dict(new_dict)
             return True

         return False
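As with deletion, the removed branches define the new `rename_in_dict` helper: with two values it replaces one value under a key, with one it renames the key itself. A sketch inferred from the code it replaces (the actual helper in `obsidian_metadata._utils` may differ):

    def rename_in_dict(dictionary: dict, key: str, value_1: str, value_2: str = None) -> dict:
        """Rename a key (value_2 is None) or replace value_1 with value_2 under key."""
        new_dict = {k: list(v) for k, v in dictionary.items()}
        if value_2 is None:
            if key in new_dict and value_1 not in new_dict:
                new_dict[value_1] = new_dict.pop(key)
        elif key in new_dict and value_1 in new_dict[key]:
            new_dict[key] = sorted({value_2 if x == value_1 else x for x in new_dict[key]})
        return new_dict

    print(rename_in_dict({"status": ["wip"]}, key="status", value_1="state"))
    # -> {'state': ['wip']}
    print(rename_in_dict({"status": ["wip", "done"]}, key="status", value_1="wip", value_2="in-progress"))
    # -> {'status': ['done', 'in-progress']}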
@@ -214,9 +197,9 @@
 class Frontmatter:
     """Representation of frontmatter metadata."""

-    def __init__(self, file_content: str):
+    def __init__(self, file_content: str) -> None:
         self.dict: dict[str, list[str]] = self._grab_note_frontmatter(file_content)
-        self.dict_original: dict[str, list[str]] = self.dict.copy()
+        self.dict_original: dict[str, list[str]] = copy.deepcopy(self.dict)
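The switch from `dict.copy()` to `copy.deepcopy()` is behavioral, not cosmetic: these dicts hold lists, and a shallow copy shares those lists, so mutating a value list also mutates the `dict_original` snapshot and change detection (`has_changes`) would report nothing changed. Demonstration:

    import copy

    d = {"tags": ["a"]}
    shallow = d.copy()
    d["tags"].append("b")
    print(shallow == d)  # True: the snapshot mutated along with d

    d2 = {"tags": ["a"]}
    deep = copy.deepcopy(d2)
    d2["tags"].append("b")
    print(deep == d2)  # False: the snapshot is preserved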
     def __repr__(self) -> str:  # pragma: no cover
         """Representation of the frontmatter.

@@ -230,7 +213,7 @@
         """Grab metadata from a note.

         Args:
-            note_path (Path): Path to the note file.
+            file_content (str): Content of the note.

         Returns:
             dict: Metadata from the note.
@@ -243,7 +226,14 @@
             return {}

         yaml = YAML(typ="safe")
-        frontmatter: dict = yaml.load(frontmatter_block)
+        yaml.allow_unicode = False
+        try:
+            frontmatter: dict = yaml.load(frontmatter_block)
+        except Exception as e:  # noqa: BLE001
+            raise AttributeError(e) from e
+
+        if frontmatter is None or frontmatter == [None]:
+            return {}

         for k in frontmatter:
             if frontmatter[k] is None:
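The new try/except converts any parser failure into an `AttributeError`, which `Note.__init__` (later in this diff) catches so the tool exits with a readable message instead of a ruamel traceback. The failure mode it guards against, sketched with an assumed malformed block:

    from ruamel.yaml import YAML

    bad_block = "key: [unclosed"  # malformed YAML frontmatter
    try:
        YAML(typ="safe").load(bad_block)
    except Exception as e:
        print(f"invalid frontmatter: {type(e).__name__}")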
@@ -251,7 +241,7 @@

         return dict_values_to_lists_strings(frontmatter, strip_null_values=True)

-    def add(self, key: str, value: str | list[str] = None) -> bool:
+    def add(self, key: str, value: str | list[str] = None) -> bool:  # noqa: PLR0911
         """Add a key and value to the frontmatter.

         Args:
@@ -278,6 +268,7 @@
         if key in self.dict and value not in self.dict[key]:
             if isinstance(value, list):
                 self.dict[key].extend(value)
+                self.dict[key] = list(sorted(set(self.dict[key])))
                 return True

             self.dict[key].append(value)
@@ -298,27 +289,23 @@
         """
         return dict_contains(self.dict, key, value, is_regex)

-    def delete(self, key: str, value_to_delete: str = None) -> bool:
+    def delete(self, key: str, value_to_delete: str = None, is_regex: bool = False) -> bool:
         """Delete a value or key in the frontmatter. Regex is supported to allow deleting more than one key or value.

         Args:
+            is_regex (bool, optional): Use regex to check. Defaults to False.
             key (str): If no value, key to delete. If value, key containing the value.
             value_to_delete (str, optional): Value to delete.

         Returns:
             bool: True if a value was deleted
         """
-        new_dict = dict(self.dict)
-
-        if value_to_delete is None:
-            for _k in list(new_dict):
-                if re.search(key, _k):
-                    del new_dict[_k]
-        else:
-            for _k, _v in new_dict.items():
-                if re.search(key, _k):
-                    new_values = [x for x in _v if not re.search(value_to_delete, x)]
-                    new_dict[_k] = sorted(new_values)
+        new_dict = delete_from_dict(
+            dictionary=self.dict,
+            key=key,
+            value=value_to_delete,
+            is_regex=is_regex,
+        )

         if new_dict != self.dict:
             self.dict = dict(new_dict)
@@ -326,6 +313,10 @@

         return False

+    def delete_all(self) -> None:
+        """Delete all Frontmatter from the note."""
+        self.dict = {}
+
     def has_changes(self) -> bool:
         """Check if the frontmatter has changes.

@@ -345,14 +336,10 @@
         Returns:
             bool: True if a value was renamed
         """
-        if value_2 is None:
-            if key in self.dict and value_1 not in self.dict:
-                self.dict[value_1] = self.dict.pop(key)
-                return True
-            return False
+        new_dict = rename_in_dict(dictionary=self.dict, key=key, value_1=value_1, value_2=value_2)

-        if key in self.dict and value_1 in self.dict[key]:
-            self.dict[key] = sorted({value_2 if x == value_1 else x for x in self.dict[key]})
+        if new_dict != self.dict:
+            self.dict = dict(new_dict)
             return True

         return False
@@ -364,7 +351,7 @@
             str: Frontmatter as a YAML string.
             sort_keys (bool, optional): Sort the keys. Defaults to False.
         """
-        dict_to_dump = self.dict.copy()
+        dict_to_dump = copy.deepcopy(self.dict)
         for k in dict_to_dump:
             if dict_to_dump[k] == []:
                 dict_to_dump[k] = None
@@ -389,9 +376,9 @@
 class InlineMetadata:
     """Representation of inline metadata in the form of `key:: value`."""

-    def __init__(self, file_content: str):
-        self.dict: dict[str, list[str]] = self.grab_inline_metadata(file_content)
-        self.dict_original: dict[str, list[str]] = self.dict.copy()
+    def __init__(self, file_content: str) -> None:
+        self.dict: dict[str, list[str]] = self._grab_inline_metadata(file_content)
+        self.dict_original: dict[str, list[str]] = copy.deepcopy(self.dict)

     def __repr__(self) -> str:  # pragma: no cover
         """Representation of inline metadata.

@@ -401,76 +388,7 @@
         """
         return f"InlineMetadata(inline_metadata={self.dict})"

-    def add(self, key: str, value: str = None) -> bool:
-        """Add a key and value to the inline metadata.
-
-        Args:
-            key (str): Key to add.
-            value (str, optional): Value to add.
-
-        Returns:
-            bool: True if the metadata was added
-        """
-        if value is None or value == "" or value == "None":
-            if key not in self.dict:
-                self.dict[key] = []
-                return True
-            return False
-
-        if key not in self.dict:
-            self.dict[key] = [value]
-            return True
-
-        if key in self.dict and len(self.dict[key]) > 0:
-            if value in self.dict[key]:
-                return False
-            raise ValueError(f"'{key}' not empty")
-
-        self.dict[key].append(value)
-        return True
-
-    def contains(self, key: str, value: str = None, is_regex: bool = False) -> bool:
-        """Check if a key or value exists in the inline metadata.
-
-        Args:
-            key (str): Key to check.
-            value (str, Optional): Value to check.
-            is_regex (bool, optional): If True, key and value are treated as regex. Defaults to False.
-
-        Returns:
-            bool: True if the key exists.
-        """
-        return dict_contains(self.dict, key, value, is_regex)
-
-    def delete(self, key: str, value_to_delete: str = None) -> bool:
-        """Delete a value or key in the inline metadata. Regex is supported to allow deleting more than one key or value.
-
-        Args:
-            key (str): If no value, key to delete. If value, key containing the value.
-            value_to_delete (str, optional): Value to delete.
-
-        Returns:
-            bool: True if a value was deleted
-        """
-        new_dict = dict(self.dict)
-
-        if value_to_delete is None:
-            for _k in list(new_dict):
-                if re.search(key, _k):
-                    del new_dict[_k]
-        else:
-            for _k, _v in new_dict.items():
-                if re.search(key, _k):
-                    new_values = [x for x in _v if not re.search(value_to_delete, x)]
-                    new_dict[_k] = sorted(new_values)
-
-        if new_dict != self.dict:
-            self.dict = dict(new_dict)
-            return True
-
-        return False
-
-    def grab_inline_metadata(self, file_content: str) -> dict[str, list[str]]:
+    def _grab_inline_metadata(self, file_content: str) -> dict[str, list[str]]:
         """Grab inline metadata from a note.

         Returns:
@@ -494,6 +412,78 @@

         return clean_dictionary(inline_metadata)

+    def add(self, key: str, value: str | list[str] = None) -> bool:  # noqa: PLR0911
+        """Add a key and value to the inline metadata.
+
+        Args:
+            key (str): Key to add.
+            value (str, optional): Value to add.
+
+        Returns:
+            bool: True if the metadata was added
+        """
+        if value is None:
+            if key not in self.dict:
+                self.dict[key] = []
+                return True
+            return False
+
+        if key not in self.dict:
+            if isinstance(value, list):
+                self.dict[key] = value
+                return True
+
+            self.dict[key] = [value]
+            return True
+
+        if key in self.dict and value not in self.dict[key]:
+            if isinstance(value, list):
+                self.dict[key].extend(value)
+                self.dict[key] = list(sorted(set(self.dict[key])))
+                return True
+
+            self.dict[key].append(value)
+            return True
+
+        return False
+
+    def contains(self, key: str, value: str = None, is_regex: bool = False) -> bool:
+        """Check if a key or value exists in the inline metadata.
+
+        Args:
+            key (str): Key to check.
+            value (str, Optional): Value to check.
+            is_regex (bool, optional): If True, key and value are treated as regex. Defaults to False.
+
+        Returns:
+            bool: True if the key exists.
+        """
+        return dict_contains(self.dict, key, value, is_regex)
+
+    def delete(self, key: str, value_to_delete: str = None, is_regex: bool = False) -> bool:
+        """Delete a value or key in the inline metadata. Regex is supported to allow deleting more than one key or value.
+
+        Args:
+            is_regex (bool, optional): If True, key and value are treated as regex. Defaults to False.
+            key (str): If no value, key to delete. If value, key containing the value.
+            value_to_delete (str, optional): Value to delete.
+
+        Returns:
+            bool: True if a value was deleted
+        """
+        new_dict = delete_from_dict(
+            dictionary=self.dict,
+            key=key,
+            value=value_to_delete,
+            is_regex=is_regex,
+        )
+
+        if new_dict != self.dict:
+            self.dict = dict(new_dict)
+            return True
+
+        return False
+
     def has_changes(self) -> bool:
         """Check if the metadata has changes.
@@ -513,14 +503,10 @@
         Returns:
             bool: True if a value was renamed
         """
-        if value_2 is None:
-            if key in self.dict and value_1 not in self.dict:
-                self.dict[value_1] = self.dict.pop(key)
-                return True
-            return False
+        new_dict = rename_in_dict(dictionary=self.dict, key=key, value_1=value_1, value_2=value_2)

-        if key in self.dict and value_1 in self.dict[key]:
-            self.dict[key] = sorted({value_2 if x == value_1 else x for x in self.dict[key]})
+        if new_dict != self.dict:
+            self.dict = dict(new_dict)
             return True

         return False
@@ -529,7 +515,7 @@
 class InlineTags:
     """Representation of inline tags."""

-    def __init__(self, file_content: str):
+    def __init__(self, file_content: str) -> None:
         self.metadata_key = INLINE_TAG_KEY
         self.list: list[str] = self._grab_inline_tags(file_content)
         self.list_original: list[str] = self.list.copy()
@@ -561,18 +547,34 @@
                 )
             )

-    def add(self, new_tag: str) -> bool:
+    def add(self, new_tag: str | list[str]) -> bool:
         """Add a new inline tag.

         Args:
-            new_tag (str): Tag to add.
+            new_tag (str, list[str]): Tag to add.

         Returns:
             bool: True if a tag was added.
         """
-        if new_tag in self.list:
+        added_tag = False
+        if isinstance(new_tag, list):
+            for _tag in new_tag:
+                if _tag.startswith("#"):
+                    _tag = _tag[1:]
+                if _tag in self.list:
+                    continue
+                self.list.append(_tag)
+                added_tag = True
+
+            if added_tag:
+                self.list = sorted(self.list)
+                return True
             return False
+
+        if new_tag.startswith("#"):
+            new_tag = new_tag[1:]
+        if new_tag in self.list:
+            return False
         new_list = self.list.copy()
         new_list.append(new_tag)
         self.list = sorted(new_list)
@@ -624,13 +626,13 @@
         """Replace an inline tag with another string.

         Args:
-            old_tag (str): `With value_2` this is the value to rename. If `value_2` is None this is the renamed key
-            new_tag (str, Optional): New value.
+            old_tag (str): `With value_2` this is the value to rename.
+            new_tag (str): New value

         Returns:
             bool: True if a value was renamed
         """
-        if old_tag in self.list:
-            self.list = sorted([new_tag if i == old_tag else i for i in self.list])
+        if old_tag in self.list and new_tag is not None and new_tag:
+            self.list = sorted({new_tag if i == old_tag else i for i in self.list})
             return True
         return False
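`InlineTags.add` now accepts a single tag or a list, strips a leading `#`, skips duplicates, and keeps the list sorted. The normalization in isolation:

    # Standalone sketch of the normalization performed by InlineTags.add above.
    def add_tags(existing: list[str], new: list[str]) -> list[str]:
        out = list(existing)
        for tag in new:
            tag = tag.removeprefix("#")  # tags are stored without the hash
            if tag not in out:
                out.append(tag)
        return sorted(out)

    print(add_tags(["alpha"], ["#beta", "alpha", "beta"]))  # ['alpha', 'beta']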
@@ -1,16 +1,18 @@
-"""Representation of notes and in the vault."""
+"""Representation of a not in the vault."""

+import copy
 import difflib
 import re
 from pathlib import Path

 import rich.repr
 import typer
-from rich.console import Console
 from rich.table import Table

 from obsidian_metadata._utils import alerts
 from obsidian_metadata._utils.alerts import logger as log
+from obsidian_metadata._utils.console import console
 from obsidian_metadata.models import (
     Frontmatter,
     InlineMetadata,
@@ -35,11 +37,12 @@ class Note:
         dry_run (bool): Whether to run in dry-run mode.
         file_content (str): Total contents of the note file (frontmatter and content).
         frontmatter (dict): Frontmatter of the note.
-        inline_tags (list): List of inline tags in the note.
+        tags (list): List of inline tags in the note.
         inline_metadata (dict): Dictionary of inline metadata in the note.
         original_file_content (str): Original contents of the note file (frontmatter and content)
     """

-    def __init__(self, note_path: Path, dry_run: bool = False):
+    def __init__(self, note_path: Path, dry_run: bool = False) -> None:
         log.trace(f"Creating Note object for {note_path}")
         self.note_path: Path = Path(note_path)
         self.dry_run: bool = dry_run
@@ -51,8 +54,13 @@
             alerts.error(f"Note {self.note_path} not found. Exiting")
             raise typer.Exit(code=1) from e

-        self.frontmatter: Frontmatter = Frontmatter(self.file_content)
-        self.inline_tags: InlineTags = InlineTags(self.file_content)
+        try:
+            self.frontmatter: Frontmatter = Frontmatter(self.file_content)
+        except AttributeError as e:
+            alerts.error(f"Note {self.note_path} has invalid frontmatter.\n{e}")
+            raise typer.Exit(code=1) from e
+
+        self.tags: InlineTags = InlineTags(self.file_content)
         self.inline_metadata: InlineMetadata = InlineMetadata(self.file_content)
         self.original_file_content: str = self.file_content
@@ -61,70 +69,17 @@ class Note:
         yield "note_path", self.note_path
         yield "dry_run", self.dry_run
         yield "frontmatter", self.frontmatter
-        yield "inline_tags", self.inline_tags
+        yield "tags", self.tags
        yield "inline_metadata", self.inline_metadata

-    def _delete_inline_metadata(self, key: str, value: str = None) -> None:
-        """Delete an inline metadata key/value pair from the text of the note. This method does not remove the key/value from the metadata attribute of the note.
-
-        Args:
-            key (str): Key to delete.
-            value (str, optional): Value to delete.
-        """
-        all_results = PATTERNS.find_inline_metadata.findall(self.file_content)
-        stripped_null_values = [tuple(filter(None, x)) for x in all_results]
-
-        for _k, _v in stripped_null_values:
-            if re.search(key, _k):
-                if value is None:
-                    _k = re.escape(_k)
-                    _v = re.escape(_v)
-                    self.sub(rf"\[?{_k}:: ?{_v}]?", "", is_regex=True)
-                    return
-
-                if re.search(value, _v):
-                    _k = re.escape(_k)
-                    _v = re.escape(_v)
-                    self.sub(rf"({_k}::) ?{_v}", r"\1", is_regex=True)
-
-    def _rename_inline_metadata(self, key: str, value_1: str, value_2: str = None) -> None:
-        """Replace the inline metadata in the note with the current inline metadata object.
-
-        Args:
-            key (str): Key to rename.
-            value_1 (str): Value to replace OR new key name (if value_2 is None).
-            value_2 (str, optional): New value.
-
-        """
-        all_results = PATTERNS.find_inline_metadata.findall(self.file_content)
-        stripped_null_values = [tuple(filter(None, x)) for x in all_results]
-
-        for _k, _v in stripped_null_values:
-            if re.search(key, _k):
-                if value_2 is None:
-                    if re.search(rf"{key}[^\w\d_-]+", _k):
-                        key_text = re.split(r"[^\w\d_-]+$", _k)[0]
-                        key_markdown = re.split(r"^[\w\d_-]+", _k)[1]
-                        self.sub(
-                            rf"{key_text}{key_markdown}::",
-                            rf"{value_1}{key_markdown}::",
-                        )
-                    else:
-                        self.sub(f"{_k}::", f"{value_1}::")
-                else:
-                    if re.search(key, _k) and re.search(value_1, _v):
-                        _k = re.escape(_k)
-                        _v = re.escape(_v)
-                        self.sub(f"{_k}:: ?{_v}", f"{_k}:: {value_2}", is_regex=True)
-
-    def add_metadata(
+    def add_metadata(  # noqa: C901
         self,
         area: MetadataType,
         key: str = None,
         value: str | list[str] = None,
         location: InsertLocation = None,
     ) -> bool:
-        """Add metadata to the note if it does not already exist.
+        """Add metadata to the note if it does not already exist. This method adds specified metadata to the appropriate MetadataType object AND writes the new metadata to the note's file.

         Args:
             area (MetadataType): Area to add metadata to.
@@ -135,28 +90,71 @@
         Returns:
             bool: Whether the metadata was added.
         """
-        if area is MetadataType.FRONTMATTER and self.frontmatter.add(key, value):
-            self.update_frontmatter()
-            return True
-
-        try:
-            if area is MetadataType.INLINE and self.inline_metadata.add(key, str(value)):
-                line = f"{key}:: " if value is None else f"{key}:: {value}"
-                self.insert(new_string=line, location=location)
-                return True
-
-        except ValueError as e:
-            log.warning(f"Could not add metadata to {self.note_path}: {e}")
-            return False
-
-        if area is MetadataType.TAGS and self.inline_tags.add(str(value)):
-            line = f"#{value}"
-            self.insert(new_string=line, location=location)
-            return True
+        match area:
+            case MetadataType.FRONTMATTER if self.frontmatter.add(key, value):
+                self.write_frontmatter()
+                return True
+
+            case MetadataType.INLINE:
+                if value is None and self.inline_metadata.add(key):
+                    line = f"{key}::"
+                    self.write_string(new_string=line, location=location)
+                    return True
+
+                new_values = []
+                if isinstance(value, list):
+                    new_values = [_v for _v in value if self.inline_metadata.add(key, _v)]
+                elif self.inline_metadata.add(key, value):
+                    new_values = [value]
+
+                if new_values:
+                    for value in new_values:
+                        self.write_string(new_string=f"{key}:: {value}", location=location)
+                    return True
+
+            case MetadataType.TAGS:
+                new_values = []
+                if isinstance(value, list):
+                    new_values = [_v for _v in value if self.tags.add(_v)]
+                elif self.tags.add(value):
+                    new_values = [value]
+
+                if new_values:
+                    for value in new_values:
+                        _v = value
+                        if _v.startswith("#"):
+                            _v = _v[1:]
+                        self.write_string(new_string=f"#{_v}", location=location)
+                    return True
+
+            case _:
+                return False

         return False

-    def contains_inline_tag(self, tag: str, is_regex: bool = False) -> bool:
+    def commit(self, path: Path = None) -> None:
+        """Write the note's new content to disk. This is a destructive action.
+
+        Args:
+            path (Path): Path to write the note to. Defaults to the note's path.
+
+        Raises:
+            typer.Exit: If the note's path is not found.
+        """
+        p = self.note_path if path is None else path
+        if self.dry_run:
+            log.trace(f"DRY RUN: Writing note {p} to disk")
+            return
+
+        try:
+            with p.open(mode="w") as f:
+                log.trace(f"Writing note {p} to disk")
+                f.write(self.file_content)
+        except FileNotFoundError as e:
+            alerts.error(f"Note {p} not found. Exiting")
+            raise typer.Exit(code=1) from e
+    def contains_tag(self, tag: str, is_regex: bool = False) -> bool:
         """Check if a note contains the specified inline tag.

         Args:
@@ -166,10 +164,10 @@
         Returns:
             bool: Whether the note has inline tags.
         """
-        return self.inline_tags.contains(tag, is_regex=is_regex)
+        return self.tags.contains(tag, is_regex=is_regex)

     def contains_metadata(self, key: str, value: str = None, is_regex: bool = False) -> bool:
-        """Check if a note has a key or a key-value pair in its metadata.
+        """Check if a note has a key or a key-value pair in its Frontmatter or InlineMetadata.

         Args:
             key (str): Key to check for.
@@ -193,8 +191,19 @@

         return False

-    def delete_inline_tag(self, tag: str) -> bool:
-        """Delete an inline tag from the `inline_tags` attribute AND removes the tag from the text of the note if it exists.
+    def delete_all_metadata(self) -> None:
+        """Delete all metadata from the note. Removes all frontmatter and inline metadata and tags from the body of the note and from the associated metadata objects."""
+        for key in self.inline_metadata.dict:
+            self.delete_metadata(key=key, area=MetadataType.INLINE)
+
+        for tag in self.tags.list:
+            self.delete_tag(tag=tag)
+
+        self.frontmatter.delete_all()
+        self.write_frontmatter()
+
+    def delete_tag(self, tag: str) -> bool:
+        """Delete an inline tag from the `tags` attribute AND removes the tag from the text of the note if it exists.

         Args:
             tag (str): Tag to delete.
@@ -202,25 +211,33 @@
         Returns:
             bool: Whether the tag was deleted.
         """
-        new_list = self.inline_tags.list.copy()
+        new_list = self.tags.list.copy()

         for _t in new_list:
             if re.search(tag, _t):
                 _t = re.escape(_t)
                 self.sub(rf"#{_t}([ \|,;:\*\(\)\[\]\\\.\n#&])", r"\1", is_regex=True)
-                self.inline_tags.delete(tag)
+                self.tags.delete(tag)

-        if new_list != self.inline_tags.list:
+        if new_list != self.tags.list:
             return True

         return False
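The substitution in `delete_tag` only removes a tag when it is followed by a delimiter from the bracketed character class, which keeps `#done` from also eating the front of `#done2`. The pattern in isolation:

    import re

    text = "#done x #done2 y #done\n"
    tag = re.escape("done")
    print(re.sub(rf"#{tag}([ \|,;:\*\(\)\[\]\\\.\n#&])", r"\1", text))
    # -> " x #done2 y " followed by the preserved newline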
-    def delete_metadata(self, key: str, value: str = None) -> bool:
-        """Delete a key or key-value pair from the note's metadata. Regex is supported.
+    def delete_metadata(
+        self,
+        key: str,
+        value: str = None,
+        area: MetadataType = MetadataType.ALL,
+        is_regex: bool = False,
+    ) -> bool:
+        """Delete a key or key-value pair from the note's Metadata object and the content of the note. Regex is supported.

-        If no value is provided, will delete an entire key.
+        If no value is provided, will delete an entire specified key.

         Args:
+            area (MetadataType, optional): Area to delete metadata from. Defaults to MetadataType.ALL.
+            is_regex (bool, optional): Whether to use regex to match the key/value.
             key (str): Key to delete.
             value (str, optional): Value to delete.

@@ -229,20 +246,18 @@
         """
         changed_value: bool = False

-        if value is None:
-            if self.frontmatter.delete(key):
-                self.update_frontmatter()
-                changed_value = True
-            if self.inline_metadata.delete(key):
-                self._delete_inline_metadata(key, value)
-                changed_value = True
-        else:
-            if self.frontmatter.delete(key, value):
-                self.update_frontmatter()
-                changed_value = True
-            if self.inline_metadata.delete(key, value):
-                self._delete_inline_metadata(key, value)
-                changed_value = True
+        if (
+            area == MetadataType.FRONTMATTER or area == MetadataType.ALL
+        ) and self.frontmatter.delete(key=key, value_to_delete=value, is_regex=is_regex):
+            self.write_frontmatter()
+            changed_value = True
+
+        if (
+            area == MetadataType.INLINE or area == MetadataType.ALL
+        ) and self.inline_metadata.contains(key, value):
+            self.write_delete_inline_metadata(key=key, value=value, is_regex=is_regex)
+            self.inline_metadata.delete(key=key, value_to_delete=value, is_regex=is_regex)
+            changed_value = True

         if changed_value:
             return True
@@ -257,7 +272,7 @@
         if self.frontmatter.has_changes():
             return True

-        if self.inline_tags.has_changes():
+        if self.tags.has_changes():
             return True

         if self.inline_metadata.has_changes():
@@ -268,59 +283,8 @@ class Note:
|
||||
|
||||
return False
|
||||
|
||||
def insert(
|
||||
self,
|
||||
new_string: str,
|
||||
location: InsertLocation,
|
||||
allow_multiple: bool = False,
|
||||
) -> None:
|
||||
"""Insert a string at the top of a note.
|
||||
|
||||
Args:
|
||||
new_string (str): String to insert at the top of the note.
|
||||
allow_multiple (bool): Whether to allow inserting the string if it already exists in the note.
|
||||
location (InsertLocation): Location to insert the string.
|
||||
"""
|
||||
if not allow_multiple and len(re.findall(re.escape(new_string), self.file_content)) > 0:
|
||||
return
|
||||
|
||||
match location: # noqa: E999
|
||||
case InsertLocation.BOTTOM:
|
||||
self.file_content += f"\n{new_string}"
|
||||
case InsertLocation.TOP:
|
||||
try:
|
||||
top = PATTERNS.frontmatter_block.search(self.file_content).group("frontmatter")
|
||||
except AttributeError:
|
||||
top = ""
|
||||
|
||||
if top == "":
|
||||
self.file_content = f"{new_string}\n{self.file_content}"
|
||||
else:
|
||||
new_string = f"{top}\n{new_string}"
|
||||
top = re.escape(top)
|
||||
self.sub(top, new_string, is_regex=True)
|
||||
case InsertLocation.AFTER_TITLE:
|
||||
try:
|
||||
top = PATTERNS.top_with_header.search(self.file_content).group("top")
|
||||
except AttributeError:
|
||||
top = ""
|
||||
|
||||
if top == "":
|
||||
self.file_content = f"{new_string}\n{self.file_content}"
|
||||
else:
|
||||
new_string = f"{top}\n{new_string}"
|
||||
top = re.escape(top)
|
||||
self.sub(top, new_string, is_regex=True)
|
||||
case _:
|
||||
raise ValueError(f"Invalid location: {location}")
|
||||
pass
|
||||
|
||||
def print_note(self) -> None:
|
||||
"""Print the note to the console."""
|
||||
print(self.file_content)
|
||||
|
||||
def print_diff(self) -> None:
|
||||
"""Print a diff of the note's original state and it's new state."""
|
||||
"""Print a diff of the note's content. Compares original state to it's new state."""
|
||||
a = self.original_file_content.splitlines()
b = self.file_content.splitlines()

@@ -334,10 +298,14 @@ class Note:
elif line.startswith("-"):
table.add_row(line, style="red")

Console().print(table)
console.print(table)

def rename_inline_tag(self, tag_1: str, tag_2: str) -> bool:
"""Rename an inline tag from the note ONLY if it's not in the metadata as well.
def print_note(self) -> None:
"""Print the note to the console."""
console.print(self.file_content)

def rename_tag(self, tag_1: str, tag_2: str) -> bool:
"""Rename an inline tag. Updates the Metadata object and the text of the note.

Args:
tag_1 (str): Tag to rename.
@@ -346,20 +314,20 @@ class Note:
Returns:
bool: Whether the tag was renamed.
"""
if tag_1 in self.inline_tags.list:
if tag_1 in self.tags.list:
self.sub(
rf"#{tag_1}([ \|,;:\*\(\)\[\]\\\.\n#&])",
rf"#{tag_2}\1",
is_regex=True,
)
self.inline_tags.rename(tag_1, tag_2)
self.tags.rename(tag_1, tag_2)
return True
return False

def rename_metadata(self, key: str, value_1: str, value_2: str = None) -> bool:
"""Rename a key or key-value pair in the note's metadata.
"""Rename a key or key-value pair in the note's InlineMetadata and Frontmatter objects and the content of the note.

If no value is provided, will rename an entire key.
If no value is provided, will rename the entire specified key.

Args:
key (str): Key to rename.
@@ -372,17 +340,17 @@ class Note:
changed_value: bool = False
if value_2 is None:
if self.frontmatter.rename(key, value_1):
self.update_frontmatter()
self.write_frontmatter()
changed_value = True
if self.inline_metadata.rename(key, value_1):
self._rename_inline_metadata(key, value_1)
self.write_inline_metadata_change(key, value_1)
changed_value = True
else:
if self.frontmatter.rename(key, value_1, value_2):
self.update_frontmatter()
self.write_frontmatter()
changed_value = True
if self.inline_metadata.rename(key, value_1, value_2):
self._rename_inline_metadata(key, value_1, value_2)
self.write_inline_metadata_change(key, value_1, value_2)
changed_value = True

if changed_value:
@@ -403,8 +371,134 @@ class Note:

self.file_content = re.sub(pattern, replacement, self.file_content, re.MULTILINE)

def update_frontmatter(self, sort_keys: bool = False) -> None:
"""Replace the frontmatter in the note with the current frontmatter object."""
def transpose_metadata( # noqa: C901, PLR0912, PLR0911
self,
begin: MetadataType,
end: MetadataType,
key: str = None,
value: str | list[str] = None,
location: InsertLocation = InsertLocation.BOTTOM,
) -> bool:
"""Move metadata from one metadata object to another. i.e. Frontmatter to InlineMetadata or vice versa.

If no key is specified, will transpose all metadata. If a key is specified, but no value, the entire key will be transposed. If a specific value is specified, just that value will be transposed.

Args:
begin (MetadataType): The type of metadata to transpose from.
end (MetadataType): The type of metadata to transpose to.
key (str, optional): The key to transpose. Defaults to None.
location (InsertLocation, optional): Where to insert the metadata. Defaults to InsertLocation.BOTTOM.
value (str | list[str], optional): The value to transpose. Defaults to None.

Returns:
bool: Whether the note was updated.
"""
if (begin == MetadataType.FRONTMATTER or begin == MetadataType.INLINE) and (
end == MetadataType.FRONTMATTER or end == MetadataType.INLINE
):
if begin == MetadataType.FRONTMATTER:
begin_dict = self.frontmatter.dict
else:
begin_dict = self.inline_metadata.dict

if begin_dict == {}:
return False

if key is None: # Transpose all metadata when no key is provided
for _key, _value in begin_dict.items():
self.add_metadata(key=_key, value=_value, area=end, location=location)
self.delete_metadata(key=_key, area=begin)
return True

has_changes = False
temp_dict = copy.deepcopy(begin_dict)
for k, v in begin_dict.items():
if key == k:
if value is None:
self.add_metadata(key=k, value=v, area=end, location=location)
self.delete_metadata(key=k, area=begin)
return True

if value == v:
self.add_metadata(key=k, value=v, area=end, location=location)
self.delete_metadata(key=k, area=begin)
return True

if isinstance(value, str):
if value in v:
self.add_metadata(key=k, value=value, area=end, location=location)
self.delete_metadata(key=k, value=value, area=begin)
return True

return False

if isinstance(value, list):
for value_item in value:
if value_item in v:
self.add_metadata(
key=k, value=value_item, area=end, location=location
)
self.delete_metadata(key=k, value=value_item, area=begin)
temp_dict[k].remove(value_item)
has_changes = True

if temp_dict[k] == []:
self.delete_metadata(key=k, area=begin)

return bool(has_changes)

if begin == MetadataType.TAGS:
# TODO: Implement transposing to and from tags
pass

return False
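A usage sketch for the new transpose_metadata (assumes a loaded Note instance named note; the key and value are hypothetical):

    from obsidian_metadata.models import InsertLocation, MetadataType
    # Move every frontmatter key into inline metadata at the bottom of the note.
    note.transpose_metadata(begin=MetadataType.FRONTMATTER, end=MetadataType.INLINE)
    # Move a single key/value pair the other way, inserting after the title.
    note.transpose_metadata(
        begin=MetadataType.INLINE,
        end=MetadataType.FRONTMATTER,
        key="author",
        value="Jane Doe",
        location=InsertLocation.AFTER_TITLE,
    )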
def write_delete_inline_metadata(
self, key: str = None, value: str = None, is_regex: bool = False
) -> bool:
"""For a given inline metadata key and/or key-value pair, delete it from the text of the note. If no key is provided, will delete all inline metadata from the text of the note.

IMPORTANT: This method makes no changes to the InlineMetadata object.

Args:
is_regex (bool, optional): Whether the key is a regex pattern or plain text. Defaults to False.
key (str, optional): Key to delete.
value (str, optional): Value to delete.

Returns:
bool: Whether the note was updated.
"""
if self.inline_metadata.dict != {}:
if key is None:
for _k, _v in self.inline_metadata.dict.items():
for _value in _v:
_k = re.escape(_k)
_value = re.escape(_value)
self.sub(rf"\[?{_k}:: ?\[?\[?{_value}\]?\]?", "", is_regex=True)
return True

for _k, _v in self.inline_metadata.dict.items():
if (is_regex and re.search(key, _k)) or (not is_regex and key == _k):
for _value in _v:
if value is None:
_k = re.escape(_k)
_value = re.escape(_value)
self.sub(rf"\[?{_k}:: \[?\[?{_value}\]?\]?", "", is_regex=True)
elif (is_regex and re.search(value, _value)) or (
not is_regex and value == _value
):
_k = re.escape(_k)
_value = re.escape(_value)
self.sub(rf"\[?({_k}::) ?\[?\[?{_value}\]?\]?", r"\1", is_regex=True)
return True
return False

def write_frontmatter(self, sort_keys: bool = False) -> bool:
"""Replace the frontmatter in the note with the current Frontmatter object. If the Frontmatter object is empty, will delete the frontmatter from the note.

Returns:
bool: Whether the note was updated.
"""
try:
current_frontmatter = PATTERNS.frontmatter_block.search(self.file_content).group(
"frontmatter"
@@ -413,36 +507,121 @@ class Note:
current_frontmatter = None

if current_frontmatter is None and self.frontmatter.dict == {}:
return
return False

new_frontmatter = self.frontmatter.to_yaml(sort_keys=sort_keys)
new_frontmatter = f"---\n{new_frontmatter}---\n"
new_frontmatter = "" if self.frontmatter.dict == {} else f"---\n{new_frontmatter}---\n"

if current_frontmatter is None:
self.file_content = new_frontmatter + self.file_content
return
return True

current_frontmatter = re.escape(current_frontmatter)
current_frontmatter = f"{re.escape(current_frontmatter)}\n?"
self.sub(current_frontmatter, new_frontmatter, is_regex=True)
return True
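Roughly what write_frontmatter does to a note, sketched with hypothetical content (the exact YAML layout depends on to_yaml):

    # Before: note.frontmatter.dict == {"tags": ["book"]}
    # ---
    # tags: [article]
    # ---
    # # My note
    #
    # After write_frontmatter(), the block is regenerated from the object:
    # ---
    # tags:
    #   - book
    # ---
    # # My note
    #
    # If note.frontmatter.dict == {}, the existing block is removed entirely.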
def write(self, path: Path = None) -> None:
"""Write the note's content to disk.
def write_all_inline_metadata(
self,
location: InsertLocation,
) -> bool:
"""Write all metadata found in the InlineMetadata object to the note at a specified insert location.

Args:
path (Path): Path to write the note to. Defaults to the note's path.
location (InsertLocation): Where to insert the metadata.

Raises:
typer.Exit: If the note's path is not found.
Returns:
bool: Whether the note was updated.
"""
p = self.note_path if path is None else path
if self.dry_run:
log.trace(f"DRY RUN: Writing note {p} to disk")
return
if self.inline_metadata.dict != {}:
string = ""
for k, v in sorted(self.inline_metadata.dict.items()):
for value in v:
string += f"{k}:: {value}\n"

try:
with open(p, "w") as f:
log.trace(f"Writing note {p} to disk")
f.write(self.file_content)
except FileNotFoundError as e:
alerts.error(f"Note {p} not found. Exiting")
raise typer.Exit(code=1) from e
if self.write_string(new_string=string, location=location, allow_multiple=True):
return True

return False

def write_inline_metadata_change(self, key: str, value_1: str, value_2: str = None) -> None:
"""Write changes to a specific inline metadata key or value.

Args:
key (str): Key to rename.
value_1 (str): Value to replace OR new key name (if value_2 is None).
value_2 (str, optional): New value.

"""
all_results = PATTERNS.find_inline_metadata.findall(self.file_content)
stripped_null_values = [tuple(filter(None, x)) for x in all_results]

for _k, _v in stripped_null_values:
if re.search(key, _k):
if value_2 is None:
if re.search(rf"{key}[^\\w\\d_-]+", _k):
key_text = re.split(r"[^\\w\\d_-]+$", _k)[0]
key_markdown = re.split(r"^[\\w\\d_-]+", _k)[1]
self.sub(
rf"{key_text}{key_markdown}::",
rf"{value_1}{key_markdown}::",
)
else:
self.sub(f"{_k}::", f"{value_1}::")
elif re.search(key, _k) and re.search(value_1, _v):
_k = re.escape(_k)
_v = re.escape(_v)
self.sub(f"{_k}:: ?{_v}", f"{_k}:: {value_2}", is_regex=True)
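What write_inline_metadata_change does to the note text, roughly (hypothetical key and values for illustration):

    # Rename a key (value_2 is None):  "status:: draft"  becomes  "stage:: draft"
    note.write_inline_metadata_change("status", "stage")
    # Replace a value for a key:       "stage:: draft"   becomes  "stage:: final"
    note.write_inline_metadata_change("stage", "draft", "final")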
def write_string(
self,
new_string: str,
location: InsertLocation,
allow_multiple: bool = False,
) -> bool:
"""Insert a string into the note at a requested location.

Args:
new_string (str): String to insert at the top of the note.
allow_multiple (bool): Whether to allow inserting the string if it already exists in the note.
location (InsertLocation): Location to insert the string.

Returns:
bool: Whether the note was updated.
"""
if not allow_multiple and len(re.findall(re.escape(new_string), self.file_content)) > 0:
return False

match location:
case InsertLocation.BOTTOM:
self.file_content += f"\n{new_string}"
return True
case InsertLocation.TOP:
try:
top = PATTERNS.frontmatter_block.search(self.file_content).group("frontmatter")
except AttributeError:
top = ""

if not top:
self.file_content = f"{new_string}\n{self.file_content}"
return True

new_string = f"{top}\n{new_string}"
top = re.escape(top)
self.sub(top, new_string, is_regex=True)
return True
case InsertLocation.AFTER_TITLE:
try:
top = PATTERNS.top_with_header.search(self.file_content).group("top")
except AttributeError:
top = ""

if not top:
self.file_content = f"{new_string}\n{self.file_content}"
return True

new_string = f"{top}\n{new_string}"
top = re.escape(top)
self.sub(top, new_string, is_regex=True)
return True
case _: # pragma: no cover
raise ValueError(f"Invalid location: {location}")
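A usage sketch for write_string (assumes a loaded Note instance named note; the inserted string is hypothetical):

    from obsidian_metadata.models import InsertLocation
    # Skipped if the string already exists, unless allow_multiple=True.
    note.write_string("new_key:: new_value", location=InsertLocation.TOP)
    # TOP lands directly below the frontmatter block when one exists;
    # AFTER_TITLE lands below the first header; BOTTOM appends to the note.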
@@ -25,7 +25,7 @@ class Patterns:
([-_\w\d\/\*\u263a-\U0001f999]+?)::[ ]? # Find key
(.*?)\] # Find value until closing bracket
| # Else look for key values at start of line
(?:^|[^ \w\d]+| \[) # Any non-word or non-digit character
(?:^|[^ \w\d]+|^ *>?[-\d\|]?\.? ) # Any non-word or non-digit character
([-_\w\d\/\*\u263a-\U0001f9995]+?)::(?!\n)(?:[ ](?!\n))? # Capture the key if not a new line
(.*?)$ # Capture the value
""",
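Lines the widened pattern is meant to capture, under a hedged reading of the regex (not an exhaustive list):

    # key:: value              -> key/value at the start of a line
    # > key:: value            -> inside a blockquote (the new line-start alternative)
    # - key:: value            -> inside a list item
    # Some text [key:: value]  -> bracketed key/value inside a sentence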
@@ -86,7 +86,7 @@ class Questions:
self.vault = vault
self.key = key

def _validate_existing_inline_tag(self, text: str) -> bool | str:
def _validate_existing_tag(self, text: str) -> bool | str:
"""Validate an existing inline tag.

Returns:
@@ -200,6 +200,23 @@ class Questions:

return True

def _validate_path_is_file(self, text: str) -> bool | str:
"""Validate a path is a file.

Args:
text (str): The path to validate.

Returns:
bool | str: True if the path is valid, otherwise a string with the error message.
"""
path_to_validate: Path = Path(text).expanduser().resolve()
if not path_to_validate.exists():
return f"Path does not exist: {path_to_validate}"
if not path_to_validate.is_file():
return f"Path is not a file: {path_to_validate}"

return True

def _validate_valid_vault_regex(self, text: str) -> bool | str:
"""Validate a valid regex.

@@ -274,12 +291,17 @@ class Questions:
return questionary.select(
"What do you want to do?",
choices=[
questionary.Separator("-------------------------------"),
{"name": "Vault Actions", "value": "vault_actions"},
{"name": "Export Metadata", "value": "export_metadata"},
{"name": "Inspect Metadata", "value": "inspect_metadata"},
{"name": "Filter Notes in Scope", "value": "filter_notes"},
questionary.Separator("-------------------------------"),
{"name": "Bulk changes from imported CSV", "value": "import_from_csv"},
{"name": "Add Metadata", "value": "add_metadata"},
{"name": "Rename Metadata", "value": "rename_metadata"},
{"name": "Delete Metadata", "value": "delete_metadata"},
{"name": "Rename Metadata", "value": "rename_metadata"},
{"name": "Reorganize Metadata", "value": "reorganize_metadata"},
questionary.Separator("-------------------------------"),
{"name": "Review Changes", "value": "review_changes"},
{"name": "Commit Changes", "value": "commit_changes"},
@@ -322,11 +344,11 @@ class Questions:
question, default=default, style=self.style, qmark="INPUT |"
).ask()

def ask_existing_inline_tag(self, question: str = "Enter a tag") -> str: # pragma: no cover
def ask_existing_tag(self, question: str = "Enter a tag") -> str: # pragma: no cover
"""Ask the user for an existing inline tag."""
return questionary.text(
question,
validate=self._validate_existing_inline_tag,
validate=self._validate_existing_tag,
style=self.style,
qmark="INPUT |",
).ask()
@@ -420,7 +442,7 @@ class Questions:

return self.ask_selection(
choices=choices,
question="Select the location for the metadata",
question=question,
)

def ask_new_key(self, question: str = "New key name") -> str: # pragma: no cover
@@ -472,15 +494,27 @@ class Questions:
question, validate=self._validate_number, style=self.style, qmark="INPUT |"
).ask()

def ask_path(self, question: str = "Enter a path") -> str: # pragma: no cover
def ask_path(
self, question: str = "Enter a path", valid_file: bool = False
) -> str: # pragma: no cover
"""Ask the user for a path.

Args:
question (str, optional): The question to ask. Defaults to "Enter a path".
valid_file (bool, optional): Whether the path should be a valid file. Defaults to False.

Returns:
str: A path.
"""
if valid_file:
return questionary.path(
question,
only_directories=False,
style=self.style,
validate=self._validate_path_is_file,
qmark="INPUT |",
).ask()

return questionary.path(question, style=self.style, qmark="INPUT |").ask()

def ask_selection(
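A usage sketch for the extended ask_path (assumes a Questions instance named questions; prompts are hypothetical):

    # Any path, as before.
    vault_dir = questions.ask_path(question="Path to your vault")
    # Must resolve to an existing file, enforced by _validate_path_is_file.
    csv_file = questions.ask_path(question="Path to the import CSV", valid_file=True)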
@@ -1,21 +1,23 @@
"""Obsidian vault representation."""

import csv
import json
import re
import shutil
from dataclasses import dataclass
from pathlib import Path
import json
from typing import Any

import rich.repr
import typer
from rich import box
from rich.console import Console
from rich.progress import Progress, SpinnerColumn, TextColumn
from rich.prompt import Confirm
from rich.table import Table

from obsidian_metadata._config.config import Config, VaultConfig
from obsidian_metadata._config.config import VaultConfig
from obsidian_metadata._utils import alerts
from obsidian_metadata._utils.alerts import logger as log
from obsidian_metadata._utils.console import console
from obsidian_metadata.models import InsertLocation, MetadataType, Note, VaultMetadata


@@ -45,27 +47,26 @@ class Vault:
config: VaultConfig,
dry_run: bool = False,
filters: list[VaultFilter] = [],
):
) -> None:
self.config = config.config
self.vault_path: Path = config.path
self.name = self.vault_path.name
self.insert_location: InsertLocation = self._find_insert_location()
self.dry_run: bool = dry_run
self.backup_path: Path = self.vault_path.parent / f"{self.vault_path.name}.bak"
self.exclude_paths: list[Path] = []
self.metadata = VaultMetadata()
self.exclude_paths: list[Path] = []

for p in config.exclude_paths:
self.exclude_paths.append(Path(self.vault_path / p))

self.filters = filters
self.all_note_paths = self._find_markdown_notes()

with Progress(
SpinnerColumn(),
TextColumn("[progress.description]{task.description}"),
transient=True,
) as progress:
progress.add_task(description="Processing notes...", total=None)
with console.status(
"Processing notes... [dim](Can take a while for a large vault)[/]",
spinner="bouncingBall",
):
self.all_notes: list[Note] = [
Note(note_path=p, dry_run=self.dry_run) for p in self.all_note_paths
]
@@ -75,12 +76,16 @@ class Vault:

def __rich_repr__(self) -> rich.repr.Result: # pragma: no cover
"""Define rich representation of Vault."""
yield "vault_path", self.vault_path
yield "dry_run", self.dry_run
yield "backup_path", self.backup_path
yield "num_notes", len(self.all_notes)
yield "num_notes_in_scope", len(self.notes_in_scope)
yield "config", self.config
yield "dry_run", self.dry_run
yield "exclude_paths", self.exclude_paths
yield "filters", self.filters
yield "insert_location", self.insert_location
yield "name", self.name
yield "num_notes_in_scope", len(self.notes_in_scope)
yield "num_notes", len(self.all_notes)
yield "vault_path", self.vault_path

def _filter_notes(self) -> list[Note]:
"""Filter notes by path and metadata using the filters defined in self.filters.
@@ -99,7 +104,7 @@ class Vault:
]

if _filter.tag_filter is not None:
notes_list = [n for n in notes_list if n.contains_inline_tag(_filter.tag_filter)]
notes_list = [n for n in notes_list if n.contains_tag(_filter.tag_filter)]

if _filter.key_filter is not None and _filter.value_filter is not None:
notes_list = [
@@ -113,20 +118,40 @@ class Vault:
return notes_list

def _find_insert_location(self) -> InsertLocation:
"""Find the insert location for a note.
"""Find the insert location for a note from the configuration file.

Returns:
InsertLocation: Insert location for the note.
"""
if self.config["insert_location"].upper() == "TOP":
return InsertLocation.TOP
elif self.config["insert_location"].upper() == "HEADER":

if self.config["insert_location"].upper() == "AFTER_TITLE":
return InsertLocation.AFTER_TITLE
elif self.config["insert_location"].upper() == "BOTTOM":
return InsertLocation.BOTTOM
else:

if self.config["insert_location"].upper() == "BOTTOM":
return InsertLocation.BOTTOM

return InsertLocation.BOTTOM

@property
def insert_location(self) -> InsertLocation:
"""Location to insert new or reorganized metadata.

Returns:
InsertLocation: The insert location.
"""
return self._insert_location

@insert_location.setter
def insert_location(self, value: InsertLocation) -> None:
"""Set the insert location for the vault.

Args:
value (InsertLocation): The insert location to set.
"""
self._insert_location = value
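How the configuration maps to the enum after this change, per _find_insert_location above ("HEADER" was the old spelling replaced by "AFTER_TITLE"; anything unrecognized now falls back to BOTTOM):

    # config.toml value               -> InsertLocation
    # insert_location = "TOP"         -> InsertLocation.TOP
    # insert_location = "AFTER_TITLE" -> InsertLocation.AFTER_TITLE
    # insert_location = "BOTTOM"      -> InsertLocation.BOTTOM
    # anything else                   -> InsertLocation.BOTTOM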
def _find_markdown_notes(self) -> list[Path]:
"""Build list of all markdown files in the vault.

@@ -144,12 +169,10 @@ class Vault:
def _rebuild_vault_metadata(self) -> None:
"""Rebuild vault metadata."""
self.metadata = VaultMetadata()
with Progress(
SpinnerColumn(),
TextColumn("[progress.description]{task.description}"),
transient=True,
) as progress:
progress.add_task(description="Processing notes...", total=None)
with console.status(
"Processing notes... [dim](Can take a while for a large vault)[/]",
spinner="bouncingBall",
):
for _note in self.notes_in_scope:
self.metadata.index_metadata(
area=MetadataType.FRONTMATTER, metadata=_note.frontmatter.dict
@@ -159,7 +182,7 @@ class Vault:
)
self.metadata.index_metadata(
area=MetadataType.TAGS,
metadata=_note.inline_tags.list,
metadata=_note.tags.list,
)

def add_metadata(
@@ -187,6 +210,7 @@ class Vault:

for _note in self.notes_in_scope:
if _note.add_metadata(area=area, key=key, value=value, location=location):
log.trace(f"Added metadata to {_note.note_path}")
num_changed += 1

if num_changed > 0:
@@ -199,7 +223,7 @@ class Vault:
log.debug("Backing up vault")
if self.dry_run:
alerts.dryrun(f"Backup up vault to: {self.backup_path}")
print("\n")
console.print("\n")
return

try:
@@ -231,7 +255,7 @@ class Vault:
for _note in self.notes_in_scope:
if _note.has_changes():
log.trace(f"writing to {_note.note_path}")
_note.write()
_note.commit()

def delete_backup(self) -> None:
"""Delete the vault backup."""
@@ -244,7 +268,7 @@ class Vault:
else:
alerts.info("No backup found")

def delete_inline_tag(self, tag: str) -> int:
def delete_tag(self, tag: str) -> int:
"""Delete an inline tag in the vault.

Args:
@@ -256,7 +280,8 @@ class Vault:
num_changed = 0

for _note in self.notes_in_scope:
if _note.delete_inline_tag(tag):
if _note.delete_tag(tag):
log.trace(f"Deleted tag from {_note.note_path}")
num_changed += 1

if num_changed > 0:
@@ -264,10 +289,18 @@ class Vault:

return num_changed

def delete_metadata(self, key: str, value: str = None) -> int:
def delete_metadata(
self,
key: str,
value: str = None,
area: MetadataType = MetadataType.ALL,
is_regex: bool = False,
) -> int:
"""Delete metadata in the vault.

Args:
area (MetadataType): Area of metadata to delete from.
is_regex (bool): Whether to use regex for key and value. Defaults to False.
key (str): Key to delete. Regex is supported
value (str, optional): Value to delete. Regex is supported

@@ -277,7 +310,8 @@ class Vault:
num_changed = 0

for _note in self.notes_in_scope:
if _note.delete_metadata(key, value):
if _note.delete_metadata(key=key, value=value, area=area, is_regex=is_regex):
log.trace(f"Deleted metadata from {_note.note_path}")
num_changed += 1

if num_changed > 0:
@@ -285,18 +319,21 @@ class Vault:

return num_changed

def export_metadata(self, path: str, format: str = "csv") -> None:
def export_metadata(self, path: str, export_format: str = "csv") -> None:
"""Write metadata to a csv file.

Args:
path (Path): Path to write csv file to.
export_as (str, optional): Export as 'csv' or 'json'. Defaults to "csv".
export_format (str, optional): Export as 'csv' or 'json'. Defaults to "csv".
"""
export_file = Path(path).expanduser().resolve()
if not export_file.parent.exists():
alerts.error(f"Path does not exist: {export_file.parent}")
raise typer.Exit(code=1)

match format: # noqa: E999
match export_format:
case "csv":
with open(export_file, "w", encoding="UTF8") as f:
with export_file.open(mode="w", encoding="UTF8") as f:
writer = csv.writer(f)
writer.writerow(["Metadata Type", "Key", "Value"])

@@ -324,11 +361,49 @@ class Vault:
"tags": self.metadata.tags,
}

with open(export_file, "w", encoding="UTF8") as f:
with export_file.open(mode="w", encoding="UTF8") as f:
json.dump(dict_to_dump, f, indent=4, ensure_ascii=False, sort_keys=True)

def export_notes_to_csv(self, path: str) -> None:
"""Export notes and their associated metadata to a csv file. This is useful as a template for importing metadata changes to a vault.

Args:
path (str): Path to write csv file to.
"""
export_file = Path(path).expanduser().resolve()
if not export_file.parent.exists():
alerts.error(f"Path does not exist: {export_file.parent}")
raise typer.Exit(code=1)

with export_file.open(mode="w", encoding="UTF8") as f:
writer = csv.writer(f)
writer.writerow(["path", "type", "key", "value"])

for _note in self.all_notes:
for key, value in _note.frontmatter.dict.items():
for v in value:
writer.writerow(
[_note.note_path.relative_to(self.vault_path), "frontmatter", key, v]
)

for key, value in _note.inline_metadata.dict.items():
for v in value:
writer.writerow(
[
_note.note_path.relative_to(self.vault_path),
"inline_metadata",
key,
v,
]
)

for tag in _note.tags.list:
writer.writerow(
[_note.note_path.relative_to(self.vault_path), "tag", "", f"{tag}"]
)
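The CSV template produced by export_notes_to_csv looks roughly like this (the paths come from the test fixtures used later in this diff; the keys and values are hypothetical):

    # path,type,key,value
    # 01 frontmatter/frontmatter 4.md,frontmatter,tags,book
    # 02 inline/inline 2.md,inline_metadata,status,draft
    # 03 mixed/mixed 1.md,tag,,breakfast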
def get_changed_notes(self) -> list[Note]:
"""Returns a list of notes that have changes.
"""Return a list of notes that have changes.

Returns:
list[Note]: List of notes that have changes.
@@ -353,21 +428,45 @@ class Vault:
table.add_row("Notes excluded from scope", str(self.num_excluded_notes()))
table.add_row("Active filters", str(len(self.filters)))
table.add_row("Notes with changes", str(len(self.get_changed_notes())))
table.add_row("Insert Location", str(self.insert_location.value))

Console().print(table)
console.print(table)

def list_editable_notes(self) -> None:
"""Print a list of notes within the scope that are being edited."""
table = Table(title="Notes in current scope", show_header=False, box=box.HORIZONTALS)
for _n, _note in enumerate(self.notes_in_scope, start=1):
table.add_row(str(_n), str(_note.note_path.relative_to(self.vault_path)))
Console().print(table)
console.print(table)

def move_inline_metadata(self, location: InsertLocation) -> int:
"""Move all inline metadata to the selected location.

Args:
location (InsertLocation): Location to move inline metadata to.

Returns:
int: Number of notes that had inline metadata moved.
"""
num_changed = 0

for _note in self.notes_in_scope:
if _note.write_delete_inline_metadata():
log.trace(f"Deleted inline metadata from {_note.note_path}")
num_changed += 1
_note.write_all_inline_metadata(location)
log.trace(f"Wrote all inline metadata to {_note.note_path}")

if num_changed > 0:
self._rebuild_vault_metadata()

return num_changed

def num_excluded_notes(self) -> int:
"""Count number of excluded notes."""
return len(self.all_notes) - len(self.notes_in_scope)

def rename_inline_tag(self, old_tag: str, new_tag: str) -> int:
def rename_tag(self, old_tag: str, new_tag: str) -> int:
"""Rename an inline tag in the vault.

Args:
@@ -380,7 +479,8 @@ class Vault:
num_changed = 0

for _note in self.notes_in_scope:
if _note.rename_inline_tag(old_tag, new_tag):
if _note.rename_tag(old_tag, new_tag):
log.trace(f"Renamed inline tag in {_note.note_path}")
num_changed += 1

if num_changed > 0:
@@ -389,7 +489,7 @@ class Vault:
return num_changed

def rename_metadata(self, key: str, value_1: str, value_2: str = None) -> int:
"""Renames a key or key-value pair in the note's metadata.
"""Rename a key or key-value pair in the note's metadata.

If no value is provided, will rename an entire key.

@@ -405,9 +505,101 @@ class Vault:

for _note in self.notes_in_scope:
if _note.rename_metadata(key, value_1, value_2):
log.trace(f"Renamed metadata in {_note.note_path}")
num_changed += 1

if num_changed > 0:
self._rebuild_vault_metadata()

return num_changed

def transpose_metadata(
self,
begin: MetadataType,
end: MetadataType,
key: str = None,
value: str | list[str] = None,
location: InsertLocation = None,
) -> int:
"""Transpose metadata from one type to another.

Args:
begin (MetadataType): Metadata type to transpose from.
end (MetadataType): Metadata type to transpose to.
key (str, optional): Key to transpose. Defaults to None.
value (str, optional): Value to transpose. Defaults to None.
location (InsertLocation, optional): Location to insert metadata. (Defaults to `vault.config.insert_location`)

Returns:
int: Number of notes that had metadata transposed.
"""
if location is None:
location = self.insert_location

num_changed = 0
for _note in self.notes_in_scope:
if _note.transpose_metadata(
begin=begin,
end=end,
key=key,
value=value,
location=location,
):
num_changed += 1
log.trace(f"Transposed metadata in {_note.note_path}")

if num_changed > 0:
self._rebuild_vault_metadata()

return num_changed
def update_from_dict(self, dictionary: dict[str, Any]) -> int:
"""Update note metadata from a dictionary. This is a destructive operation. All metadata in the specified notes not in the dictionary will be removed.

Requires a dictionary with the note path as the key and a dictionary of metadata as the value. Each key must have a list of associated dictionaries in the following format:

{
'type': 'frontmatter|inline_metadata|tag',
'key': 'string',
'value': 'string'
}

Args:
dictionary (dict[str, Any]): Dictionary to update metadata from.

Returns:
int: Number of notes that had metadata updated.
"""
num_changed = 0

for _note in self.all_notes:
path = _note.note_path.relative_to(self.vault_path)
if str(path) in dictionary:
log.debug(f"Bulk update metadata for '{path}'")
num_changed += 1
_note.delete_all_metadata()
for row in dictionary[str(path)]:
if row["type"].lower() == "frontmatter":
_note.add_metadata(
area=MetadataType.FRONTMATTER, key=row["key"], value=row["value"]
)

if row["type"].lower() == "inline_metadata":
_note.add_metadata(
area=MetadataType.INLINE,
key=row["key"],
value=row["value"],
location=self.insert_location,
)

if row["type"].lower() == "tag":
_note.add_metadata(
area=MetadataType.TAGS,
value=row["value"],
location=self.insert_location,
)

if num_changed > 0:
self._rebuild_vault_metadata()

return num_changed
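The payload update_from_dict expects, keyed by vault-relative note path (hypothetical values; this mirrors the CSV template produced by export_notes_to_csv):

    updates = {
        "01 frontmatter/frontmatter 4.md": [
            {"type": "frontmatter", "key": "tags", "value": "book"},
            {"type": "inline_metadata", "key": "status", "value": "draft"},
            {"type": "tag", "key": "", "value": "breakfast"},
        ]
    }
    num_changed = vault.update_from_dict(updates)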
@@ -58,7 +58,8 @@ def test_usage(capsys):
assert captured.out == "USAGE | This prints in usage\n"

alerts.usage(
"Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua"
"Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua",
width=80,
)
captured = capsys.readouterr()
assert "USAGE | Lorem ipsum dolor sit amet" in captured.out
@@ -106,7 +107,7 @@ def test_logging(capsys, tmp_path, verbosity, log_to_file) -> None:
if verbosity >= 3:
assert logging.is_trace() is True
captured = capsys.readouterr()
assert captured.out == ""
assert not captured.out

assert logging.is_trace("trace text") is True
captured = capsys.readouterr()
@@ -127,7 +128,7 @@ def test_logging(capsys, tmp_path, verbosity, log_to_file) -> None:
if verbosity >= 2:
assert logging.is_debug() is True
captured = capsys.readouterr()
assert captured.out == ""
assert not captured.out

assert logging.is_debug("debug text") is True
captured = capsys.readouterr()
@@ -148,7 +149,7 @@ def test_logging(capsys, tmp_path, verbosity, log_to_file) -> None:
if verbosity >= 1:
assert logging.is_info() is True
captured = capsys.readouterr()
assert captured.out == ""
assert not captured.out

assert logging.is_info("info text") is True
captured = capsys.readouterr()
@@ -164,11 +165,11 @@ def test_logging(capsys, tmp_path, verbosity, log_to_file) -> None:

log.info("This is Info logging")
captured = capsys.readouterr()
assert captured.out == ""
assert not captured.out

assert logging.is_default() is True
captured = capsys.readouterr()
assert captured.out == ""
assert not captured.out

assert logging.is_default("default text") is True
captured = capsys.readouterr()

@@ -13,11 +13,16 @@ from pathlib import Path
import pytest

from obsidian_metadata.models.enums import MetadataType
from tests.helpers import Regex
from tests.helpers import Regex, remove_ansi
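A plausible implementation of the remove_ansi test helper imported here (an assumption; the actual helper lives in tests/helpers.py and is not shown in this diff):

    import re

    # Matches CSI escape sequences such as "\x1b[31m" emitted by rich.
    ANSI_ESCAPE = re.compile(r"\x1b\[[0-9;]*[a-zA-Z]")

    def remove_ansi(text: str) -> str:
        """Strip ANSI color/style escape sequences from captured output."""
        return ANSI_ESCAPE.sub("", text)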
def test_instantiate_application(test_application) -> None:
"""Test application."""
"""Test application.

GIVEN an application
WHEN the application is instantiated
THEN check the attributes are set correctly
"""
app = test_application
app._load_vault()

@@ -29,7 +34,12 @@ def test_instantiate_application(test_application) -> None:


def test_abort(test_application, mocker, capsys) -> None:
"""Test renaming a key."""
"""Test aborting the application.

GIVEN an application
WHEN the user selects "abort" from the main menu
THEN check the application exits
"""
app = test_application
app._load_vault()
mocker.patch(
@@ -38,12 +48,17 @@ def test_abort(test_application, mocker, capsys) -> None:
)

app.application_main()
captured = capsys.readouterr()
assert "Done!" in captured.out
captured = remove_ansi(capsys.readouterr().out)
assert "Done!" in captured
def test_add_metadata_frontmatter(test_application, mocker, capsys) -> None:
"""Test adding new metadata to the vault."""
"""Test adding new metadata to the vault.

GIVEN an application
WHEN the user wants to update a key in the frontmatter
THEN check the application updates the key
"""
app = test_application
app._load_vault()
mocker.patch(
@@ -65,12 +80,17 @@ def test_add_metadata_frontmatter(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"SUCCESS +\| Added metadata to.*\d+.*notes", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"SUCCESS +\| Added metadata to \d+ notes", re.DOTALL)


def test_add_metadata_inline(test_application, mocker, capsys) -> None:
"""Test adding new metadata to the vault."""
"""Test adding new metadata to the vault.

GIVEN an application
WHEN the user wants to add a key in the inline metadata
THEN check the application updates the key
"""
app = test_application
app._load_vault()
mocker.patch(
@@ -92,12 +112,17 @@ def test_add_metadata_inline(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"SUCCESS +\| Added metadata to.*\d+.*notes", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"SUCCESS +\| Added metadata to \d+ notes", re.DOTALL)


def test_add_metadata_tag(test_application, mocker, capsys) -> None:
"""Test adding new metadata to the vault."""
"""Test adding new metadata to the vault.

GIVEN an application
WHEN the user wants to add a tag
THEN check the application adds the tag
"""
app = test_application
app._load_vault()
mocker.patch(
@@ -115,12 +140,17 @@ def test_add_metadata_tag(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"SUCCESS +\| Added metadata to.*\d+.*notes", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"SUCCESS +\| Added metadata to \d+ notes", re.DOTALL)
def test_delete_inline_tag(test_application, mocker, capsys) -> None:
"""Test renaming an inline tag."""
def test_delete_tag_1(test_application, mocker, capsys) -> None:
"""Test deleting an inline tag.

GIVEN an application
WHEN the user wants to delete an inline tag
THEN check the application deletes the tag
"""
app = test_application
app._load_vault()
mocker.patch(
@@ -129,35 +159,45 @@ def test_delete_inline_tag(test_application, mocker, capsys) -> None:
)
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_selection",
side_effect=["delete_inline_tag", "back"],
side_effect=["delete_tag", "back"],
)
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_existing_inline_tag",
return_value="not_a_tag_in_vault",
"obsidian_metadata.models.application.Questions.ask_existing_tag",
return_value="breakfast",
)

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"WARNING +\| No notes were changed", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"SUCCESS +\| Deleted inline tag: breakfast in \d+ notes", re.DOTALL)


def test_delete_tag_2(test_application, mocker, capsys) -> None:
"""Test deleting an inline tag.

GIVEN an application
WHEN the user wants to delete an inline tag that does not exist
THEN check the application does not update any notes
"""
app = test_application
app._load_vault()
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",
side_effect=["delete_metadata", KeyError],
)
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_selection",
side_effect=["delete_inline_tag", "back"],
side_effect=["delete_tag", "back"],
)
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_existing_inline_tag",
return_value="breakfast",
"obsidian_metadata.models.application.Questions.ask_existing_tag",
return_value="not_a_tag_in_vault",
)

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"SUCCESS +\| Deleted.*\d+.*notes", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert "WARNING | No notes were changed" in captured
def test_delete_key(test_application, mocker, capsys) -> None:
@@ -179,8 +219,8 @@ def test_delete_key(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"WARNING +\| No notes found with a.*key.*matching", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert r"WARNING | No notes found with a key matching: \d{7}" in captured

mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",
@@ -197,10 +237,8 @@ def test_delete_key(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(
r"SUCCESS +\|.*Deleted.*keys.*matching:.*d\\w\+.*from.*10", re.DOTALL
)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"SUCCESS \| Deleted keys matching: d\\w\+ from \d+ notes", re.DOTALL)


def test_delete_value(test_application, mocker, capsys) -> None:
@@ -225,8 +263,8 @@ def test_delete_value(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"WARNING +\| No notes found matching:", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert r"WARNING | No notes found matching: area: \d{7}" in captured

mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",
@@ -246,10 +284,8 @@ def test_delete_value(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(
r"SUCCESS +\| Deleted value.*\^front\\w\+\$.*from.*key.*area.*in.*\d+.*notes", re.DOTALL
)
captured = remove_ansi(capsys.readouterr().out)
assert r"SUCCESS | Deleted value ^front\w+$ from key area in 4 notes" in captured


def test_filter_notes(test_application, mocker, capsys) -> None:
@@ -271,10 +307,10 @@ def test_filter_notes(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"SUCCESS +\| Loaded.*\d+.*notes from.*\d+.*total", re.DOTALL)
assert "02 inline/inline 2.md" in captured.out
assert "03 mixed/mixed 1.md" not in captured.out
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"SUCCESS +\| Loaded \d+ notes from \d+ total", re.DOTALL)
assert "02 inline/inline 2.md" in captured
assert "03 mixed/mixed 1.md" not in captured

mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",
@@ -326,11 +362,11 @@ def test_filter_clear(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert "02 inline/inline 2.md" in captured.out
assert "03 mixed/mixed 1.md" in captured.out
assert "01 frontmatter/frontmatter 4.md" in captured.out
assert "04 no metadata/no_metadata_1.md " in captured.out
captured = remove_ansi(capsys.readouterr().out)
assert "02 inline/inline 2.md" in captured
assert "03 mixed/mixed 1.md" in captured
assert "01 frontmatter/frontmatter 4.md" in captured
assert "04 no metadata/no_metadata_1.md " in captured


def test_inspect_metadata_all(test_application, mocker, capsys) -> None:
@@ -348,11 +384,11 @@ def test_inspect_metadata_all(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"type +│ article", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"type +│ article", re.DOTALL)
def test_rename_inline_tag(test_application, mocker, capsys) -> None:
def test_rename_tag(test_application, mocker, capsys) -> None:
"""Test renaming an inline tag."""
app = test_application
app._load_vault()
@@ -362,10 +398,10 @@ def test_rename_inline_tag(test_application, mocker, capsys) -> None:
)
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_selection",
side_effect=["rename_inline_tag", "back"],
side_effect=["rename_tag", "back"],
)
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_existing_inline_tag",
"obsidian_metadata.models.application.Questions.ask_existing_tag",
return_value="not_a_tag",
)
mocker.patch(
@@ -375,8 +411,8 @@ def test_rename_inline_tag(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"WARNING +\| No notes were changed", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert "No notes were changed" in captured

mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",
@@ -384,10 +420,10 @@ def test_rename_inline_tag(test_application, mocker, capsys) -> None:
)
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_selection",
side_effect=["rename_inline_tag", "back"],
side_effect=["rename_tag", "back"],
)
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_existing_inline_tag",
"obsidian_metadata.models.application.Questions.ask_existing_tag",
return_value="breakfast",
)
mocker.patch(
@@ -397,8 +433,8 @@ def test_rename_inline_tag(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"Renamed.*breakfast.*to.*new_tag.*in.*\d+.*notes", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"Renamed breakfast to new_tag in \d+ notes", re.DOTALL)


def test_rename_key(test_application, mocker, capsys) -> None:
@@ -424,8 +460,8 @@ def test_rename_key(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert "WARNING | No notes were changed" in captured.out
captured = remove_ansi(capsys.readouterr().out)
assert "WARNING | No notes were changed" in captured

mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",
@@ -446,8 +482,8 @@ def test_rename_key(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"Renamed.*tags.*to.*new_tags.*in.*\d+.*notes", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"Renamed tags to new_tags in \d+ notes", re.DOTALL)


def test_rename_value_fail(test_application, mocker, capsys) -> None:
@@ -476,8 +512,8 @@ def test_rename_value_fail(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"WARNING +\| No notes were changed", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert "WARNING | No notes were changed" in captured

mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",
@@ -501,11 +537,10 @@ def test_rename_value_fail(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(
r"SUCCESS +\| Renamed.*'area:frontmatter'.*to.*'area:new_key'", re.DOTALL
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(
r"SUCCESS +\| Renamed 'area:frontmatter' to 'area:new_key' in \d+ notes", re.DOTALL
)
assert captured.out == Regex(r".*in.*\d+.*notes.*", re.DOTALL)


def test_review_no_changes(test_application, mocker, capsys) -> None:
@@ -518,8 +553,8 @@ def test_review_no_changes(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"INFO +\| No changes to review", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert "INFO | No changes to review" in captured


def test_review_changes(test_application, mocker, capsys) -> None:
@@ -530,10 +565,6 @@ def test_review_changes(test_application, mocker, capsys) -> None:
"obsidian_metadata.models.application.Questions.ask_application_main",
side_effect=["rename_metadata", "review_changes", KeyError],
)
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_confirm",
return_value=True,
)
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_existing_key",
return_value="tags",
@@ -548,10 +579,63 @@ def test_review_changes(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r".*Found.*\d+.*changed notes in the vault.*", re.DOTALL)
assert "- tags:" in captured.out
assert "+ new_tags:" in captured.out
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r".*Found \d+ changed notes in the vault", re.DOTALL)
assert "- tags:" in captured
assert "+ new_tags:" in captured


def test_transpose_metadata_1(test_application, mocker, capsys) -> None:
"""Transpose metadata.

GIVEN a test application
WHEN the user wants to transpose all inline metadata to frontmatter
THEN the metadata is transposed
"""
app = test_application
app._load_vault()

assert app.vault.metadata.inline_metadata["inline_key"] == ["inline_key_value"]
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",
side_effect=["reorganize_metadata", KeyError],
)
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_selection",
side_effect=["inline_to_frontmatter", "transpose_all"],
)
with pytest.raises(KeyError):
app.application_main()

assert app.vault.metadata.inline_metadata == {}
assert app.vault.metadata.frontmatter["inline_key"] == ["inline_key_value"]
captured = remove_ansi(capsys.readouterr().out)
assert "SUCCESS | Transposed Inline Metadata to Frontmatter in 5 notes" in captured


def test_transpose_metadata_2(test_application, mocker) -> None:
"""Transpose metadata.

GIVEN a test application
WHEN the user wants to transpose all frontmatter to inline metadata
THEN the metadata is transposed
"""
app = test_application
app._load_vault()

assert app.vault.metadata.frontmatter["date_created"] == ["2022-12-21", "2022-12-22"]
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",
side_effect=["reorganize_metadata", KeyError],
)
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_selection",
side_effect=["frontmatter_to_inline", "transpose_all"],
)
with pytest.raises(KeyError):
app.application_main()
assert app.vault.metadata.inline_metadata["date_created"] == ["2022-12-21", "2022-12-22"]
assert app.vault.metadata.frontmatter == {}


def test_vault_backup(test_application, mocker, capsys) -> None:
@@ -569,8 +653,10 @@ def test_vault_backup(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"SUCCESS +\|.*application\.bak", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(
r"SUCCESS +\| Vault backed up to:[-\w\d\/\s]+application\.bak", re.DOTALL
)


def test_vault_delete(test_application, mocker, capsys, tmp_path) -> None:
@@ -590,5 +676,5 @@ def test_vault_delete(test_application, mocker, capsys, tmp_path) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"SUCCESS +\| Backup deleted", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"SUCCESS +\| Backup deleted", re.DOTALL)
@@ -1,6 +1,9 @@
# type: ignore
"""Test obsidian-metadata CLI."""

import shutil
from pathlib import Path

from typer.testing import CliRunner

from obsidian_metadata.cli import app
@@ -14,16 +17,23 @@ def test_version() -> None:
    """Test printing version and then exiting."""
    result = runner.invoke(app, ["--version"])
    assert result.exit_code == 0
    assert result.output == Regex(r"obsidian_metadata: v\d+\.\d+\.\d+$")
    assert "obsidian_metadata: v" in result.output


def test_application(test_vault, tmp_path) -> None:
def test_application(tmp_path) -> None:
    """Test the application."""
    vault_path = test_vault
    source_dir = Path(__file__).parent / "fixtures" / "test_vault"
    dest_dir = Path(tmp_path / "vault")

    if not source_dir.exists():
        raise FileNotFoundError(f"Sample vault not found: {source_dir}")

    shutil.copytree(source_dir, dest_dir)

    config_path = tmp_path / "config.toml"
    result = runner.invoke(
        app,
        ["--vault-path", vault_path, "--config-file", config_path],
        ["--vault-path", dest_dir, "--config-file", config_path],
        # input=KeyInputs.DOWN + KeyInputs.DOWN + KeyInputs.DOWN + KeyInputs.ENTER,  # noqa: ERA001
    )

@@ -41,3 +51,25 @@ def test_application(test_vault, tmp_path) -> None:

    assert banner in result.output
    assert result.exit_code == 1


def test_export_template(tmp_path) -> None:
    """Test the export template command."""
    source_dir = Path(__file__).parent / "fixtures" / "test_vault"
    dest_dir = Path(tmp_path / "vault")

    if not source_dir.exists():
        raise FileNotFoundError(f"Sample vault not found: {source_dir}")

    shutil.copytree(source_dir, dest_dir)

    config_path = tmp_path / "config.toml"
    export_path = tmp_path / "export_template.csv"
    result = runner.invoke(
        app,
        ["--vault-path", dest_dir, "--config-file", config_path, "--export-template", export_path],
    )

    assert "SUCCESS | Exported metadata to" in result.output
    assert result.exit_code == 0
    assert export_path.exists()

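These CLI tests rely on Typer's in-process `CliRunner`, which captures output and exit codes without spawning a subprocess. A standalone sketch of the invocation pattern (the `greet` command below is illustrative, not a real obsidian-metadata command):

```python
import typer
from typer.testing import CliRunner

app = typer.Typer()


@app.command()
def greet(name: str = "world") -> None:
    typer.echo(f"hello {name}")


runner = CliRunner()


def test_greet() -> None:
    # invoke() runs the command in-process; parameters with defaults become options.
    result = runner.invoke(app, ["--name", "tester"])
    assert result.exit_code == 0
    assert "hello tester" in result.output
```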
@@ -108,9 +108,9 @@ def test_no_config_no_vault(tmp_path, mocker) -> None:
    # Folders within the vault to ignore when indexing metadata
    exclude_paths = [".git", ".obsidian"]

    # Location to add metadata. One of:
    # Location to add new metadata. One of:
    #    TOP:         Directly after frontmatter.
    #    AFTER_TITLE: After a header following frontmatter.
    #    AFTER_TITLE: After the first header following frontmatter.
    #    BOTTOM:      The bottom of the note
    insert_location = "BOTTOM"
    """

@@ -9,6 +9,13 @@ import pytest
from obsidian_metadata._config import Config
from obsidian_metadata.models.application import Application

CONFIG_1 = """
["Test Vault"]
exclude_paths = [".git", ".obsidian", "ignore_folder"]
insert_location = "TOP"
path = "TMPDIR_VAULT_PATH"
"""


def remove_all(root: Path):
    """Remove all files and directories in a directory."""
@@ -38,8 +45,14 @@ def sample_note(tmp_path) -> Path:


@pytest.fixture()
def short_note(tmp_path) -> Path:
    """Fixture which creates a temporary short note file."""
def short_notes(tmp_path) -> Path:
    """Fixture which creates two temporary note files.

    Yields:
        Tuple[Path, Path]: Tuple of two temporary note files.
            1. Very short note with frontmatter
            2. Very short note without any frontmatter
    """
    source_file1: Path = Path("tests/fixtures/short_textfile.md")
    source_file2: Path = Path("tests/fixtures/no_metadata.md")
    if not source_file1.exists():
@@ -89,10 +102,16 @@ def test_vault(tmp_path) -> Path:
        raise FileNotFoundError(f"Sample vault not found: {source_dir}")

    shutil.copytree(source_dir, dest_dir)
    yield dest_dir
    config_path = Path(tmp_path / "config.toml")
    config_path.write_text(CONFIG_1.replace("TMPDIR_VAULT_PATH", str(dest_dir)))
    config = Config(config_path=config_path)
    vault_config = config.vaults[0]

    yield vault_config

    # after test - remove fixtures
    shutil.rmtree(dest_dir)
    config_path.unlink()

    if backup_dir.exists():
        shutil.rmtree(backup_dir)

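The reworked `test_vault` fixture now yields a fully wired vault config instead of a bare path: it copies the pristine fixture vault into `tmp_path`, writes a config whose `TMPDIR_VAULT_PATH` placeholder is substituted at runtime, yields, and cleans up after the test. A generic sketch of that yield-fixture shape (all names here are illustrative):

```python
import shutil
from pathlib import Path

import pytest

TEMPLATE = 'path = "TMPDIR"\n'


@pytest.fixture()
def workspace(tmp_path: Path) -> Path:
    # Setup: an isolated data copy plus a config pointing at it.
    data_dir = tmp_path / "data"
    data_dir.mkdir()
    config = tmp_path / "config.toml"
    config.write_text(TEMPLATE.replace("TMPDIR", str(data_dir)))

    yield config  # the test body runs here

    # Teardown: pytest reclaims tmp_path eventually, but explicit cleanup
    # mirrors the conftest change above.
    shutil.rmtree(data_dir, ignore_errors=True)
    config.unlink(missing_ok=True)
```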
6 tests/fixtures/broken_frontmatter.md vendored Normal file
@@ -0,0 +1,6 @@
---
tags:
invalid = = "content"
---

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est la
@@ -3,25 +3,16 @@ area: frontmatter
date_created: 2022-12-22
date_modified: 2022-12-22
tags:
  - food/fruit/apple
  - food/fruit/pear
  - dinner
  - lunch
  - breakfast
thoughts:
  rating: 8
  reviewable: false
levels:
  level1:
    - level1a
    - level1b
  level2:
    - level2a
    - level2b
  - food/fruit/apple
  - food/fruit/pear
  - dinner
  - lunch
  - breakfast
author: John Doe
status: new
type: ["book", "article", "note", "one-off"]
---

# Page Title H1

# Headings

@@ -3,25 +3,16 @@ area: frontmatter
date_created: 2022-12-22
date_modified: 2022-11-14
tags:
  - food/fruit/apple
  - food/fruit/pear
  - dinner
  - lunch
  - breakfast
thoughts:
  rating: 8
  reviewable: false
levels:
  level1:
    - level1a
    - level1b
  level2:
    - level2a
    - level2b
  - food/fruit/apple
  - food/fruit/pear
  - dinner
  - lunch
  - breakfast
author: John Doe
status: new
type: ["book", "article", "note"]
---

# Page Title H1

# Headings

@@ -3,25 +3,16 @@ area: frontmatter
date_created: 2022-12-22
date_modified: 2022-10-01
tags:
  - food/fruit/apple
  - food/fruit/pear
  - dinner
  - lunch
  - breakfast
thoughts:
  rating: 8
  reviewable: false
levels:
  level1:
    - level1a
    - level1b
  level2:
    - level2a
    - level2b
  - food/fruit/apple
  - food/fruit/pear
  - dinner
  - lunch
  - breakfast
author: John Doe
status: new
type: ["book", "article", "note"]
---

# Page Title H1

# Headings

@@ -3,21 +3,11 @@ area: frontmatter
date_created: 2022-12-22
date_modified: 2022-12-22
tags:
  - food/fruit/apple
  - food/fruit/pear
  - dinner
  - lunch
  - breakfast
thoughts:
  rating: 8
  reviewable: false
levels:
  level1:
    - level1a
    - level1b
  level2:
    - level2a
    - level2b
  - food/fruit/apple
  - food/fruit/pear
  - dinner
  - lunch
  - breakfast
author: John Doe
status: new
type: ["book", "article", "note"]

@@ -6,10 +6,6 @@ tags:
  - breakfast
  - not_food
author: John Doe
nested_list:
  nested_list_one:
    - nested_list_one_a
    - nested_list_one_b
type:
  - article
  - note

17 tests/fixtures/test_vault/test1.md vendored
@@ -1,14 +1,15 @@
---
date_created: 2022-12-22
tags:
  - shared_tag
  - frontmatter_tag1
  - frontmatter_tag2
  -
  - 📅/frontmatter_tag3
  - shared_tag
  - frontmatter_tag1
  - frontmatter_tag2
  - 📅/frontmatter_tag3
frontmatter_Key1: author name
frontmatter_Key2: ["article", "note"]
shared_key1: shared_key1_value
shared_key1:
  - shared_key1_value
  - shared_key1_value3
shared_key2: shared_key2_value1
---

@@ -18,10 +19,12 @@ top_key1:: top_key1_value
**top_key2:: top_key2_value**
top_key3:: [[top_key3_value_as_link]]
shared_key1:: shared_key1_value
shared_key1:: shared_key1_value2
shared_key2:: shared_key2_value2
emoji_📅_key:: emoji_📅_key_value
key📅:: 📅_key_value

# Heading 1

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. #intext_tag1 Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu [intext_key:: intext_value] fugiat nulla (#intext_tag2) pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est lab

```python

@@ -22,6 +22,19 @@ class KeyInputs:
    THREE = "3"


def remove_ansi(text) -> str:
    """Remove ANSI escape sequences from a string.

    Args:
        text (str): String to remove ANSI escape sequences from.

    Returns:
        str: String without ANSI escape sequences.
    """
    ansi_chars = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -\/]*[@-~]")
    return ansi_chars.sub("", text)


class Regex:
    """Assert that a given string meets some expectations.

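`remove_ansi` exists because Rich wraps console output in ANSI color codes, which would otherwise break substring and regex assertions. A quick standalone check of the same escape-sequence regex:

```python
import re

ANSI = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -\/]*[@-~]")

# "SUCCESS" wrapped in a green color code, roughly as Rich would emit it.
colored = "\x1b[32mSUCCESS\x1b[0m | Backup deleted"
assert ANSI.sub("", colored) == "SUCCESS | Backup deleted"
```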
530 tests/metadata_frontmatter_test.py Normal file
@@ -0,0 +1,530 @@
|
||||
# type: ignore
|
||||
"""Test the Frontmatter object from metadata.py."""
|
||||
|
||||
import pytest
|
||||
|
||||
from obsidian_metadata.models.metadata import Frontmatter
|
||||
|
||||
FRONTMATTER_CONTENT: str = """
|
||||
---
|
||||
tags:
|
||||
- tag_1
|
||||
- tag_2
|
||||
-
|
||||
- 📅/tag_3
|
||||
frontmatter_Key1: "frontmatter_Key1_value"
|
||||
frontmatter_Key2: ["note", "article"]
|
||||
shared_key1: "shared_key1_value"
|
||||
---
|
||||
more content
|
||||
|
||||
---
|
||||
horizontal: rule
|
||||
---
|
||||
"""
|
||||
|
||||
INLINE_CONTENT = """\
|
||||
repeated_key:: repeated_key_value1
|
||||
#inline_tag_top1,#inline_tag_top2
|
||||
**bold_key1**:: bold_key1_value
|
||||
**bold_key2:: bold_key2_value**
|
||||
link_key:: [[link_key_value]]
|
||||
tag_key:: #tag_key_value
|
||||
emoji_📅_key:: emoji_📅_key_value
|
||||
**#bold_tag**
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. [in_text_key1:: in_text_key1_value] Ut enim ad minim veniam, quis nostrud exercitation [in_text_key2:: in_text_key2_value] ullamco laboris nisi ut aliquip ex ea commodo consequat. #in_text_tag Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||
|
||||
```python
|
||||
#ffffff
|
||||
# This is sample text [no_key:: value]with tags and metadata
|
||||
#in_codeblock_tag1
|
||||
#ffffff;
|
||||
in_codeblock_key:: in_codeblock_value
|
||||
The quick brown fox jumped over the #in_codeblock_tag2
|
||||
```
|
||||
repeated_key:: repeated_key_value2
|
||||
"""
|
||||
|
||||
|
||||
def test_create_1() -> None:
|
||||
"""Test frontmatter creation.
|
||||
|
||||
GIVEN valid frontmatter content
|
||||
WHEN a Frontmatter object is created
|
||||
THEN parse the YAML frontmatter and add it to the object
|
||||
"""
|
||||
frontmatter = Frontmatter(INLINE_CONTENT)
|
||||
assert frontmatter.dict == {}
|
||||
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.dict == {
|
||||
"frontmatter_Key1": ["frontmatter_Key1_value"],
|
||||
"frontmatter_Key2": ["article", "note"],
|
||||
"shared_key1": ["shared_key1_value"],
|
||||
"tags": ["tag_1", "tag_2", "📅/tag_3"],
|
||||
}
|
||||
assert frontmatter.dict_original == {
|
||||
"frontmatter_Key1": ["frontmatter_Key1_value"],
|
||||
"frontmatter_Key2": ["article", "note"],
|
||||
"shared_key1": ["shared_key1_value"],
|
||||
"tags": ["tag_1", "tag_2", "📅/tag_3"],
|
||||
}
|
||||
|
||||
|
||||
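`test_create_1` pins down the normalization rules: scalars become single-item lists, empty list entries are dropped, and only the first `---` block is parsed. A rough sketch of parsing that satisfies those expectations, assuming PyYAML as the backend (the project's actual parser may use a different YAML library and sort values):

```python
import re

import yaml  # assumption: PyYAML; obsidian-metadata may use another YAML library


def parse_frontmatter(text: str) -> dict[str, list[str]]:
    """Parse the first ----delimited block and normalize all values to lists."""
    match = re.search(r"^\s*---\n(.*?)\n---", text, flags=re.DOTALL)
    if not match:
        return {}
    data = yaml.safe_load(match.group(1)) or {}
    normalized: dict[str, list[str]] = {}
    for key, value in data.items():
        if isinstance(value, list):
            normalized[key] = [str(v) for v in value if v is not None]
        else:
            normalized[key] = [] if value is None else [str(value)]
    return normalized


content = "---\ntags:\n  - tag_1\n  -\nkey: value\n---\nbody"
assert parse_frontmatter(content) == {"tags": ["tag_1"], "key": ["value"]}
```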
def test_create_2() -> None:
|
||||
"""Test frontmatter creation error.
|
||||
|
||||
GIVEN invalid frontmatter content
|
||||
WHEN a Frontmatter object is created
|
||||
THEN raise ValueError
|
||||
"""
|
||||
fn = """---
|
||||
tags: tag
|
||||
invalid = = "content"
|
||||
---
|
||||
"""
|
||||
with pytest.raises(AttributeError):
|
||||
Frontmatter(fn)
|
||||
|
||||
|
||||
def test_create_3():
|
||||
"""Test frontmatter creation error.
|
||||
|
||||
GIVEN empty frontmatter content
|
||||
WHEN a Frontmatter object is created
|
||||
THEN set the dict to an empty dict
|
||||
"""
|
||||
content = "---\n\n---"
|
||||
frontmatter = Frontmatter(content)
|
||||
assert frontmatter.dict == {}
|
||||
|
||||
|
||||
def test_create_4():
|
||||
"""Test frontmatter creation error.
|
||||
|
||||
GIVEN empty frontmatter content with a yaml marker
|
||||
WHEN a Frontmatter object is created
|
||||
THEN set the dict to an empty dict
|
||||
"""
|
||||
content = "---\n-\n---"
|
||||
frontmatter = Frontmatter(content)
|
||||
assert frontmatter.dict == {}
|
||||
|
||||
|
||||
def test_add_1():
|
||||
"""Test frontmatter add() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the add() method is called with an existing key
|
||||
THEN return False
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
|
||||
assert frontmatter.add("frontmatter_Key1") is False
|
||||
|
||||
|
||||
def test_add_2():
|
||||
"""Test frontmatter add() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the add() method is called with an existing key and existing value
|
||||
THEN return False
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.add("frontmatter_Key1", "frontmatter_Key1_value") is False
|
||||
|
||||
|
||||
def test_add_3():
|
||||
"""Test frontmatter add() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the add() method is called with a new key
|
||||
THEN return True and add the key to the dict
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.add("added_key") is True
|
||||
assert "added_key" in frontmatter.dict
|
||||
|
||||
|
||||
def test_add_4():
|
||||
"""Test frontmatter add() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the add() method is called with a new key and a new value
|
||||
THEN return True and add the key and the value to the dict
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.add("added_key", "added_value") is True
|
||||
assert frontmatter.dict["added_key"] == ["added_value"]
|
||||
|
||||
|
||||
def test_add_5():
|
||||
"""Test frontmatter add() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the add() method is called with an existing key and a new value
|
||||
THEN return True and add the value to the dict
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.add("frontmatter_Key1", "new_value") is True
|
||||
assert frontmatter.dict["frontmatter_Key1"] == ["frontmatter_Key1_value", "new_value"]
|
||||
|
||||
|
||||
def test_add_6():
|
||||
"""Test frontmatter add() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the add() method is called with an existing key and a list of new values
|
||||
THEN return True and add the values to the dict
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.add("frontmatter_Key1", ["new_value", "new_value2"]) is True
|
||||
assert frontmatter.dict["frontmatter_Key1"] == [
|
||||
"frontmatter_Key1_value",
|
||||
"new_value",
|
||||
"new_value2",
|
||||
]
|
||||
|
||||
|
||||
def test_add_7():
|
||||
"""Test frontmatter add() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the add() method is called with an existing key and a list of values including an existing value
|
||||
THEN return True and add the new values to the dict
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert (
|
||||
frontmatter.add("frontmatter_Key1", ["frontmatter_Key1_value", "new_value", "new_value2"])
|
||||
is True
|
||||
)
|
||||
assert frontmatter.dict["frontmatter_Key1"] == [
|
||||
"frontmatter_Key1_value",
|
||||
"new_value",
|
||||
"new_value2",
|
||||
]
|
||||
|
||||
|
||||
def test_contains_1():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the contains() method is called with a key
|
||||
THEN return True if the key is found
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.contains("frontmatter_Key1") is True
|
||||
|
||||
|
||||
def test_contains_2():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the contains() method is called with a key
|
||||
THEN return False if the key is not found
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.contains("no_key") is False
|
||||
|
||||
|
||||
def test_contains_3():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the contains() method is called with a key and a value
|
||||
THEN return True if the key and value is found
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.contains("frontmatter_Key2", "article") is True
|
||||
|
||||
|
||||
def test_contains_4():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the contains() method is called with a key and a value
|
||||
THEN return False if the key and value is not found
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.contains("frontmatter_Key2", "no value") is False
|
||||
|
||||
|
||||
def test_contains_5():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the contains() method is called with a key regex
|
||||
THEN return True if a key matches the regex
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.contains(r"\d$", is_regex=True) is True
|
||||
|
||||
|
||||
def test_contains_6():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the contains() method is called with a key regex
|
||||
THEN return False if no key matches the regex
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.contains(r"^\d", is_regex=True) is False
|
||||
|
||||
|
||||
def test_contains_7():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the contains() method is called with a key and value regex
|
||||
THEN return True if a value matches the regex
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.contains("key", r"\w\d_", is_regex=True) is True
|
||||
|
||||
|
||||
def test_contains_8():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the contains() method is called with a key and value regex
|
||||
THEN return False if a value does not match the regex
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.contains("key", r"_\d", is_regex=True) is False
|
||||
|
||||
|
||||
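The `contains()` cases above alternate literal and regex matching over both keys and values. One plausible shape for that logic, sketched under the assumption that non-regex input is escaped and searched the same way (not the project's actual implementation):

```python
import re


def contains(
    meta: dict[str, list[str]], key: str, value: str | None = None, *, is_regex: bool = False
) -> bool:
    key_pattern = key if is_regex else re.escape(key)
    matched_keys = [k for k in meta if re.search(key_pattern, k)]
    if value is None:
        return bool(matched_keys)
    value_pattern = value if is_regex else re.escape(value)
    return any(re.search(value_pattern, v) for k in matched_keys for v in meta[k])


meta = {"shared_key1": ["shared_key1_value"], "tags": ["tag_1"]}
assert contains(meta, r"\d$", is_regex=True) is True  # a key ends in a digit
assert contains(meta, "key", r"\w\d_", is_regex=True) is True  # a value matches
```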
def test_delete_1():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the delete() method is called with a key that does not exist
|
||||
THEN return False
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.delete("no key") is False
|
||||
|
||||
|
||||
def test_delete_2():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the delete() method is called with an existing key and a value that does not exist
|
||||
THEN return False
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.delete("tags", "no value") is False
|
||||
|
||||
|
||||
def test_delete_3():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the delete() method is called with a regex that does not match any keys
|
||||
THEN return False
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.delete(r"\d{3}", is_regex=True) is False
|
||||
|
||||
|
||||
def test_delete_4():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the delete() method is called with an existing key and a regex that does not match any values
|
||||
THEN return False
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.delete("tags", r"\d{5}", is_regex=True) is False
|
||||
|
||||
|
||||
def test_delete_5():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the delete() method is called with an existing key and an existing value
|
||||
THEN return True and delete the value from the dict
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.delete("tags", "tag_2") is True
|
||||
assert "tag_2" not in frontmatter.dict["tags"]
|
||||
assert "tags" in frontmatter.dict
|
||||
|
||||
|
||||
def test_delete_6():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the delete() method is called with an existing key
|
||||
THEN return True and delete the key from the dict
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.delete("tags") is True
|
||||
assert "tags" not in frontmatter.dict
|
||||
|
||||
|
||||
def test_delete_7():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the delete() method is called with a regex that matches a key
|
||||
THEN return True and delete the matching keys from the dict
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.delete(r"front\w+", is_regex=True) is True
|
||||
assert "frontmatter_Key1" not in frontmatter.dict
|
||||
assert "frontmatter_Key2" not in frontmatter.dict
|
||||
|
||||
|
||||
def test_delete_8():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the delete() method is called with an existing key and a regex that matches values
|
||||
THEN return True and delete the matching values
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.delete("tags", r"\w+_[23]", is_regex=True) is True
|
||||
assert "tag_2" not in frontmatter.dict["tags"]
|
||||
assert "📅/tag_3" not in frontmatter.dict["tags"]
|
||||
assert "tag_1" in frontmatter.dict["tags"]
|
||||
|
||||
|
||||
def test_delete_all():
|
||||
"""Test Frontmatter delete_all method.
|
||||
|
||||
GIVEN Frontmatter with multiple keys
|
||||
WHEN delete_all is called
|
||||
THEN all keys and values are deleted
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
frontmatter.delete_all()
|
||||
assert frontmatter.dict == {}
|
||||
|
||||
|
||||
def test_has_changes_1():
|
||||
"""Test frontmatter has_changes() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN no changes have been made to the object
|
||||
THEN return False
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.has_changes() is False
|
||||
|
||||
|
||||
def test_has_changes_2():
|
||||
"""Test frontmatter has_changes() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN changes have been made to the object
|
||||
THEN return True
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
frontmatter.dict["new key"] = ["new value"]
|
||||
assert frontmatter.has_changes() is True
|
||||
|
||||
|
||||
def test_rename_1():
|
||||
"""Test frontmatter rename() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the rename() method is called with a key
|
||||
THEN return False if the key is not found
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.rename("no key", "new key") is False
|
||||
|
||||
|
||||
def test_rename_2():
|
||||
"""Test frontmatter rename() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the rename() method is called with an existing key and non-existing value
|
||||
THEN return False
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.rename("tags", "no tag", "new key") is False
|
||||
|
||||
|
||||
def test_rename_3():
|
||||
"""Test frontmatter rename() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the rename() method is called with an existing key
|
||||
THEN return True and rename the key
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.rename("frontmatter_Key1", "new key") is True
|
||||
assert "frontmatter_Key1" not in frontmatter.dict
|
||||
assert frontmatter.dict["new key"] == ["frontmatter_Key1_value"]
|
||||
|
||||
|
||||
def test_rename_4():
|
||||
"""Test frontmatter rename() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the rename() method is called with an existing key and value
|
||||
THEN return True and rename the value
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.rename("tags", "tag_2", "new tag") is True
|
||||
assert "tag_2" not in frontmatter.dict["tags"]
|
||||
assert "new tag" in frontmatter.dict["tags"]
|
||||
|
||||
|
||||
def test_rename_5():
|
||||
"""Test frontmatter rename() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
WHEN the rename() method is called with an existing key and value and the new value already exists
|
||||
THEN return True and remove the old value leaving one instance of the new value
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.rename("tags", "tag_1", "tag_2") is True
|
||||
assert "tag_1" not in frontmatter.dict["tags"]
|
||||
assert frontmatter.dict["tags"] == ["tag_2", "📅/tag_3"]
|
||||
|
||||
|
||||
def test_to_yaml_1():
|
||||
"""Test Frontmatter to_yaml method.
|
||||
|
||||
GIVEN a dictionary
|
||||
WHEN the to_yaml method is called
|
||||
THEN return a string with the yaml representation of the dictionary
|
||||
"""
|
||||
new_frontmatter: str = """\
|
||||
tags:
|
||||
- tag_1
|
||||
- tag_2
|
||||
- 📅/tag_3
|
||||
frontmatter_Key1: frontmatter_Key1_value
|
||||
frontmatter_Key2:
|
||||
- article
|
||||
- note
|
||||
shared_key1: shared_key1_value
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.to_yaml() == new_frontmatter
|
||||
|
||||
|
||||
def test_to_yaml_2():
|
||||
"""Test Frontmatter to_yaml method.
|
||||
|
||||
GIVEN a dictionary
|
||||
WHEN the to_yaml method is called with sort_keys=True
|
||||
THEN return a string with the sorted yaml representation of the dictionary
|
||||
"""
|
||||
new_frontmatter_sorted: str = """\
|
||||
frontmatter_Key1: frontmatter_Key1_value
|
||||
frontmatter_Key2:
|
||||
- article
|
||||
- note
|
||||
shared_key1: shared_key1_value
|
||||
tags:
|
||||
- tag_1
|
||||
- tag_2
|
||||
- 📅/tag_3
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.to_yaml(sort_keys=True) == new_frontmatter_sorted
|
||||
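The `to_yaml()` assertions boil down to a YAML dump with optional key sorting; the sketch below shows the behavior being pinned, assuming a PyYAML-style dumper (the project may use a different one):

```python
import yaml

data = {"tags": ["tag_1"], "author": ["John Doe"]}

# sort_keys=True makes the output deterministic and alphabetized;
# default_flow_style=False keeps the block style the tests compare against.
dumped = yaml.dump(data, sort_keys=True, default_flow_style=False, allow_unicode=True)
assert dumped.startswith("author:")
```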
438 tests/metadata_inline_test.py Normal file
@@ -0,0 +1,438 @@
|
||||
# type: ignore
|
||||
"""Test inline metadata from metadata.py."""
|
||||
|
||||
from obsidian_metadata.models.metadata import InlineMetadata
|
||||
|
||||
FRONTMATTER_CONTENT: str = """
|
||||
---
|
||||
tags:
|
||||
- tag_1
|
||||
- tag_2
|
||||
-
|
||||
- 📅/tag_3
|
||||
frontmatter_Key1: "frontmatter_Key1_value"
|
||||
frontmatter_Key2: ["note", "article"]
|
||||
shared_key1: "shared_key1_value"
|
||||
---
|
||||
more content
|
||||
|
||||
---
|
||||
horizontal: rule
|
||||
---
|
||||
"""
|
||||
|
||||
INLINE_CONTENT = """\
|
||||
key1:: value1
|
||||
key1:: value2
|
||||
key1:: value3
|
||||
key2:: value1
|
||||
Paragraph of text with an [inline_key:: value1] and [inline_key:: value2] and [inline_key:: value3] which should do it.
|
||||
> blockquote_key:: value1
|
||||
> blockquote_key:: value2
|
||||
|
||||
- list_key:: value1
|
||||
- list_key:: value2
|
||||
|
||||
1. list_key:: value1
|
||||
2. list_key:: value2
|
||||
"""
|
||||
|
||||
|
||||
def test__grab_inline_metadata_1():
|
||||
"""Test grab inline metadata.
|
||||
|
||||
GIVEN content that has no inline metadata
|
||||
WHEN grab_inline_metadata is called
|
||||
THEN an empty dict is returned
|
||||
|
||||
"""
|
||||
content = """
|
||||
---
|
||||
frontmatter_key1: frontmatter_key1_value
|
||||
---
|
||||
not_a_key: not_a_value
|
||||
```
|
||||
key:: in_codeblock
|
||||
```
|
||||
"""
|
||||
inline = InlineMetadata(content)
|
||||
assert inline.dict == {}
|
||||
|
||||
|
||||
def test__grab_inline_metadata_2():
|
||||
"""Test grab inline metadata.
|
||||
|
||||
GIVEN content that has inline metadata
|
||||
WHEN grab_inline_metadata is called
|
||||
THEN the inline metadata is parsed and returned as a dict
|
||||
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.dict == {
|
||||
"blockquote_key": ["value1", "value2"],
|
||||
"inline_key": ["value1", "value2", "value3"],
|
||||
"key1": ["value1", "value2", "value3"],
|
||||
"key2": ["value1"],
|
||||
"list_key": ["value1", "value2", "value1", "value2"],
|
||||
}
|
||||
|
||||
|
||||
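`INLINE_CONTENT` mixes whole-line `key:: value` fields, bold-wrapped fields, blockquote and list items, and bracketed `[key:: value]` fields embedded in prose. A loose extraction sketch in the spirit of Dataview-style inline fields (deliberately simpler than the project's real regex):

```python
import re
from collections import defaultdict

# Two simplified shapes: a whole-line "key:: value" (allowing a little
# decoration such as **, >, or list markers) and a bracketed "[key:: value]".
LINE_FIELD = re.compile(r"^\W{0,4}(\w+)\W{0,2}::\s*(.+?)\W{0,2}$")
BRACKET_FIELD = re.compile(r"\[(\w+)::\s*([^\]]+)\]")


def grab_inline(text: str) -> dict[str, list[str]]:
    fields: dict[str, list[str]] = defaultdict(list)
    for line in text.splitlines():
        for key, value in BRACKET_FIELD.findall(line):
            fields[key].append(value.strip())
        if "[" not in line:
            match = LINE_FIELD.match(line)
            if match:
                fields[match.group(1)].append(match.group(2).strip())
    return dict(fields)


assert grab_inline("key1:: value1\nSome [inline_key:: v] text") == {
    "key1": ["value1"],
    "inline_key": ["v"],
}
```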
def test_add_1():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the add() method is called with an existing key
|
||||
THEN return False
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.add("key1") is False
|
||||
|
||||
|
||||
def test_add_2():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the add() method is called with an existing key and existing value
|
||||
THEN return False
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.add("key1", "value1") is False
|
||||
|
||||
|
||||
def test_add_3():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the add() method is called with a new key
|
||||
THEN return True and add the key to the dict
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.add("added_key") is True
|
||||
assert "added_key" in inline.dict
|
||||
|
||||
|
||||
def test_add_4():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the add() method is called with a new key and a new value
|
||||
THEN return True and add the key and the value to the dict
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.add("added_key", "added_value") is True
|
||||
assert inline.dict["added_key"] == ["added_value"]
|
||||
|
||||
|
||||
def test_add_5():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the add() method is called with an existing key and a new value
|
||||
THEN return True and add the value to the dict
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.add("key1", "new_value") is True
|
||||
assert inline.dict["key1"] == ["value1", "value2", "value3", "new_value"]
|
||||
|
||||
|
||||
def test_add_6():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the add() method is called with an existing key and a list of new values
|
||||
THEN return True and add the values to the dict
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.add("key2", ["new_value", "new_value2"]) is True
|
||||
assert inline.dict["key2"] == ["new_value", "new_value2", "value1"]
|
||||
|
||||
|
||||
def test_add_7():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the add() method is called with an existing key and a list of values including an existing value
|
||||
THEN return True and add the new values to the dict
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.add("key1", ["value1", "new_value", "new_value2"]) is True
|
||||
assert inline.dict["key1"] == ["new_value", "new_value2", "value1", "value2", "value3"]
|
||||
|
||||
|
||||
def test_add_8():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the add() method is called with a new key and a list of values
|
||||
THEN return True and add the new values to the dict
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.add("new_key", ["value1", "new_value", "new_value2"]) is True
|
||||
assert inline.dict["new_key"] == ["value1", "new_value", "new_value2"]
|
||||
|
||||
|
||||
def test_contains_1():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the contains() method is called with a key
|
||||
THEN return True if the key is found
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.contains("key1") is True
|
||||
|
||||
|
||||
def test_contains_2():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the contains() method is called with a key
|
||||
THEN return False if the key is not found
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.contains("no_key") is False
|
||||
|
||||
|
||||
def test_contains_3():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the contains() method is called with a key and a value
|
||||
THEN return True if the key and value is found
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.contains("key1", "value1") is True
|
||||
|
||||
|
||||
def test_contains_4():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the contains() method is called with a key and a value
|
||||
THEN return False if the key and value is not found
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.contains("key1", "no value") is False
|
||||
|
||||
|
||||
def test_contains_5():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the contains() method is called with a key regex
|
||||
THEN return True if a key matches the regex
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.contains(r"\d$", is_regex=True) is True
|
||||
|
||||
|
||||
def test_contains_6():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the contains() method is called with a key regex
|
||||
THEN return False if no key matches the regex
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.contains(r"^\d", is_regex=True) is False
|
||||
|
||||
|
||||
def test_contains_7():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the contains() method is called with a key and value regex
|
||||
THEN return True if a value matches the regex
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.contains(r"key\d", r"\w\d", is_regex=True) is True
|
||||
|
||||
|
||||
def test_contains_8():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the contains() method is called with a key and value regex
|
||||
THEN return False if a value does not match the regex
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.contains("key1", r"_\d", is_regex=True) is False
|
||||
|
||||
|
||||
def test_delete_1():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the delete() method is called with a key that does not exist
|
||||
THEN return False
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.delete("no key") is False
|
||||
|
||||
|
||||
def test_delete_2():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the delete() method is called with an existing key and a value that does not exist
|
||||
THEN return False
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.delete("key1", "no value") is False
|
||||
|
||||
|
||||
def test_delete_3():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the delete() method is called with a regex that does not match any keys
|
||||
THEN return False
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.delete(r"\d{3}", is_regex=True) is False
|
||||
|
||||
|
||||
def test_delete_4():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the delete() method is called with an existing key and a regex that does not match any values
|
||||
THEN return False
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.delete("key1", r"\d{5}", is_regex=True) is False
|
||||
|
||||
|
||||
def test_delete_5():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the delete() method is called with an existing key and an existing value
|
||||
THEN return True and delete the value from the dict
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.delete("key1", "value1") is True
|
||||
assert "value1" not in inline.dict["key1"]
|
||||
assert "key1" in inline.dict
|
||||
|
||||
|
||||
def test_delete_6():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the delete() method is called with an existing key
|
||||
THEN return True and delete the key from the dict
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.delete("key1") is True
|
||||
assert "key1" not in inline.dict
|
||||
|
||||
|
||||
def test_delete_7():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the delete() method is called with a regex that matches a key
|
||||
THEN return True and delete the matching keys from the dict
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.delete(r"key\w+", is_regex=True) is True
|
||||
assert "key1" not in inline.dict
|
||||
assert "key2" not in inline.dict
|
||||
|
||||
|
||||
def test_delete_8():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the delete() method is called with an existing key and a regex that matches values
|
||||
THEN return True and delete the matching values
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.delete("key1", r"\w+\d", is_regex=True) is True
|
||||
assert "value1" not in inline.dict["key1"]
|
||||
assert "value2" not in inline.dict["key1"]
|
||||
assert "value3" not in inline.dict["key1"]
|
||||
|
||||
|
||||
def test_has_changes_1():
|
||||
"""Test InlineMetadata has_changes() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN no changes have been made to the object
|
||||
THEN return False
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.has_changes() is False
|
||||
|
||||
|
||||
def test_has_changes_2():
|
||||
"""Test InlineMetadata has_changes() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN changes have been made to the object
|
||||
THEN return True
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
inline.dict["new key"] = ["new value"]
|
||||
assert inline.has_changes() is True
|
||||
|
||||
|
||||
def test_rename_1():
|
||||
"""Test InlineMetadata rename() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the rename() method is called with a key
|
||||
THEN return False if the key is not found
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.rename("no key", "new key") is False
|
||||
|
||||
|
||||
def test_rename_2():
|
||||
"""Test InlineMetadata rename() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the rename() method is called with an existing key and non-existing value
|
||||
THEN return False
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.rename("key1", "no value", "new value") is False
|
||||
|
||||
|
||||
def test_rename_3():
|
||||
"""Test InlineMetadata rename() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the rename() method is called with an existing key
|
||||
THEN return True and rename the key
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.rename("key1", "new key") is True
|
||||
assert "key1" not in inline.dict
|
||||
assert inline.dict["new key"] == ["value1", "value2", "value3"]
|
||||
|
||||
|
||||
def test_rename_4():
|
||||
"""Test InlineMetadata rename() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the rename() method is called with an existing key and value
|
||||
THEN return True and rename the value
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.rename("key1", "value1", "new value") is True
|
||||
assert "value1" not in inline.dict["key1"]
|
||||
assert "new value" in inline.dict["key1"]
|
||||
|
||||
|
||||
def test_rename_5():
|
||||
"""Test InlineMetadata rename() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the rename() method is called with an existing key and value and the new value already exists
|
||||
THEN return True and remove the old value leaving one instance of the new value
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.rename("key1", "value1", "value2") is True
|
||||
assert inline.dict["key1"] == ["value2", "value3"]
|
||||
367 tests/metadata_tags_test.py Normal file
@@ -0,0 +1,367 @@
|
||||
# type: ignore
|
||||
"""Test inline tags from metadata.py."""
|
||||
|
||||
from obsidian_metadata.models.metadata import InlineTags
|
||||
|
||||
CONTENT = """\
|
||||
#tag1 #tag2
|
||||
> #tag3
|
||||
**#tag4**
|
||||
I am a sentence with #tag5 and #tag6 in the middle
|
||||
#tag🙈7
|
||||
#tag/8
|
||||
#tag/👋/9
|
||||
"""
|
||||
|
||||
|
||||
def test__grab_inline_tags_1() -> None:
|
||||
"""Test _grab_inline_tags() method.
|
||||
|
||||
GIVEN a string with a codeblock
|
||||
WHEN the method is called
|
||||
THEN the codeblock is ignored
|
||||
"""
|
||||
content = """
|
||||
some text
|
||||
|
||||
```python
|
||||
#tag1
|
||||
#tag2
|
||||
```
|
||||
|
||||
```
|
||||
#tag3
|
||||
#tag4
|
||||
```
|
||||
"""
|
||||
tags = InlineTags(content)
|
||||
assert tags.list == []
|
||||
assert tags.list_original == []
|
||||
|
||||
|
||||
def test__grab_inline_tags_2() -> None:
|
||||
"""Test _grab_inline_tags() method.
|
||||
|
||||
GIVEN a string with tags
|
||||
WHEN the method is called
|
||||
THEN the tags are extracted
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.list == [
|
||||
"tag/8",
|
||||
"tag/👋/9",
|
||||
"tag1",
|
||||
"tag2",
|
||||
"tag3",
|
||||
"tag4",
|
||||
"tag5",
|
||||
"tag6",
|
||||
"tag🙈7",
|
||||
]
|
||||
assert tags.list_original == [
|
||||
"tag/8",
|
||||
"tag/👋/9",
|
||||
"tag1",
|
||||
"tag2",
|
||||
"tag3",
|
||||
"tag4",
|
||||
"tag5",
|
||||
"tag6",
|
||||
"tag🙈7",
|
||||
]
|
||||
|
||||
|
||||
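Tag extraction must skip fenced code blocks (so `#in_codeblock_tag1` and hex colors such as `#ffffff` inside code never count) while keeping unicode and nested `/` tags. A compact two-step sketch, strip fences first, then collect tags (illustrative only, not the project's exact patterns):

```python
import re

FENCE = re.compile(r"```.*?```", flags=re.DOTALL)
TAG = re.compile(r"#([^\s#,()\[\]]+)")


def grab_tags(text: str) -> list[str]:
    # Drop fenced blocks so tags inside code are never collected.
    visible = FENCE.sub("", text)
    return sorted(set(TAG.findall(visible)))


content = "#tag1 text #tag/8\n```python\n#ignored\n```\n"
assert grab_tags(content) == ["tag/8", "tag1"]
```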
def test_add_1():
|
||||
"""Test add() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the add() method is called with a tag that exists in the list
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.add("tag1") is False
|
||||
|
||||
|
||||
def test_add_2():
|
||||
"""Test add() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the add() method is called with a new tag
|
||||
THEN return True and add the tag to the list
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.add("new_tag") is True
|
||||
assert "new_tag" in tags.list
|
||||
|
||||
|
||||
def test_add_3():
|
||||
"""Test add() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the add() method is called with a list of new tags
|
||||
THEN return True and add the tags to the list
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
new_tags = ["new_tag1", "new_tag2"]
|
||||
assert tags.add(new_tags) is True
|
||||
assert "new_tag1" in tags.list
|
||||
assert "new_tag2" in tags.list
|
||||
|
||||
|
||||
def test_add_4():
|
||||
"""Test add() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the add() method is called with a list of tags, some of which already exist
|
||||
THEN return True and add only the new tags to the list
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
new_tags = ["new_tag1", "new_tag2", "tag1", "tag2"]
|
||||
assert tags.add(new_tags) is True
|
||||
assert tags.list == [
|
||||
"new_tag1",
|
||||
"new_tag2",
|
||||
"tag/8",
|
||||
"tag/👋/9",
|
||||
"tag1",
|
||||
"tag2",
|
||||
"tag3",
|
||||
"tag4",
|
||||
"tag5",
|
||||
"tag6",
|
||||
"tag🙈7",
|
||||
]
|
||||
|
||||
|
||||
def test_add_5():
|
||||
"""Test add() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the add() method is called with a list of tags which are already in the list
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
new_tags = ["tag1", "tag2"]
|
||||
assert tags.add(new_tags) is False
|
||||
assert "tag1" in tags.list
|
||||
assert "tag2" in tags.list
|
||||
|
||||
|
||||
def test_add_6():
|
||||
"""Test add() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the add() method is called with a list of tags which have a # in the name
|
||||
THEN strip the # from the tag name
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
new_tags = ["#tag1", "#tag2", "#new_tag"]
|
||||
assert tags.add(new_tags) is True
|
||||
assert tags.list == [
|
||||
"new_tag",
|
||||
"tag/8",
|
||||
"tag/👋/9",
|
||||
"tag1",
|
||||
"tag2",
|
||||
"tag3",
|
||||
"tag4",
|
||||
"tag5",
|
||||
"tag6",
|
||||
"tag🙈7",
|
||||
]
|
||||
|
||||
|
||||
def test_add_7():
|
||||
"""Test add() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the add() method is called with a tag which has a # in the name
|
||||
THEN strip the # from the tag name
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.add("#tag1") is False
|
||||
assert tags.add("#new_tag") is True
|
||||
assert "new_tag" in tags.list
|
||||
|
||||
|
||||
def test_contains_1():
|
||||
"""Test contains() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the contains() method is called with a tag that exists in the list
|
||||
THEN return True
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.contains("tag1") is True
|
||||
|
||||
|
||||
def test_contains_2():
|
||||
"""Test contains() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the contains() method is called with a tag that does not exist in the list
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.contains("no_tag") is False
|
||||
|
||||
|
||||
def test_contains_3():
|
||||
"""Test contains() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the contains() method is called with a regex that matches a tag in the list
|
||||
THEN return True
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.contains(r"tag\d", is_regex=True) is True
|
||||
|
||||
|
||||
def test_contains_4():
|
||||
"""Test contains() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the contains() method is called with a regex that does not match any tags in the list
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.contains(r"tag\d\d", is_regex=True) is False
|
||||
|
||||
|
||||
def test_delete_1():
|
||||
"""Test delete() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the delete() method is called with a tag that exists in the list
|
||||
THEN return True and remove the tag from the list
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.delete("tag1") is True
|
||||
assert "tag1" not in tags.list
|
||||
|
||||
|
||||
def test_delete_2():
|
||||
"""Test delete() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the delete() method is called with a tag that does not exist in the list
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.delete("no_tag") is False
|
||||
|
||||
|
||||
def test_delete_3():
|
||||
"""Test delete() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the delete() method is called with a regex that matches a tag in the list
|
||||
THEN return True and remove the tag from the list
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.delete(r"tag\d") is True
|
||||
assert tags.list == ["tag/8", "tag/👋/9", "tag🙈7"]
|
||||
|
||||
|
||||
def test_delete_4():
|
||||
"""Test delete() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the delete() method is called with a regex that does not match any tags in the list
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.delete(r"tag\d\d") is False
|
||||
|
||||
|
||||
def test_has_changes_1():
|
||||
"""Test has_changes() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the has_changes() method is called
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.has_changes() is False
|
||||
|
||||
|
||||
def test_has_changes_2():
|
||||
"""Test has_changes() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the has_changes() method is called after the list has been updated
|
||||
THEN return True
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
tags.list = ["new_tag"]
|
||||
assert tags.has_changes() is True
|
||||
|
||||
|
||||
def test_rename_1():
|
||||
"""Test rename() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the rename() method is called with a tag that exists in the list
|
||||
THEN return True and rename the tag in the list
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.rename("tag1", "new_tag") is True
|
||||
assert "tag1" not in tags.list
|
||||
assert "new_tag" in tags.list
|
||||
|
||||
|
||||
def test_rename_2():
|
||||
"""Test rename() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the rename() method is called with a tag that does not exist in the list
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.rename("no_tag", "new_tag") is False
|
||||
assert "new_tag" not in tags.list
|
||||
|
||||
|
||||
def test_rename_3():
|
||||
"""Test rename() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the rename() method is called with a tag that exists and the new tag name already exists in the list
|
||||
THEN return True and ensure the new tag name is only in the list once
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.rename(r"tag1", "tag2") is True
|
||||
assert tags.list == [
|
||||
"tag/8",
|
||||
"tag/👋/9",
|
||||
"tag2",
|
||||
"tag3",
|
||||
"tag4",
|
||||
"tag5",
|
||||
"tag6",
|
||||
"tag🙈7",
|
||||
]
|
||||
|
||||
|
||||
def test_rename_4():
|
||||
"""Test rename() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the rename() method is called with a new tag value that is None
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.rename("tag1", None) is False
|
||||
assert "tag1" in tags.list
|
||||
|
||||
|
||||
def test_rename_5():
|
||||
"""Test rename() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the rename() method is called with a new tag value that is empty
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.rename("tag1", "") is False
|
||||
assert "tag1" in tags.list
|
||||
@@ -1,746 +0,0 @@
|
||||
# type: ignore
|
||||
"""Test metadata.py."""
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from obsidian_metadata.models.enums import MetadataType
|
||||
from obsidian_metadata.models.metadata import (
|
||||
Frontmatter,
|
||||
InlineMetadata,
|
||||
InlineTags,
|
||||
VaultMetadata,
|
||||
)
|
||||
from tests.helpers import Regex
|
||||
|
||||
FILE_CONTENT: str = Path("tests/fixtures/test_vault/test1.md").read_text()
|
||||
TAG_LIST: list[str] = ["tag 1", "tag 2", "tag 3"]
|
||||
METADATA: dict[str, list[str]] = {
|
||||
"frontmatter_Key1": ["author name"],
|
||||
"frontmatter_Key2": ["note", "article"],
|
||||
"shared_key1": ["shared_key1_value"],
|
||||
"shared_key2": ["shared_key2_value"],
|
||||
"tags": ["tag 2", "tag 1", "tag 3"],
|
||||
"top_key1": ["top_key1_value"],
|
||||
"top_key2": ["top_key2_value"],
|
||||
"top_key3": ["top_key3_value"],
|
||||
"intext_key": ["intext_key_value"],
|
||||
}
|
||||
METADATA_2: dict[str, list[str]] = {"key1": ["value1"], "key2": ["value2", "value3"]}
|
||||
FRONTMATTER_CONTENT: str = """
|
||||
---
|
||||
tags:
|
||||
- tag_1
|
||||
- tag_2
|
||||
-
|
||||
- 📅/tag_3
|
||||
frontmatter_Key1: "frontmatter_Key1_value"
|
||||
frontmatter_Key2: ["note", "article"]
|
||||
shared_key1: "shared_key1_value"
|
||||
---
|
||||
more content
|
||||
|
||||
---
|
||||
horizontal: rule
|
||||
---
|
||||
"""
|
||||
INLINE_CONTENT = """\
|
||||
repeated_key:: repeated_key_value1
|
||||
|
||||
#inline_tag_top1,#inline_tag_top2
|
||||
**bold_key1**:: bold_key1_value
|
||||
**bold_key2:: bold_key2_value**
|
||||
link_key:: [[link_key_value]]
|
||||
tag_key:: #tag_key_value
|
||||
emoji_📅_key:: emoji_📅_key_value
|
||||
**#bold_tag**
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. [in_text_key1:: in_text_key1_value] Ut enim ad minim veniam, quis nostrud exercitation [in_text_key2:: in_text_key2_value] ullamco laboris nisi ut aliquip ex ea commodo consequat. #in_text_tag Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||
|
||||
```python
|
||||
#ffffff
|
||||
# This is sample text [no_key:: value]with tags and metadata
|
||||
#in_codeblock_tag1
|
||||
#ffffff;
|
||||
in_codeblock_key:: in_codeblock_value
|
||||
The quick brown fox jumped over the #in_codeblock_tag2
|
||||
```
|
||||
repeated_key:: repeated_key_value2
|
||||
"""


def test_frontmatter_create() -> None:
    """Test frontmatter creation."""
    frontmatter = Frontmatter(INLINE_CONTENT)
    assert frontmatter.dict == {}

    frontmatter = Frontmatter(FRONTMATTER_CONTENT)
    assert frontmatter.dict == {
        "frontmatter_Key1": ["frontmatter_Key1_value"],
        "frontmatter_Key2": ["article", "note"],
        "shared_key1": ["shared_key1_value"],
        "tags": ["tag_1", "tag_2", "📅/tag_3"],
    }
    assert frontmatter.dict_original == {
        "frontmatter_Key1": ["frontmatter_Key1_value"],
        "frontmatter_Key2": ["article", "note"],
        "shared_key1": ["shared_key1_value"],
        "tags": ["tag_1", "tag_2", "📅/tag_3"],
    }


def test_frontmatter_contains() -> None:
    """Test frontmatter contains."""
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)

    assert frontmatter.contains("frontmatter_Key1") is True
    assert frontmatter.contains("frontmatter_Key2", "article") is True
    assert frontmatter.contains("frontmatter_Key3") is False
    assert frontmatter.contains("frontmatter_Key2", "no value") is False

    assert frontmatter.contains(r"\d$", is_regex=True) is True
    assert frontmatter.contains(r"^\d", is_regex=True) is False
    assert frontmatter.contains("key", r"_\d", is_regex=True) is False
    assert frontmatter.contains("key", r"\w\d_", is_regex=True) is True


def test_frontmatter_add() -> None:
    """Test frontmatter add."""
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)

    assert frontmatter.add("frontmatter_Key1") is False
    assert frontmatter.add("added_key") is True
    assert frontmatter.dict == {
        "added_key": [],
        "frontmatter_Key1": ["frontmatter_Key1_value"],
        "frontmatter_Key2": ["article", "note"],
        "shared_key1": ["shared_key1_value"],
        "tags": ["tag_1", "tag_2", "📅/tag_3"],
    }

    assert frontmatter.add("added_key", "added_value") is True
    assert frontmatter.dict == {
        "added_key": ["added_value"],
        "frontmatter_Key1": ["frontmatter_Key1_value"],
        "frontmatter_Key2": ["article", "note"],
        "shared_key1": ["shared_key1_value"],
        "tags": ["tag_1", "tag_2", "📅/tag_3"],
    }

    assert frontmatter.add("added_key", "added_value_2") is True
    assert frontmatter.dict == {
        "added_key": ["added_value", "added_value_2"],
        "frontmatter_Key1": ["frontmatter_Key1_value"],
        "frontmatter_Key2": ["article", "note"],
        "shared_key1": ["shared_key1_value"],
        "tags": ["tag_1", "tag_2", "📅/tag_3"],
    }

    assert frontmatter.add("added_key", ["added_value_3", "added_value_4"]) is True
    assert frontmatter.dict == {
        "added_key": ["added_value", "added_value_2", "added_value_3", "added_value_4"],
        "frontmatter_Key1": ["frontmatter_Key1_value"],
        "frontmatter_Key2": ["article", "note"],
        "shared_key1": ["shared_key1_value"],
        "tags": ["tag_1", "tag_2", "📅/tag_3"],
    }

    assert frontmatter.add("added_key2", ["added_value_1", "added_value_2"]) is True
    assert frontmatter.dict == {
        "added_key": ["added_value", "added_value_2", "added_value_3", "added_value_4"],
        "added_key2": ["added_value_1", "added_value_2"],
        "frontmatter_Key1": ["frontmatter_Key1_value"],
        "frontmatter_Key2": ["article", "note"],
        "shared_key1": ["shared_key1_value"],
        "tags": ["tag_1", "tag_2", "📅/tag_3"],
    }

    assert frontmatter.add("added_key3", "added_value_1") is True
    assert frontmatter.dict == {
        "added_key": ["added_value", "added_value_2", "added_value_3", "added_value_4"],
        "added_key2": ["added_value_1", "added_value_2"],
        "added_key3": ["added_value_1"],
        "frontmatter_Key1": ["frontmatter_Key1_value"],
        "frontmatter_Key2": ["article", "note"],
        "shared_key1": ["shared_key1_value"],
        "tags": ["tag_1", "tag_2", "📅/tag_3"],
    }

    assert frontmatter.add("added_key3", "added_value_1") is False


def test_frontmatter_rename() -> None:
    """Test frontmatter rename."""
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)
    assert frontmatter.dict == {
        "frontmatter_Key1": ["frontmatter_Key1_value"],
        "frontmatter_Key2": ["article", "note"],
        "shared_key1": ["shared_key1_value"],
        "tags": ["tag_1", "tag_2", "📅/tag_3"],
    }

    assert frontmatter.rename("no key", "new key") is False
    assert frontmatter.rename("tags", "no tag", "new key") is False

    assert frontmatter.has_changes() is False
    assert frontmatter.rename("tags", "tag_2", "new tag") is True

    assert frontmatter.dict["tags"] == ["new tag", "tag_1", "📅/tag_3"]
    assert frontmatter.rename("tags", "old_tags") is True
    assert frontmatter.dict["old_tags"] == ["new tag", "tag_1", "📅/tag_3"]
    assert "tags" not in frontmatter.dict

    assert frontmatter.has_changes() is True


def test_frontmatter_delete() -> None:
    """Test Frontmatter delete method."""
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)
    assert frontmatter.dict == {
        "frontmatter_Key1": ["frontmatter_Key1_value"],
        "frontmatter_Key2": ["article", "note"],
        "shared_key1": ["shared_key1_value"],
        "tags": ["tag_1", "tag_2", "📅/tag_3"],
    }

    assert frontmatter.delete("no key") is False
    assert frontmatter.delete("tags", "no value") is False
    assert frontmatter.delete(r"\d{3}") is False
    assert frontmatter.has_changes() is False
    assert frontmatter.delete("tags", "tag_2") is True
    assert frontmatter.dict["tags"] == ["tag_1", "📅/tag_3"]
    assert frontmatter.delete("tags") is True
    assert "tags" not in frontmatter.dict
    assert frontmatter.has_changes() is True
    assert frontmatter.delete("shared_key1", r"\w+") is True
    assert frontmatter.dict["shared_key1"] == []
    assert frontmatter.delete(r"\w.tter") is True
    assert frontmatter.dict == {"shared_key1": []}


def test_frontmatter_yaml_conversion():
    """Test Frontmatter to_yaml method."""
    new_frontmatter: str = """\
tags:
- tag_1
- tag_2
- 📅/tag_3
frontmatter_Key1: frontmatter_Key1_value
frontmatter_Key2:
- article
- note
shared_key1: shared_key1_value
"""
    new_frontmatter_sorted: str = """\
frontmatter_Key1: frontmatter_Key1_value
frontmatter_Key2:
- article
- note
shared_key1: shared_key1_value
tags:
- tag_1
- tag_2
- 📅/tag_3
"""
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)
    assert frontmatter.to_yaml() == new_frontmatter
    assert frontmatter.to_yaml(sort_keys=True) == new_frontmatter_sorted
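    # Default output preserves the key order of FRONTMATTER_CONTENT, while
    # sort_keys=True reorders the top-level keys alphabetically; the values
    # under each key are identical in both expected fixtures above.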


def test_inline_metadata_add() -> None:
    """Test inline add."""
    inline = InlineMetadata(INLINE_CONTENT)

    assert inline.add("bold_key1") is False
    assert inline.add("bold_key1", "bold_key1_value") is False
    assert inline.add("added_key") is True
    assert inline.dict == {
        "added_key": [],
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }

    assert inline.add("added_key1", "added_value") is True
    assert inline.dict == {
        "added_key": [],
        "added_key1": ["added_value"],
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }

    with pytest.raises(ValueError):
        assert inline.add("added_key1", "added_value_2") is True

    assert inline.dict == {
        "added_key": [],
        "added_key1": ["added_value"],
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }

    assert inline.add("added_key", "added_value")
    assert inline.dict == {
        "added_key": ["added_value"],
        "added_key1": ["added_value"],
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }


def test_inline_metadata_contains() -> None:
    """Test inline metadata contains method."""
    inline = InlineMetadata(INLINE_CONTENT)

    assert inline.contains("bold_key1") is True
    assert inline.contains("bold_key2", "bold_key2_value") is True
    assert inline.contains("bold_key3") is False
    assert inline.contains("bold_key2", "no value") is False

    assert inline.contains(r"\w{4}_key", is_regex=True) is True
    assert inline.contains(r"^\d", is_regex=True) is False
    assert inline.contains("1$", r"\d_value", is_regex=True) is True
    assert inline.contains("key", r"^\d_value", is_regex=True) is False


def test_inline_metadata_create() -> None:
    """Test inline metadata creation."""
    inline = InlineMetadata(FRONTMATTER_CONTENT)
    assert inline.dict == {}
    inline = InlineMetadata(INLINE_CONTENT)
    assert inline.dict == {
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }
    assert inline.dict_original == {
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }


def test_inline_metadata_delete() -> None:
    """Test inline metadata delete."""
    inline = InlineMetadata(INLINE_CONTENT)
    assert inline.dict == {
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }

    assert inline.delete("no key") is False
    assert inline.delete("repeated_key", "no value") is False
    assert inline.has_changes() is False
    assert inline.delete("repeated_key", "repeated_key_value1") is True
    assert inline.dict["repeated_key"] == ["repeated_key_value2"]
    assert inline.delete("repeated_key") is True
    assert "repeated_key" not in inline.dict
    assert inline.has_changes() is True
    assert inline.delete(r"\d{3}") is False
    assert inline.delete(r"bold_key\d") is True
    assert inline.dict == {
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "tag_key": ["tag_key_value"],
    }
    assert inline.delete("emoji_📅_key", ".*📅.*") is True
    assert inline.dict == {
        "emoji_📅_key": [],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "tag_key": ["tag_key_value"],
    }


def test_inline_metadata_rename() -> None:
    """Test inline metadata rename."""
    inline = InlineMetadata(INLINE_CONTENT)
    assert inline.dict == {
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }

    assert inline.rename("no key", "new key") is False
    assert inline.rename("repeated_key", "no value", "new key") is False
    assert inline.has_changes() is False
    assert inline.rename("repeated_key", "repeated_key_value1", "new value") is True
    assert inline.dict["repeated_key"] == ["new value", "repeated_key_value2"]
    assert inline.rename("repeated_key", "old_key") is True
    assert inline.dict["old_key"] == ["new value", "repeated_key_value2"]
    assert "repeated_key" not in inline.dict
    assert inline.has_changes() is True


def test_inline_tags_add() -> None:
    """Test inline tags add."""
    tags = InlineTags(INLINE_CONTENT)

    assert tags.add("bold_tag") is False
    assert tags.add("new_tag") is True
    assert tags.list == [
        "bold_tag",
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "new_tag",
        "tag_key_value",
    ]


def test_inline_tags_contains() -> None:
    """Test inline tags contains."""
    tags = InlineTags(INLINE_CONTENT)
    assert tags.contains("bold_tag") is True
    assert tags.contains("no tag") is False

    assert tags.contains(r"\w_\w", is_regex=True) is True
    assert tags.contains(r"\d_\d", is_regex=True) is False


def test_inline_tags_create() -> None:
    """Test inline tags creation."""
    tags = InlineTags(FRONTMATTER_CONTENT)
    tags.metadata_key
    assert tags.list == []

    tags = InlineTags(INLINE_CONTENT)
    assert tags.list == [
        "bold_tag",
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "tag_key_value",
    ]
    assert tags.list_original == [
        "bold_tag",
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "tag_key_value",
    ]


def test_inline_tags_delete() -> None:
    """Test inline tags delete."""
    tags = InlineTags(INLINE_CONTENT)
    assert tags.list == [
        "bold_tag",
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "tag_key_value",
    ]

    assert tags.delete("no tag") is False
    assert tags.has_changes() is False
    assert tags.delete("bold_tag") is True
    assert tags.list == [
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "tag_key_value",
    ]
    assert tags.has_changes() is True
    assert tags.delete(r"\d{3}") is False
    assert tags.delete(r"inline_tag_top\d") is True
    assert tags.list == ["in_text_tag", "tag_key_value"]


def test_inline_tags_rename() -> None:
    """Test inline tags rename."""
    tags = InlineTags(INLINE_CONTENT)
    assert tags.list == [
        "bold_tag",
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "tag_key_value",
    ]

    assert tags.rename("no tag", "new tag") is False
    assert tags.has_changes() is False
    assert tags.rename("bold_tag", "new tag") is True
    assert tags.list == [
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "new tag",
        "tag_key_value",
    ]
    assert tags.has_changes() is True


def test_vault_metadata() -> None:
    """Test VaultMetadata class."""
    vm = VaultMetadata()
    assert vm.dict == {}

    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=METADATA)
    vm.index_metadata(area=MetadataType.INLINE, metadata=METADATA_2)
    vm.index_metadata(area=MetadataType.TAGS, metadata=TAG_LIST)
    assert vm.dict == {
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "intext_key": ["intext_key_value"],
        "key1": ["value1"],
        "key2": ["value2", "value3"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }
    assert vm.frontmatter == {
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "intext_key": ["intext_key_value"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }
    assert vm.inline_metadata == {"key1": ["value1"], "key2": ["value2", "value3"]}
    assert vm.tags == ["tag 1", "tag 2", "tag 3"]

    new_metadata = {"added_key": ["added_value"], "frontmatter_Key2": ["new_value"]}
    new_tags = ["tag 4", "tag 5"]
    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=new_metadata)
    vm.index_metadata(area=MetadataType.TAGS, metadata=new_tags)
    assert vm.dict == {
        "added_key": ["added_value"],
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "new_value", "note"],
        "intext_key": ["intext_key_value"],
        "key1": ["value1"],
        "key2": ["value2", "value3"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }
    assert vm.frontmatter == {
        "added_key": ["added_value"],
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "new_value", "note"],
        "intext_key": ["intext_key_value"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }
    assert vm.inline_metadata == {"key1": ["value1"], "key2": ["value2", "value3"]}
    assert vm.tags == ["tag 1", "tag 2", "tag 3", "tag 4", "tag 5"]


def test_vault_metadata_print(capsys) -> None:
    """Test print_metadata method."""
    vm = VaultMetadata()
    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=METADATA)
    vm.index_metadata(area=MetadataType.INLINE, metadata=METADATA_2)
    vm.index_metadata(area=MetadataType.TAGS, metadata=TAG_LIST)

    vm.print_metadata(area=MetadataType.ALL)
    captured = capsys.readouterr()
    assert "All metadata" in captured.out
    assert "All inline tags" in captured.out
    assert "┃ Keys ┃ Values ┃" in captured.out
    assert "│ shared_key1 │ shared_key1_value │" in captured.out
    assert captured.out == Regex("#tag 1 +#tag 2")

    vm.print_metadata(area=MetadataType.FRONTMATTER)
    captured = capsys.readouterr()
    assert "All frontmatter" in captured.out
    assert "┃ Keys ┃ Values ┃" in captured.out
    assert "│ shared_key1 │ shared_key1_value │" in captured.out
    assert "value1" not in captured.out

    vm.print_metadata(area=MetadataType.INLINE)
    captured = capsys.readouterr()
    assert "All inline" in captured.out
    assert "┃ Keys ┃ Values ┃" in captured.out
    assert "shared_key1" not in captured.out
    assert "│ key1 │ value1 │" in captured.out

    vm.print_metadata(area=MetadataType.TAGS)
    captured = capsys.readouterr()
    assert "All inline tags " in captured.out
    assert "┃ Keys ┃ Values ┃" not in captured.out
    assert captured.out == Regex("#tag 1 +#tag 2")

    vm.print_metadata(area=MetadataType.KEYS)
    captured = capsys.readouterr()
    assert "All Keys " in captured.out
    assert "┃ Keys ┃ Values ┃" not in captured.out
    assert captured.out != Regex("#tag 1 +#tag 2")
    assert captured.out == Regex("frontmatter_Key1 +frontmatter_Key2")


def test_vault_metadata_contains() -> None:
    """Test contains method."""
    vm = VaultMetadata()
    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=METADATA)
    vm.index_metadata(area=MetadataType.INLINE, metadata=METADATA_2)
    vm.index_metadata(area=MetadataType.TAGS, metadata=TAG_LIST)
    assert vm.dict == {
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "intext_key": ["intext_key_value"],
        "key1": ["value1"],
        "key2": ["value2", "value3"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }
    assert vm.frontmatter == {
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "intext_key": ["intext_key_value"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }
    assert vm.inline_metadata == {"key1": ["value1"], "key2": ["value2", "value3"]}
    assert vm.tags == ["tag 1", "tag 2", "tag 3"]

    with pytest.raises(ValueError):
        vm.contains(area=MetadataType.ALL, value="key1")

    assert vm.contains(area=MetadataType.ALL, key="no_key") is False
    assert vm.contains(area=MetadataType.ALL, key="key1") is True
    assert vm.contains(area=MetadataType.ALL, key="frontmatter_Key2", value="article") is True
    assert vm.contains(area=MetadataType.ALL, key="frontmatter_Key2", value="none") is False
    assert vm.contains(area=MetadataType.ALL, key="1$", is_regex=True) is True
    assert vm.contains(area=MetadataType.ALL, key=r"\d\d", is_regex=True) is False

    assert vm.contains(area=MetadataType.FRONTMATTER, key="no_key") is False
    assert vm.contains(area=MetadataType.FRONTMATTER, key="frontmatter_Key1") is True
    assert (
        vm.contains(area=MetadataType.FRONTMATTER, key="frontmatter_Key2", value="article") is True
    )
    assert vm.contains(area=MetadataType.FRONTMATTER, key="frontmatter_Key2", value="none") is False
    assert vm.contains(area=MetadataType.FRONTMATTER, key="1$", is_regex=True) is True
    assert vm.contains(area=MetadataType.FRONTMATTER, key=r"\d\d", is_regex=True) is False

    assert vm.contains(area=MetadataType.INLINE, key="no_key") is False
    assert vm.contains(area=MetadataType.INLINE, key="key1") is True
    assert vm.contains(area=MetadataType.INLINE, key="key2", value="value3") is True
    assert vm.contains(area=MetadataType.INLINE, key="key2", value="none") is False
    assert vm.contains(area=MetadataType.INLINE, key="1$", is_regex=True) is True
    assert vm.contains(area=MetadataType.INLINE, key=r"\d\d", is_regex=True) is False

    assert vm.contains(area=MetadataType.TAGS, value="no_tag") is False
    assert vm.contains(area=MetadataType.TAGS, value="tag 1") is True
    assert vm.contains(area=MetadataType.TAGS, value=r"\w+ \d$", is_regex=True) is True
    assert vm.contains(area=MetadataType.TAGS, value=r"\w+ \d\d$", is_regex=True) is False
    with pytest.raises(ValueError):
        vm.contains(area=MetadataType.TAGS, key="key1")


def test_vault_metadata_delete() -> None:
    """Test delete method."""
    vm = VaultMetadata()
    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=METADATA)
    assert vm.dict == {
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "intext_key": ["intext_key_value"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }

    assert vm.delete("no key") is False
    assert vm.delete("tags", "no value") is False
    assert vm.delete("tags", "tag 2") is True
    assert vm.dict["tags"] == ["tag 1", "tag 3"]
    assert vm.delete("tags") is True
    assert "tags" not in vm.dict


def test_vault_metadata_rename() -> None:
    """Test rename method."""
    vm = VaultMetadata()
    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=METADATA)
    assert vm.dict == {
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "intext_key": ["intext_key_value"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }

    assert vm.rename("no key", "new key") is False
    assert vm.rename("tags", "no tag", "new key") is False
    assert vm.rename("tags", "tag 2", "new tag") is True
    assert vm.dict["tags"] == ["new tag", "tag 1", "tag 3"]
    assert vm.rename("tags", "old_tags") is True
    assert vm.dict["old_tags"] == ["new tag", "tag 1", "tag 3"]
    assert "tags" not in vm.dict
814
tests/metadata_vault_test.py
Normal file
@@ -0,0 +1,814 @@
# type: ignore
"""Test VaultMetadata object from metadata.py."""
import pytest

from obsidian_metadata.models.enums import MetadataType
from obsidian_metadata.models.metadata import (
    VaultMetadata,
)
from tests.helpers import Regex, remove_ansi
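# NOTE: `remove_ansi` strips ANSI escape sequences so styled console output
# can be compared as plain text. A minimal sketch of the assumed behavior
# (an illustration, not necessarily the helper's actual code):
#
#     import re
#
#     ANSI_RE = re.compile(r"\x1b\[[0-9;]*m")
#
#     def remove_ansi(text: str) -> str:
#         return ANSI_RE.sub("", text)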


def test_vault_metadata__init_1() -> None:
    """Test VaultMetadata class."""
    vm = VaultMetadata()
    assert vm.dict == {}
    assert vm.frontmatter == {}
    assert vm.inline_metadata == {}
    assert vm.tags == []


def test_index_metadata_1():
    """Test index_metadata() method.

    GIVEN a dictionary to add
    WHEN the target area is FRONTMATTER and the old dictionary is empty
    THEN the new dictionary is added to the target area
    """
    vm = VaultMetadata()
    new_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}
    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=new_dict)
    assert vm.dict == new_dict
    assert vm.frontmatter == new_dict


def test_index_metadata_2():
    """Test index_metadata() method.

    GIVEN a dictionary to add
    WHEN the target area is FRONTMATTER and the old dictionary is not empty
    THEN the new dictionary is merged with the old dictionary
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"], "other_key": ["value1"]}
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}

    new_dict = {"key1": ["value1"], "key2": ["value1", "value3"], "key3": ["value1"]}

    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=new_dict)
    assert vm.dict == {
        "key1": ["value1"],
        "key2": ["value1", "value2", "value3"],
        "key3": ["value1"],
        "other_key": ["value1"],
    }
    assert vm.frontmatter == {
        "key1": ["value1"],
        "key2": ["value1", "value2", "value3"],
        "key3": ["value1"],
    }


def test_index_metadata_3():
    """Test index_metadata() method.

    GIVEN a dictionary to add
    WHEN the target area is INLINE and the old dictionary is empty
    THEN the new dictionary is added to the target area
    """
    vm = VaultMetadata()
    new_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}
    vm.index_metadata(area=MetadataType.INLINE, metadata=new_dict)
    assert vm.dict == new_dict
    assert vm.inline_metadata == new_dict


def test_index_metadata_4():
    """Test index_metadata() method.

    GIVEN a dictionary to add
    WHEN the target area is INLINE and the old dictionary is not empty
    THEN the new dictionary is merged with the old dictionary
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"], "other_key": ["value1"]}
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}

    new_dict = {"key1": ["value1"], "key2": ["value1", "value3"], "key3": ["value1"]}

    vm.index_metadata(area=MetadataType.INLINE, metadata=new_dict)
    assert vm.dict == {
        "key1": ["value1"],
        "key2": ["value1", "value2", "value3"],
        "key3": ["value1"],
        "other_key": ["value1"],
    }
    assert vm.inline_metadata == {
        "key1": ["value1"],
        "key2": ["value1", "value2", "value3"],
        "key3": ["value1"],
    }


def test_index_metadata_5():
    """Test index_metadata() method.

    GIVEN a list of tags to add
    WHEN the target area is TAGS and the old list is empty
    THEN the new list is added to the target area
    """
    vm = VaultMetadata()
    new_list = ["tag1", "tag2", "tag3"]
    vm.index_metadata(area=MetadataType.TAGS, metadata=new_list)
    assert vm.dict == {}
    assert vm.tags == new_list


def test_index_metadata_6():
    """Test index_metadata() method.

    GIVEN a list of tags to add
    WHEN the target area is TAGS and the old list is not empty
    THEN the new list is merged with the old list
    """
    vm = VaultMetadata()
    vm.tags = ["tag1", "tag2", "tag3"]
    new_list = ["tag1", "tag2", "tag4", "tag5"]

    vm.index_metadata(area=MetadataType.TAGS, metadata=new_list)
    assert vm.dict == {}
    assert vm.tags == ["tag1", "tag2", "tag3", "tag4", "tag5"]


def test_contains_1():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key that exists
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.FRONTMATTER, key="key1") is True


def test_contains_2():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key that does not exist
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.FRONTMATTER, key="key3") is False


def test_contains_3():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key and value that exists
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.FRONTMATTER, key="key2", value="value1") is True


def test_contains_4():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key and value that does not exist
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.FRONTMATTER, key="key2", value="value3") is False


def test_contains_5():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key that exists with regex
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.FRONTMATTER, key=r"\w+\d", is_regex=True) is True


def test_contains_6():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key that does not exist with regex
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.FRONTMATTER, key=r"^\d", is_regex=True) is False


def test_contains_7():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key and value that exists with regex
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert (
        vm.contains(area=MetadataType.FRONTMATTER, key="key2", value=r"\w\d", is_regex=True) is True
    )


def test_contains_8():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key and value that does not exist with regex
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert (
        vm.contains(area=MetadataType.FRONTMATTER, key="key2", value=r"^\d", is_regex=True) is False
    )


def test_contains_9():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked while the key is None
    THEN raise a ValueError
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    with pytest.raises(ValueError, match="Key must be provided"):
        vm.contains(area=MetadataType.FRONTMATTER, value="value1")


def test_contains_10():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key that exists
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key="key1") is True


def test_contains_11():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key that does not exist
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key="key3") is False


def test_contains_12():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key and value that exists
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key="key2", value="value1") is True


def test_contains_13():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key and value that does not exist
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key="key2", value="value3") is False


def test_contains_14():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key that exists with regex
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key=r"\w+\d", is_regex=True) is True


def test_contains_15():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key that does not exist with regex
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key=r"^\d", is_regex=True) is False


def test_contains_16():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key and value that exists with regex
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key="key2", value=r"\w\d", is_regex=True) is True


def test_contains_17():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key and value that does not exist with regex
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key="key2", value=r"^\d", is_regex=True) is False


def test_contains_18():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked while the key is None
    THEN raise a ValueError
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    with pytest.raises(ValueError, match="Key must be provided"):
        vm.contains(area=MetadataType.INLINE, value="value1")


def test_contains_19():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a key but not a value
    THEN raise a ValueError
    """
    vm = VaultMetadata()
    vm.tags = ["tag1", "tag2", "tag3"]
    with pytest.raises(ValueError, match="Value must be provided"):
        vm.contains(area=MetadataType.TAGS, key="key1")


def test_contains_20():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a value that exists
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.tags = ["tag1", "tag2", "tag3"]
    assert vm.contains(area=MetadataType.TAGS, value="tag1") is True


def test_contains_21():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a value that does not exist
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.tags = ["tag1", "tag2", "tag3"]
    assert vm.contains(area=MetadataType.TAGS, value="value1") is False


def test_contains_22():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked with a key regex but no value
    THEN raise a ValueError
    """
    vm = VaultMetadata()
    vm.tags = ["tag1", "tag2", "tag3"]
    with pytest.raises(ValueError, match="Value must be provided"):
        vm.contains(area=MetadataType.TAGS, key=r"\w", is_regex=True)


def test_contains_23():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a value that does not exist with regex
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.tags = ["tag1", "tag2", "tag3"]
    assert vm.contains(area=MetadataType.TAGS, value=r"^\d", is_regex=True) is False


def test_contains_24():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a value that exists with regex
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.tags = ["tag1", "tag2", "tag3"]
    assert vm.contains(area=MetadataType.TAGS, value=r"^tag\d", is_regex=True) is True


def test_contains_25():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key that exists
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key="key1") is True


def test_contains_26():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key that does not exist
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key="key3") is False


def test_contains_27():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key and value that exists
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key="key2", value="value1") is True


def test_contains_28():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key and value that does not exist
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key="key2", value="value3") is False


def test_contains_29():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key that exists with regex
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key=r"\w+\d", is_regex=True) is True


def test_contains_30():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key that does not exist with regex
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key=r"^\d", is_regex=True) is False


def test_contains_31():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key and value that exists with regex
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key="key2", value=r"\w\d", is_regex=True) is True


def test_contains_32():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key and value that does not exist with regex
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key="key2", value=r"^\d", is_regex=True) is False


def test_contains_33():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked while the key is None
    THEN raise a ValueError
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    with pytest.raises(ValueError, match="Key must be provided"):
        vm.contains(area=MetadataType.ALL, value="value1")


def test_delete_1():
    """Test delete() method.

    GIVEN a VaultMetadata object
    WHEN a key is deleted
    THEN return True and the key is removed
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.delete(key="key1") is True
    assert vm.dict == {"key2": ["value1", "value2"]}


def test_delete_2():
    """Test delete() method.

    GIVEN a VaultMetadata object
    WHEN a key is deleted that does not exist
    THEN return False and the key is not removed
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.delete(key="key3") is False
    assert vm.dict == {"key1": ["value1"], "key2": ["value1", "value2"]}


def test_delete_3():
    """Test delete() method.

    GIVEN a VaultMetadata object
    WHEN a key and value are specified
    THEN return True and remove the value
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.delete(key="key2", value_to_delete="value1") is True
    assert vm.dict == {"key1": ["value1"], "key2": ["value2"]}


def test_delete_4():
    """Test delete() method.

    GIVEN a VaultMetadata object
    WHEN a key and a nonexistent value are specified
    THEN return False
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.delete(key="key2", value_to_delete="value11") is False
    assert vm.dict == {"key1": ["value1"], "key2": ["value1", "value2"]}


def test_rename_1():
    """Test VaultMetadata rename() method.

    GIVEN a VaultMetadata object
    WHEN the rename() method is called with a key
    THEN return False if the key is not found
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.rename("no key", "new key") is False


def test_rename_2():
    """Test VaultMetadata rename() method.

    GIVEN a VaultMetadata object
    WHEN the rename() method is called with an existing key and a non-existing value
    THEN return False
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.rename("key1", "no value", "new value") is False


def test_rename_3():
    """Test VaultMetadata rename() method.

    GIVEN a VaultMetadata object
    WHEN the rename() method is called with an existing key
    THEN return True and rename the key
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.rename("key1", "new key") is True
    assert vm.dict == {"key2": ["value1", "value2"], "new key": ["value1"]}


def test_rename_4():
    """Test VaultMetadata rename() method.

    GIVEN a VaultMetadata object
    WHEN the rename() method is called with an existing key and value
    THEN return True and rename the value
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.rename("key1", "value1", "new value") is True
    assert vm.dict == {"key1": ["new value"], "key2": ["value1", "value2"]}


def test_rename_5():
    """Test VaultMetadata rename() method.

    GIVEN a VaultMetadata object
    WHEN the rename() method is called with an existing key and value and the new value already exists
    THEN return True and remove the old value, leaving one instance of the new value
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.rename("key2", "value1", "value2") is True
    assert vm.dict == {"key1": ["value1"], "key2": ["value2"]}


def test_print_metadata_1(capsys):
    """Test print_metadata() method.

    GIVEN calling print_metadata() with a VaultMetadata object
    WHEN ALL is specified
    THEN print all the metadata
    """
    vm = VaultMetadata()
    vm.dict = {
        "key1": ["value1", "value2"],
        "key2": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    vm.inline_metadata = {
        "key1": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.tags = ["tag1", "tag2", "tag3"]

    vm.print_metadata(area=MetadataType.ALL)
    captured = remove_ansi(capsys.readouterr().out)
    assert "All metadata" in captured
    assert captured == Regex("┃ Keys +┃ Values +┃")
    assert captured == Regex("│ key1 +│ value1 +│")
    assert captured == Regex("│ key2 +│ value1 +│")
    assert captured == Regex("│ key4 +│ value1 +│")
    assert "All inline tags" in captured
    assert captured == Regex("#tag1 +#tag2")


def test_print_metadata_2(capsys):
    """Test print_metadata() method.

    GIVEN calling print_metadata() with a VaultMetadata object
    WHEN FRONTMATTER is specified
    THEN print only the frontmatter metadata
    """
    vm = VaultMetadata()
    vm.dict = {
        "key1": ["value1", "value2"],
        "key2": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    vm.inline_metadata = {
        "key1": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.tags = ["tag1", "tag2", "tag3"]

    vm.print_metadata(area=MetadataType.FRONTMATTER)
    captured = remove_ansi(capsys.readouterr().out)
    assert "All frontmatter" in captured
    assert captured == Regex("┃ Keys +┃ Values +┃")
    assert captured == Regex("│ key1 +│ value1 +│")
    assert captured == Regex("│ key2 +│ value1 +│")
    assert captured != Regex("│ key4 +│ value1 +│")
    assert "All inline tags" not in captured
    assert captured != Regex("#tag1 +#tag2")


def test_print_metadata_3(capsys):
    """Test print_metadata() method.

    GIVEN calling print_metadata() with a VaultMetadata object
    WHEN INLINE is specified
    THEN print only the inline metadata
    """
    vm = VaultMetadata()
    vm.dict = {
        "key1": ["value1", "value2"],
        "key2": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    vm.inline_metadata = {
        "key1": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.tags = ["tag1", "tag2", "tag3"]

    vm.print_metadata(area=MetadataType.INLINE)
    captured = remove_ansi(capsys.readouterr().out)
    assert "All inline" in captured
    assert captured == Regex("┃ Keys +┃ Values +┃")
    assert captured == Regex("│ key1 +│ value1 +│")
    assert captured != Regex("│ key2 +│ value1 +│")
    assert captured == Regex("│ key4 +│ value1 +│")
    assert "All inline tags" not in captured
    assert captured != Regex("#tag1 +#tag2")


def test_print_metadata_4(capsys):
    """Test print_metadata() method.

    GIVEN calling print_metadata() with a VaultMetadata object
    WHEN TAGS is specified
    THEN print all the tags
    """
    vm = VaultMetadata()
    vm.dict = {
        "key1": ["value1", "value2"],
        "key2": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    vm.inline_metadata = {
        "key1": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.tags = ["tag1", "tag2", "tag3"]

    vm.print_metadata(area=MetadataType.TAGS)
    captured = remove_ansi(capsys.readouterr().out)
    assert "All inline tags" in captured
    assert captured != Regex("┃ Keys +┃ Values +┃")
    assert captured != Regex("│ key1 +│ value1 +│")
    assert captured != Regex("│ key2 +│ value1 +│")
    assert captured != Regex("│ key4 +│ value1 +│")
    assert captured == Regex("#tag1 +#tag2 +#tag3")


def test_print_metadata_5(capsys):
    """Test print_metadata() method.

    GIVEN calling print_metadata() with a VaultMetadata object
    WHEN KEYS is specified
    THEN print all the keys
    """
    vm = VaultMetadata()
    vm.dict = {
        "key1": ["value1", "value2"],
        "key2": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    vm.inline_metadata = {
        "key1": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.tags = ["tag1", "tag2", "tag3"]

    vm.print_metadata(area=MetadataType.KEYS)
    captured = remove_ansi(capsys.readouterr().out)
    assert "All Keys" in captured
    assert captured != Regex("┃ Keys +┃ Values +┃")
    assert captured != Regex("│ key1 +│ value1 +│")
    assert captured != Regex("│ key2 +│ value1 +│")
    assert captured != Regex("│ key4 +│ value1 +│")
    assert captured != Regex("#tag1 +#tag2 +#tag3")
    assert captured == Regex("key1 +key2 +key3 +key4")
1240
tests/notes_test.py
File diff suppressed because it is too large
@@ -6,17 +6,7 @@ import pytest
from obsidian_metadata.models.patterns import Patterns

TAG_CONTENT: str = "#1 #2 **#3** [[#4]] [[#5|test]] #6#notag #7_8 #9/10 #11-12 #13; #14, #15. #16: #17* #18(#19) #20[#21] #22\\ #23& #24# #25 **#26** #📅/tag [link](#no_tag) https://example.com/somepage.html_#no_url_tags"
INLINE_METADATA: str = """
**1:: 1**
2_2:: [[2_2]] | 2
asdfasdf [3:: 3] asdfasdf [7::7] asdf
[4:: 4] [5:: 5]
> 6:: 6
**8**:: **8**
10::
📅11:: 11/📅/11
emoji_📅_key:: 📅emoji_📅_key_value
"""

FRONTMATTER_CONTENT: str = """
---
tags:
@@ -150,17 +140,64 @@ def test_find_inline_tags():
def test_find_inline_metadata():
    """Test find_inline_metadata regex."""
    pattern = Patterns()
    content = """
**1:: 1**
2_2:: [[2_2]] | 2
asdfasdf [3:: 3] asdfasdf [7::7] asdf
[4:: 4] [5:: 5]
> 6:: 6
**8**:: **8**
10::
📅11:: 11/📅/11
emoji_📅_key::emoji_📅_key_value
key1:: value1
key1:: value2
key1:: value3
indented_key:: value1
Paragraph of text with an [inline_key:: value1] and [inline_key:: value2] and [inline_key:: value3] which should do it.
> blockquote_key:: value1
> blockquote_key:: value2

- list_key:: value1
- list_key:: [[value2]]

1. list_key:: value1
2. list_key:: value2

| table_key:: value1 | table_key:: value2 |
---
frontmatter_key1: frontmatter_key1_value
---
not_a_key: not_a_value
paragraph metadata:: key in text
"""

    result = pattern.find_inline_metadata.findall(INLINE_METADATA)
    result = pattern.find_inline_metadata.findall(content)
    assert result == [
        ("", "", "1", "1**"),
        ("", "", "2_2", "[[2_2]] | 2"),
        ("3", "3", "", ""),
        ("7", "7", "", ""),
        ("", "", "4", "4] [5:: 5]"),
        ("", "", "6", "6"),
        ("", "", "8**", "**8**"),
        ("", "", "11", "11/📅/11"),
        ("", "", "emoji_📅_key", "📅emoji_📅_key_value"),
        ("", "", "emoji_📅_key", "emoji_📅_key_value"),
        ("", "", "key1", "value1"),
        ("", "", "key1", "value2"),
        ("", "", "key1", "value3"),
        ("", "", "indented_key", "value1"),
        ("inline_key", "value1", "", ""),
        ("inline_key", "value2", "", ""),
        ("inline_key", "value3", "", ""),
        ("", "", "blockquote_key", "value1"),
        ("", "", "blockquote_key", "value2"),
        ("", "", "list_key", "value1"),
        ("", "", "list_key", "[[value2]]"),
        ("", "", "list_key", "value1"),
        ("", "", "list_key", "value2"),
        ("", "", "table_key", "value1 | table_key:: value2 |"),
        ("", "", "metadata", "key in text"),
    ]
|
||||
|
||||
|
||||
|
||||
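For orientation on why findall() returns 4-tuples above: the inline-metadata pattern alternates between a bracketed [key:: value] form and a bare key:: value form, so every match fills either the first two capture groups or the last two. A minimal sketch in the same spirit (this simplified pattern is illustrative only, not the actual regex in obsidian_metadata.models.patterns):

import re

# Simplified stand-in for Patterns.find_inline_metadata (assumption, not the
# library's real pattern). Alternative 1 captures bracketed pairs into groups
# 1-2; alternative 2 captures bare key:: value pairs into groups 3-4. The
# groups of the alternative that did not match come back as empty strings.
SIMPLE_INLINE = re.compile(
    r"\[([^\[\]:]+)::[ ]?([^\]]+)\]"  # [key:: value]
    r"|(\w[\w_-]*)::[ ]?(.*)$",       # key:: value, running to end of line
    re.MULTILINE,
)

print(SIMPLE_INLINE.findall("text [inline_key:: value1] text\nblockquote_key:: value2"))
# [('inline_key', 'value1', '', ''), ('', '', 'blockquote_key', 'value2')]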
@@ -68,12 +68,12 @@ def test_validate_number() -> None:
    assert questions._validate_number("1") is True


-def test_validate_existing_inline_tag() -> None:
+def test_validate_existing_tag() -> None:
    """Test existing tag validation."""
    questions = Questions(vault=VAULT)
-    assert "Tag cannot be empty" in questions._validate_existing_inline_tag("")
-    assert "'test' does not exist" in questions._validate_existing_inline_tag("test")
-    assert questions._validate_existing_inline_tag("shared_tag") is True
+    assert "Tag cannot be empty" in questions._validate_existing_tag("")
+    assert "'test' does not exist" in questions._validate_existing_tag("test")
+    assert questions._validate_existing_tag("shared_tag") is True


def test_validate_key_exists_regex() -> None:
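The rename above keeps the questionary-style validator contract these assertions rely on: return True when the input is acceptable, otherwise return the error string to display. A hedged sketch of that contract as a standalone function (the real method lives on Questions and consults the vault):

def validate_existing_tag(text: str, known_tags: list[str]) -> bool | str:
    """Illustrative validator: True on success, an error message otherwise."""
    if not text:
        return "Tag cannot be empty"
    if text not in known_tags:
        return f"'{text}' does not exist"
    return True

assert validate_existing_tag("shared_tag", ["shared_tag"]) is True
assert validate_existing_tag("", ["shared_tag"]) == "Tag cannot be empty"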
@@ -1,13 +1,177 @@
# type: ignore
"""Test the utilities module."""

import pytest
import typer

from obsidian_metadata._utils import (
    clean_dictionary,
    delete_from_dict,
    dict_contains,
    dict_keys_to_lower,
    dict_values_to_lists_strings,
    remove_markdown_sections,
    rename_in_dict,
    validate_csv_bulk_imports,
)
from tests.helpers import Regex, remove_ansi


def test_delete_from_dict_1():
    """Test delete_from_dict() function.

    GIVEN a dictionary with values
    WHEN the delete_from_dict() function is called with a key that exists
    THEN the key should be deleted from the dictionary and the original dictionary should not be modified
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}

    assert delete_from_dict(dictionary=test_dict, key="key1") == {
        "key2": ["value2", "value3"],
        "key3": "value4",
    }
    assert test_dict == {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}


def test_delete_from_dict_2():
    """Test delete_from_dict() function.

    GIVEN a dictionary with values
    WHEN the delete_from_dict() function is called with a key that does not exist
    THEN the dictionary should not be modified
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}

    assert delete_from_dict(dictionary=test_dict, key="key5") == test_dict


def test_delete_from_dict_3():
    """Test delete_from_dict() function.

    GIVEN a dictionary with values in a list
    WHEN the delete_from_dict() function is called with a key and value that exist
    THEN the value should be deleted from the specified key in the dictionary
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}

    assert delete_from_dict(dictionary=test_dict, key="key2", value="value3") == {
        "key1": ["value1"],
        "key2": ["value2"],
        "key3": "value4",
    }


def test_delete_from_dict_4():
    """Test delete_from_dict() function.

    GIVEN a dictionary with values as strings
    WHEN the delete_from_dict() function is called with a key and value that exist
    THEN the value and key should be deleted from the dictionary
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}

    assert delete_from_dict(dictionary=test_dict, key="key3", value="value4") == {
        "key1": ["value1"],
        "key2": ["value2", "value3"],
    }


def test_delete_from_dict_5():
    """Test delete_from_dict() function.

    GIVEN a dictionary with values as strings
    WHEN the delete_from_dict() function is called with a key and value that do not exist
    THEN the dictionary should not be modified
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}

    assert delete_from_dict(dictionary=test_dict, key="key3", value="value5") == test_dict


def test_delete_from_dict_6():
    """Test delete_from_dict() function.

    GIVEN a dictionary with values as strings
    WHEN the delete_from_dict() function is called with a key regex that matches
    THEN the matching keys should be deleted from the dictionary
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}

    assert delete_from_dict(dictionary=test_dict, key="key[23]", is_regex=True) == {
        "key1": ["value1"]
    }


def test_delete_from_dict_7():
    """Test delete_from_dict() function.

    GIVEN a dictionary with values as strings
    WHEN the delete_from_dict() function is called with a key regex that does not match
    THEN no keys should be deleted from the dictionary
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}

    assert delete_from_dict(dictionary=test_dict, key=r"key\d\d", is_regex=True) == test_dict


def test_delete_from_dict_8():
    """Test delete_from_dict() function.

    GIVEN a dictionary with values as strings
    WHEN the delete_from_dict() function is called with a key and value regex that match
    THEN the matching values should be deleted from the matching key
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}

    assert delete_from_dict(dictionary=test_dict, key="key2", value=r"\w+", is_regex=True) == {
        "key1": ["value1"],
        "key2": [],
        "key3": "value4",
    }


def test_delete_from_dict_9():
    """Test delete_from_dict() function.

    GIVEN a dictionary with values as strings
    WHEN the delete_from_dict() function is called with a key and value regex that does not match
    THEN no keys should be deleted from the dictionary
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}

    assert (
        delete_from_dict(dictionary=test_dict, key=r"key2", value=r"^\d", is_regex=True)
        == test_dict
    )


def test_delete_from_dict_10():
    """Test delete_from_dict() function.

    GIVEN a dictionary with values as strings
    WHEN the delete_from_dict() function is called with a key and value regex that matches
    THEN the matching keys should be deleted from the dictionary
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}

    assert delete_from_dict(dictionary=test_dict, key="key3", value=r"\w+", is_regex=True) == {
        "key1": ["value1"],
        "key2": ["value2", "value3"],
    }


def test_delete_from_dict_11():
    """Test delete_from_dict() function.

    GIVEN a dictionary with values as strings
    WHEN the delete_from_dict() function is called with a key regex that matches multiple keys and a value regex that matches
    THEN the matching values should be deleted from the associated keys
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}

    assert delete_from_dict(
        dictionary=test_dict, key=r"key[23]", value=r"\w+[34]$", is_regex=True
    ) == {"key1": ["value1"], "key2": ["value2"]}


def test_dict_contains() -> None:
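The eleven cases above pin down the whole contract of delete_from_dict(): copy-on-write, exact or regex matching of keys and values, list values filtered in place, scalar values dropping their key. A minimal reimplementation that satisfies those assertions (a sketch for orientation, not the library's actual code):

import copy
import re

def delete_from_dict_sketch(dictionary: dict, key: str, value: str = None, is_regex: bool = False) -> dict:
    """Return a new dict with key (or key/value) removed; never mutate the input."""

    def matches(pattern: str, text: str) -> bool:
        return bool(re.search(pattern, str(text))) if is_regex else pattern == text

    new_dict = copy.deepcopy(dictionary)
    for k in [k for k in new_dict if matches(key, k)]:
        if value is None:
            del new_dict[k]  # no value given: drop the whole key
        elif isinstance(new_dict[k], list):
            new_dict[k] = [v for v in new_dict[k] if not matches(value, v)]
        elif matches(value, new_dict[k]):
            del new_dict[k]  # scalar value matched: drop the key entirely
    return new_dict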
@@ -25,6 +189,17 @@ def test_dict_contains() -> None:
    assert dict_contains(d, r"key\d", "value5", is_regex=True) is True


def test_dict_keys_to_lower() -> None:
    """Test the dict_keys_to_lower() function.

    GIVEN a dictionary with mixed case keys
    WHEN the dict_keys_to_lower() function is called
    THEN the dictionary keys should be converted to lowercase
    """
    test_dict = {"Key1": "Value1", "KEY2": "Value2", "key3": "Value3"}
    assert dict_keys_to_lower(test_dict) == {"key1": "Value1", "key2": "Value2", "key3": "Value3"}


def test_dict_values_to_lists_strings():
    """Test converting dictionary values to lists of strings."""
    dictionary = {
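dict_keys_to_lower() is the simplest of the new helpers; the behavior asserted above amounts to a key-lowering comprehension (a sketch, with the caveat that keys colliding after lowercasing would keep the last value):

def dict_keys_to_lower_sketch(dictionary: dict) -> dict:
    """Return a copy of the dictionary with every key lowercased."""
    return {key.lower(): value for key, value in dictionary.items()}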
@@ -66,6 +241,78 @@ def test_dict_values_to_lists_strings():
    }


def test_rename_in_dict_1():
    """Test rename_in_dict() function.

    GIVEN a dictionary with values as a list
    WHEN the rename_in_dict() function is called with a key that does not exist
    THEN no keys should be renamed in the dictionary
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}

    assert rename_in_dict(dictionary=test_dict, key="key4", value_1="key5") == test_dict


def test_rename_in_dict_2():
    """Test rename_in_dict() function.

    GIVEN a dictionary with values as a list
    WHEN the rename_in_dict() function is called with a key that exists and a new value for the key
    THEN the key should be renamed in the returned dictionary and the original dictionary should not be modified
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}

    assert rename_in_dict(dictionary=test_dict, key="key2", value_1="new_key") == {
        "key1": ["value1"],
        "new_key": ["value2", "value3"],
    }
    assert test_dict == {"key1": ["value1"], "key2": ["value2", "value3"]}


def test_rename_in_dict_3():
    """Test rename_in_dict() function.

    GIVEN a dictionary with values as a list
    WHEN the rename_in_dict() function is called with a key that exists and a value that does not exist
    THEN the dictionary should not be modified
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}

    assert (
        rename_in_dict(dictionary=test_dict, key="key2", value_1="no_value", value_2="new_value")
        == test_dict
    )


def test_rename_in_dict_4():
    """Test rename_in_dict() function.

    GIVEN a dictionary with values as a list
    WHEN the rename_in_dict() function is called with a key that exists and a new value for a value
    THEN update the specified value in the dictionary
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}

    assert rename_in_dict(
        dictionary=test_dict, key="key2", value_1="value2", value_2="new_value"
    ) == {"key1": ["value1"], "key2": ["new_value", "value3"]}


def test_rename_in_dict_5():
    """Test rename_in_dict() function.

    GIVEN a dictionary with values as a list
    WHEN the rename_in_dict() function is called with a key that exists and an existing value for a renamed value
    THEN only one instance of the new value should be in the key
    """
    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}

    assert rename_in_dict(dictionary=test_dict, key="key2", value_1="value2", value_2="value3") == {
        "key1": ["value1"],
        "key2": ["value3"],
    }


def test_remove_markdown_sections():
    """Test removing markdown sections."""
    text: str = """
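Taken together, the five rename_in_dict() cases describe a copy-on-write rename: with two arguments the key itself is renamed, with three arguments an individual value is replaced, and the result is deduplicated when the target value already exists. A sketch consistent with those assertions (not the actual implementation):

def rename_in_dict_sketch(dictionary: dict, key: str, value_1: str, value_2: str = None) -> dict:
    """Rename a key (two args) or replace a value in a key's list (three args)."""
    new_dict = dict(dictionary)
    if key not in new_dict:
        return new_dict
    if value_2 is None:
        new_dict[value_1] = new_dict.pop(key)  # value_1 is the new key name
    elif value_1 in new_dict[key]:
        renamed = [value_2 if v == value_1 else v for v in new_dict[key]]
        new_dict[key] = list(dict.fromkeys(renamed))  # dedupe, keep order
    return new_dict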
@@ -106,3 +353,172 @@ def test_clean_dictionary():

    new_dict = clean_dictionary(dictionary)
    assert new_dict == {"key": ["value", "value2", "value3"]}


def test_validate_csv_bulk_imports_1(tmp_path):
    """Test the validate_csv_bulk_imports function.

    GIVEN a csv file missing the `path` column
    WHEN the validate_csv_bulk_imports function is called
    THEN an exception should be raised
    """
    csv_path = tmp_path / "test.csv"
    csv_content = """\
PATH,type,key,value
note1.md,frontmatter,key,value"""
    csv_path.write_text(csv_content)

    with pytest.raises(typer.BadParameter):
        validate_csv_bulk_imports(csv_path=csv_path, note_paths=[])


def test_validate_csv_bulk_imports_2(tmp_path):
    """Test the validate_csv_bulk_imports function.

    GIVEN a csv file missing the `type` column
    WHEN the validate_csv_bulk_imports function is called
    THEN an exception should be raised
    """
    csv_path = tmp_path / "test.csv"
    csv_content = """\
path,Type,key,value
note1.md,frontmatter,key,value"""
    csv_path.write_text(csv_content)

    with pytest.raises(typer.BadParameter):
        validate_csv_bulk_imports(csv_path=csv_path, note_paths=[])


def test_validate_csv_bulk_imports_3(tmp_path):
    """Test the validate_csv_bulk_imports function.

    GIVEN a csv file missing the `key` column
    WHEN the validate_csv_bulk_imports function is called
    THEN an exception should be raised
    """
    csv_path = tmp_path / "test.csv"
    csv_content = """\
path,type,value
note1.md,frontmatter,key,value"""
    csv_path.write_text(csv_content)

    with pytest.raises(typer.BadParameter):
        validate_csv_bulk_imports(csv_path=csv_path, note_paths=[])


def test_validate_csv_bulk_imports_4(tmp_path):
    """Test the validate_csv_bulk_imports function.

    GIVEN a csv file missing the `value` column
    WHEN the validate_csv_bulk_imports function is called
    THEN an exception should be raised
    """
    csv_path = tmp_path / "test.csv"
    csv_content = """\
path,type,key,values
note1.md,frontmatter,key,value"""
    csv_path.write_text(csv_content)

    with pytest.raises(typer.BadParameter):
        validate_csv_bulk_imports(csv_path=csv_path, note_paths=[])


def test_validate_csv_bulk_imports_5(tmp_path):
    """Test the validate_csv_bulk_imports function.

    GIVEN a csv file with only headers
    WHEN the validate_csv_bulk_imports function is called
    THEN an exception should be raised
    """
    csv_path = tmp_path / "test.csv"
    csv_content = "path,type,key,value"
    csv_path.write_text(csv_content)

    with pytest.raises(typer.BadParameter):
        validate_csv_bulk_imports(csv_path=csv_path, note_paths=[])


def test_validate_csv_bulk_imports_6(tmp_path):
    """Test the validate_csv_bulk_imports function.

    GIVEN a valid csv file
    WHEN a path is given that does not exist in the vault
    THEN an exception should be raised
    """
    csv_path = tmp_path / "test.csv"
    csv_content = """\
path,type,key,value
note1.md,frontmatter,key,value
note1.md,tag,key,value
note1.md,inline_metadata,key,value
note1.md,inline_metadata,key2,value
note1.md,inline_metadata,key2,value2
note2.md,frontmatter,key,value
note2.md,tag,key,value
note2.md,inline_metadata,key,value
note2.md,inline_metadata,key2,value
note2.md,inline_metadata,key2,value2
"""
    csv_path.write_text(csv_content)

    with pytest.raises(typer.BadParameter):
        validate_csv_bulk_imports(csv_path=csv_path, note_paths=["note1.md"])


def test_validate_csv_bulk_imports_7(tmp_path):
    """Test the validate_csv_bulk_imports function.

    GIVEN a valid csv file
    WHEN a row's type is not 'frontmatter', 'inline_metadata', or 'tag'
    THEN exit the program
    """
    csv_path = tmp_path / "test.csv"
    csv_content = """\
path,type,key,value
note1.md,frontmatter,key,value
note2.md,notvalid,key,value
"""
    csv_path.write_text(csv_content)
    with pytest.raises(typer.BadParameter):
        validate_csv_bulk_imports(csv_path=csv_path, note_paths=["note1.md", "note2.md"])


def test_validate_csv_bulk_imports_8(tmp_path):
    """Test the validate_csv_bulk_imports function.

    GIVEN a valid csv file
    WHEN more than one row has the same path
    THEN add the row to the list of rows for that path
    """
    csv_path = tmp_path / "test.csv"
    csv_content = """\
path,type,key,value
note1.md,frontmatter,key,value
note1.md,tag,key,value
note1.md,inline_metadata,key,value
note1.md,inline_metadata,key2,value
note1.md,inline_metadata,key2,value2
note2.md,frontmatter,key,value
note2.md,tag,key,value
note2.md,inline_metadata,key,value
note2.md,inline_metadata,key2,value
note2.md,inline_metadata,key2,value2
"""
    csv_path.write_text(csv_content)
    csv_dict = validate_csv_bulk_imports(csv_path=csv_path, note_paths=["note1.md", "note2.md"])
    assert csv_dict == {
        "note1.md": [
            {"key": "key", "type": "frontmatter", "value": "value"},
            {"key": "key", "type": "tag", "value": "value"},
            {"key": "key", "type": "inline_metadata", "value": "value"},
            {"key": "key2", "type": "inline_metadata", "value": "value"},
            {"key": "key2", "type": "inline_metadata", "value": "value2"},
        ],
        "note2.md": [
            {"key": "key", "type": "frontmatter", "value": "value"},
            {"key": "key", "type": "tag", "value": "value"},
            {"key": "key", "type": "inline_metadata", "value": "value"},
            {"key": "key2", "type": "inline_metadata", "value": "value"},
            {"key": "key2", "type": "inline_metadata", "value": "value2"},
        ],
    }
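The eight cases above describe the validation pipeline end to end: exact header check, at least one data row, a closed set of types, paths that must exist in the vault, and rows grouped by path. A hedged sketch of a function that would pass them (an assumption about the internals, not the library's actual code):

import csv
from pathlib import Path

import typer

VALID_TYPES = {"frontmatter", "inline_metadata", "tag"}

def validate_csv_bulk_imports_sketch(csv_path: Path, note_paths: list) -> dict:
    """Validate a bulk-import CSV and group its rows by note path."""
    with csv_path.open() as f:
        reader = csv.DictReader(f)
        if reader.fieldnames is None or set(reader.fieldnames) != {"path", "type", "key", "value"}:
            raise typer.BadParameter("CSV must have headers: path, type, key, value")
        rows = list(reader)
    if not rows:
        raise typer.BadParameter("CSV has no rows to import")
    csv_dict: dict = {}
    for row in rows:
        if row["type"] not in VALID_TYPES:
            raise typer.BadParameter(f"Invalid type: {row['type']}")
        csv_dict.setdefault(row["path"], []).append(
            {"type": row["type"], "key": row["key"], "value": row["value"]}
        )
    missing = [p for p in csv_dict if p not in note_paths]
    if missing:
        raise typer.BadParameter(f"Paths not found in vault: {missing}")
    return csv_dict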
@@ -3,55 +3,58 @@

from pathlib import Path

import pytest
import typer
from rich import print

from obsidian_metadata._config import Config
from obsidian_metadata.models import Vault, VaultFilter
from obsidian_metadata.models.enums import InsertLocation, MetadataType
from tests.helpers import Regex


-def test_vault_creation(test_vault):
-    """Test creating a Vault object."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
def test_vault_creation(test_vault, tmp_path):
    """Test creating a Vault object.

    GIVEN a Config object
    WHEN a Vault object is created
    THEN the Vault object is created with the correct attributes.
    """
    vault = Vault(config=test_vault)

    assert vault.name == "vault"
-    assert vault.vault_path == vault_path
-    assert vault.insert_location == InsertLocation.BOTTOM
-    assert vault.backup_path == Path(f"{vault_path}.bak")
+    assert vault.insert_location == InsertLocation.TOP
+    assert vault.backup_path == Path(tmp_path, "vault.bak")
    assert vault.dry_run is False
    assert str(vault.exclude_paths[0]) == Regex(r".*\.git")
-    assert len(vault.all_notes) == 3
+    assert len(vault.all_notes) == 2

    assert vault.metadata.dict == {
        "author": ["author name"],
        "bottom_key1": ["bottom_key1_value"],
        "bottom_key2": ["bottom_key2_value"],
        "date_created": ["2022-12-22"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "ignored_frontmatter": ["ignore_me"],
        "intext_key": ["intext_value"],
-        "shared_key1": ["shared_key1_value"],
+        "key📅": ["📅_key_value"],
+        "shared_key1": [
+            "shared_key1_value",
+            "shared_key1_value2",
+            "shared_key1_value3",
+        ],
        "shared_key2": ["shared_key2_value1", "shared_key2_value2"],
        "tags": [
            "frontmatter_tag1",
            "frontmatter_tag2",
            "frontmatter_tag3",
            "ignored_file_tag1",
            "shared_tag",
            "📅/frontmatter_tag3",
        ],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value_as_link"],
        "type": ["article", "note"],
    }

    assert vault.metadata.tags == [
        "ignored_file_tag2",
        "inline_tag_bottom1",
        "inline_tag_bottom2",
        "inline_tag_top1",
@@ -63,148 +66,192 @@ def test_vault_creation(test_vault):
    assert vault.metadata.inline_metadata == {
        "bottom_key1": ["bottom_key1_value"],
        "bottom_key2": ["bottom_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "intext_key": ["intext_value"],
-        "shared_key1": ["shared_key1_value"],
+        "key📅": ["📅_key_value"],
+        "shared_key1": ["shared_key1_value", "shared_key1_value2"],
        "shared_key2": ["shared_key2_value2"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value_as_link"],
    }
    assert vault.metadata.frontmatter == {
        "author": ["author name"],
        "date_created": ["2022-12-22"],
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "ignored_frontmatter": ["ignore_me"],
-        "shared_key1": ["shared_key1_value"],
+        "shared_key1": ["shared_key1_value", "shared_key1_value3"],
        "shared_key2": ["shared_key2_value1"],
        "tags": [
            "frontmatter_tag1",
            "frontmatter_tag2",
            "frontmatter_tag3",
            "ignored_file_tag1",
            "shared_tag",
            "📅/frontmatter_tag3",
        ],
        "type": ["article", "note"],
    }


-def test_add_metadata(test_vault) -> None:
-    """Test adding metadata to the vault."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
def set_insert_location(test_vault):
    """Test setting a new insert location.

-    assert vault.add_metadata(MetadataType.FRONTMATTER, "new_key") == 3
    GIVEN a vault object
    WHEN the insert location is changed
    THEN the insert location is changed
    """
    vault = Vault(config=test_vault)

    assert vault.name == "vault"
    assert vault.insert_location == InsertLocation.TOP
    vault.insert_location = InsertLocation.BOTTOM
    assert vault.insert_location == InsertLocation.BOTTOM


def test_add_metadata_1(test_vault) -> None:
    """Test adding metadata to the vault.

    GIVEN a vault object
    WHEN a new metadata key is added
    THEN the metadata is added to the vault
    """
    vault = Vault(config=test_vault)

    assert vault.add_metadata(MetadataType.FRONTMATTER, "new_key") == 2
    assert vault.metadata.dict == {
        "author": ["author name"],
        "bottom_key1": ["bottom_key1_value"],
        "bottom_key2": ["bottom_key2_value"],
        "date_created": ["2022-12-22"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "ignored_frontmatter": ["ignore_me"],
        "intext_key": ["intext_value"],
        "key📅": ["📅_key_value"],
        "new_key": [],
-        "shared_key1": ["shared_key1_value"],
+        "shared_key1": [
+            "shared_key1_value",
+            "shared_key1_value2",
+            "shared_key1_value3",
+        ],
        "shared_key2": ["shared_key2_value1", "shared_key2_value2"],
        "tags": [
            "frontmatter_tag1",
            "frontmatter_tag2",
            "frontmatter_tag3",
            "ignored_file_tag1",
            "shared_tag",
            "📅/frontmatter_tag3",
        ],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value_as_link"],
        "type": ["article", "note"],
    }
    assert vault.metadata.frontmatter == {
        "author": ["author name"],
        "date_created": ["2022-12-22"],
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "ignored_frontmatter": ["ignore_me"],
        "new_key": [],
-        "shared_key1": ["shared_key1_value"],
+        "shared_key1": ["shared_key1_value", "shared_key1_value3"],
        "shared_key2": ["shared_key2_value1"],
        "tags": [
            "frontmatter_tag1",
            "frontmatter_tag2",
            "frontmatter_tag3",
            "ignored_file_tag1",
            "shared_tag",
            "📅/frontmatter_tag3",
        ],
        "type": ["article", "note"],
    }
-    assert vault.add_metadata(MetadataType.FRONTMATTER, "new_key2", "new_key2_value") == 3


def test_add_metadata_2(test_vault) -> None:
    """Test adding metadata to the vault.

    GIVEN a vault object
    WHEN a new metadata key and value is added
    THEN the metadata is added to the vault
    """
    vault = Vault(config=test_vault)
    assert vault.add_metadata(MetadataType.FRONTMATTER, "new_key2", "new_key2_value") == 2
    assert vault.metadata.dict == {
        "author": ["author name"],
        "bottom_key1": ["bottom_key1_value"],
        "bottom_key2": ["bottom_key2_value"],
        "date_created": ["2022-12-22"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "ignored_frontmatter": ["ignore_me"],
        "intext_key": ["intext_value"],
        "new_key": [],
        "key📅": ["📅_key_value"],
        "new_key2": ["new_key2_value"],
-        "shared_key1": ["shared_key1_value"],
+        "shared_key1": [
+            "shared_key1_value",
+            "shared_key1_value2",
+            "shared_key1_value3",
+        ],
        "shared_key2": ["shared_key2_value1", "shared_key2_value2"],
        "tags": [
            "frontmatter_tag1",
            "frontmatter_tag2",
            "frontmatter_tag3",
            "ignored_file_tag1",
            "shared_tag",
            "📅/frontmatter_tag3",
        ],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value_as_link"],
        "type": ["article", "note"],
    }
    assert vault.metadata.frontmatter == {
        "author": ["author name"],
        "date_created": ["2022-12-22"],
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "ignored_frontmatter": ["ignore_me"],
        "new_key": [],
        "new_key2": ["new_key2_value"],
-        "shared_key1": ["shared_key1_value"],
+        "shared_key1": ["shared_key1_value", "shared_key1_value3"],
        "shared_key2": ["shared_key2_value1"],
        "tags": [
            "frontmatter_tag1",
            "frontmatter_tag2",
            "frontmatter_tag3",
            "ignored_file_tag1",
            "shared_tag",
            "📅/frontmatter_tag3",
        ],
        "type": ["article", "note"],
    }


-def test_backup(test_vault, capsys):
-    """Test backing up the vault."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
def test_commit_changes_1(test_vault, tmp_path):
    """Test committing changes to content in the vault.

    GIVEN a vault object
    WHEN the commit_changes method is called
    THEN the changes are committed to the vault
    """
    vault = Vault(config=test_vault)

    content = Path(f"{tmp_path}/vault/test1.md").read_text()
    assert "new_key: new_key_value" not in content
    vault.add_metadata(MetadataType.FRONTMATTER, "new_key", "new_key_value")
    vault.commit_changes()
    committed_content = Path(f"{tmp_path}/vault/test1.md").read_text()
    assert "new_key: new_key_value" in committed_content


def test_commit_changes_2(test_vault, tmp_path):
    """Test committing changes to content in the vault in dry run mode.

    GIVEN a vault object
    WHEN dry_run is set to True
    THEN no changes are committed to the vault
    """
    vault = Vault(config=test_vault, dry_run=True)
    content = Path(f"{tmp_path}/vault/test1.md").read_text()
    assert "new_key: new_key_value" not in content

    vault.add_metadata(MetadataType.FRONTMATTER, "new_key", "new_key_value")
    vault.commit_changes()
    committed_content = Path(f"{tmp_path}/vault/test1.md").read_text()
    assert "new_key: new_key_value" not in committed_content


def test_backup_1(test_vault, capsys):
    """Test the backup method.

    GIVEN a vault object
    WHEN the backup method is called
    THEN the vault is backed up
    """
    vault = Vault(config=test_vault)

    vault.backup()

    captured = capsys.readouterr()
-    assert Path(f"{vault_path}.bak").exists() is True
+    assert vault.backup_path.exists() is True
    assert captured.out == Regex(r"SUCCESS +| backed up to")

    vault.info()
@@ -213,42 +260,15 @@ def test_backup(test_vault, capsys):
    assert captured.out == Regex(r"Backup path +\│[\s ]+/[\d\w]+")


-def test_commit(test_vault, tmp_path):
-    """Test committing changes to content in the vault."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
-    content = Path(f"{tmp_path}/vault/test1.md").read_text()
-    assert "new_key: new_key_value" not in content
def test_backup_2(test_vault, capsys):
    """Test the backup method.

-    vault.add_metadata(MetadataType.FRONTMATTER, "new_key", "new_key_value")
-    vault.commit_changes()
-    assert "new_key: new_key_value" not in content
    GIVEN a vault object
    WHEN dry_run is set to True and the backup method is called
    THEN the vault is not backed up
    """
    vault = Vault(config=test_vault, dry_run=True)


-def test_commit_dry_run(test_vault, tmp_path):
-    """Test committing changes to content in the vault in dry run mode."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config, dry_run=True)
-    content = Path(f"{tmp_path}/vault/test1.md").read_text()
-    assert "new_key: new_key_value" not in content
-
-    vault.add_metadata(MetadataType.FRONTMATTER, "new_key", "new_key_value")
-    vault.commit_changes()
-    assert "new_key: new_key_value" not in content
-
-
-def test_backup_dryrun(test_vault, capsys):
-    """Test backing up the vault."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config, dry_run=True)

    print(f"vault.dry_run: {vault.dry_run}")
    vault.backup()

    captured = capsys.readouterr()
@@ -256,12 +276,14 @@ def test_backup_dryrun(test_vault, capsys):
    assert captured.out == Regex(r"DRYRUN +| Backup up vault to")


-def test_delete_backup(test_vault, capsys):
-    """Test deleting the vault backup."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
def test_delete_backup_1(test_vault, capsys):
    """Test deleting the vault backup.

    GIVEN a vault object
    WHEN the delete_backup method is called
    THEN the backup is deleted
    """
    vault = Vault(config=test_vault)

    vault.backup()
    vault.delete_backup()
@@ -276,12 +298,14 @@ def test_delete_backup(test_vault, capsys):
    assert captured.out == Regex(r"Backup +\│ None")


-def test_delete_backup_dryrun(test_vault, capsys):
-    """Test deleting the vault backup."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config, dry_run=True)
def test_delete_backup_2(test_vault, capsys):
    """Test delete_backup method in dry run mode.

    GIVEN a vault object
    WHEN the dry_run is True and the delete_backup method is called
    THEN the backup is not deleted
    """
    vault = Vault(config=test_vault, dry_run=True)

    Path.mkdir(vault.backup_path)
    vault.delete_backup()
@@ -291,17 +315,17 @@ def test_delete_backup_dryrun(test_vault, capsys):
    assert vault.backup_path.exists() is True


-def test_delete_inline_tag(test_vault) -> None:
-    """Test deleting an inline tag."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
def test_delete_tag_1(test_vault) -> None:
    """Test delete_tag() method.

-    assert vault.delete_inline_tag("no tag") == 0
-    assert vault.delete_inline_tag("intext_tag2") == 2
    GIVEN a vault object
    WHEN the delete_tag method is called
    THEN the inline tag is deleted
    """
    vault = Vault(config=test_vault)

    assert vault.delete_tag("intext_tag2") == 1
    assert vault.metadata.tags == [
        "ignored_file_tag2",
        "inline_tag_bottom1",
        "inline_tag_bottom2",
        "inline_tag_top1",
@@ -311,51 +335,141 @@ def test_delete_inline_tag(test_vault) -> None:
    ]


-def test_delete_metadata(test_vault) -> None:
-    """Test deleting a metadata key/value."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
def test_delete_tag_2(test_vault) -> None:
    """Test delete_tag() method.

    GIVEN a vault object
    WHEN the delete_tag method is called with a tag that does not exist
    THEN no changes are made
    """
    vault = Vault(config=test_vault)

    assert vault.delete_tag("no tag") == 0


def test_delete_metadata_1(test_vault) -> None:
    """Test deleting a metadata key/value.

    GIVEN a vault object
    WHEN the delete_metadata method is called with a key and value
    THEN the specified metadata key/value is deleted
    """
    vault = Vault(config=test_vault)

    assert vault.delete_metadata("top_key1", "top_key1_value") == 1
    assert vault.metadata.dict["top_key1"] == []


def test_delete_metadata_2(test_vault) -> None:
    """Test deleting a metadata key/value.

    GIVEN a vault object
    WHEN the delete_metadata method is called with a key
    THEN the specified metadata key is deleted
    """
    vault = Vault(config=test_vault)

    assert vault.delete_metadata("top_key2") == 1
    assert "top_key2" not in vault.metadata.dict


def test_delete_metadata_3(test_vault) -> None:
    """Test deleting a metadata key/value.

    GIVEN a vault object
    WHEN the delete_metadata method is called with a key and/or value that does not exist
    THEN no changes are made
    """
    vault = Vault(config=test_vault)

    assert vault.delete_metadata("no key") == 0
    assert vault.delete_metadata("top_key1", "no_value") == 0

-    assert vault.delete_metadata("top_key1", "top_key1_value") == 2
-    assert vault.metadata.dict["top_key1"] == []
-
-    assert vault.delete_metadata("top_key2") == 2
-    assert "top_key2" not in vault.metadata.dict

def test_export_csv_1(tmp_path, test_vault):
    """Test exporting the vault to a CSV file.

-def test_export_csv(tmp_path, test_vault):
-    """Test exporting the vault to a CSV file."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
    GIVEN a vault object
    WHEN the export_metadata method is called with a path and export_format of csv
    THEN the vault metadata is exported to a CSV file
    """
    vault = Vault(config=test_vault)
    export_file = Path(f"{tmp_path}/export.csv")

-    vault.export_metadata(path=export_file, format="csv")
+    vault.export_metadata(path=export_file, export_format="csv")
    assert export_file.exists() is True
    assert "frontmatter,date_created,2022-12-22" in export_file.read_text()


def test_export_csv_2(tmp_path, test_vault):
    """Test exporting the vault to a CSV file.

    GIVEN a vault object
    WHEN the export_metadata method is called with a path that does not exist and export_format of csv
    THEN an error is raised
    """
    vault = Vault(config=test_vault)
    export_file = Path(f"{tmp_path}/does_not_exist/export.csv")

    with pytest.raises(typer.Exit):
        vault.export_metadata(path=export_file, export_format="csv")
    assert export_file.exists() is False


def test_export_json(tmp_path, test_vault):
-    """Test exporting the vault to a CSV file."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
    """Test exporting the vault to a JSON file.

    GIVEN a vault object
    WHEN the export_metadata method is called with a path and export_format of json
    THEN the vault metadata is exported to a JSON file
    """
    vault = Vault(config=test_vault)
    export_file = Path(f"{tmp_path}/export.json")

-    vault.export_metadata(path=export_file, format="json")
+    vault.export_metadata(path=export_file, export_format="json")
    assert export_file.exists() is True
    assert '"frontmatter": {' in export_file.read_text()


-def test_get_filtered_notes(sample_vault) -> None:
-    """Test filtering notes."""
def test_export_notes_to_csv_1(tmp_path, test_vault):
    """Test export_notes_to_csv() method.

    GIVEN a vault object
    WHEN the export_notes_to_csv method is called with a path
    THEN the notes are exported to a CSV file
    """
    vault = Vault(config=test_vault)
    export_file = Path(f"{tmp_path}/export.csv")
    vault.export_notes_to_csv(path=export_file)
    assert export_file.exists() is True
    assert "path,type,key,value" in export_file.read_text()
    assert "test1.md,frontmatter,shared_key1,shared_key1_value" in export_file.read_text()
    assert "test1.md,inline_metadata,shared_key1,shared_key1_value" in export_file.read_text()
    assert "test1.md,tag,,shared_tag" in export_file.read_text()
    assert "test1.md,frontmatter,tags,📅/frontmatter_tag3" in export_file.read_text()
    assert "test1.md,inline_metadata,key📅,📅_key_value" in export_file.read_text()


def test_export_notes_to_csv_2(test_vault):
    """Test export_notes_to_csv() method.

    GIVEN a vault object
    WHEN the export_notes_to_csv method is called with a path where the parent directory does not exist
    THEN an error is raised
    """
    vault = Vault(config=test_vault)
    export_file = Path("/I/do/not/exist/export.csv")
    with pytest.raises(typer.Exit):
        vault.export_notes_to_csv(path=export_file)


def test_get_filtered_notes_1(sample_vault) -> None:
    """Test filtering notes.

    GIVEN a vault object
    WHEN the get_filtered_notes method is called with a path filter
    THEN the notes in scope are filtered
    """
    vault_path = sample_vault
    config = Config(config_path="tests/fixtures/sample_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
@@ -370,21 +484,66 @@ def test_get_filtered_notes(sample_vault) -> None:
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 1


def test_get_filtered_notes_2(sample_vault) -> None:
    """Test filtering notes.

    GIVEN a vault object
    WHEN the get_filtered_notes method is called with a key filter
    THEN the notes in scope are filtered
    """
    vault_path = sample_vault
    config = Config(config_path="tests/fixtures/sample_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]

    filters = [VaultFilter(key_filter="on_one_note")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 1


def test_get_filtered_notes_3(sample_vault) -> None:
    """Test filtering notes.

    GIVEN a vault object
    WHEN the get_filtered_notes method is called with a key and a value filter
    THEN the notes in scope are filtered
    """
    vault_path = sample_vault
    config = Config(config_path="tests/fixtures/sample_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    filters = [VaultFilter(key_filter="type", value_filter="book")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 10


def test_get_filtered_notes_4(sample_vault) -> None:
    """Test filtering notes.

    GIVEN a vault object
    WHEN the get_filtered_notes method is called with a tag filter
    THEN the notes in scope are filtered
    """
    vault_path = sample_vault
    config = Config(config_path="tests/fixtures/sample_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    filters = [VaultFilter(tag_filter="brunch")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 1


def test_get_filtered_notes_5(sample_vault) -> None:
    """Test filtering notes.

    GIVEN a vault object
    WHEN the get_filtered_notes method is called with a tag and a path filter
    THEN the notes in scope are filtered
    """
    vault_path = sample_vault
    config = Config(config_path="tests/fixtures/sample_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    filters = [VaultFilter(tag_filter="brunch"), VaultFilter(path_filter="inbox")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
@@ -392,11 +551,13 @@ def test_get_filtered_notes(sample_vault) -> None:


def test_info(test_vault, capsys):
-    """Test printing vault information."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
+    """Test info() method.
+
+    GIVEN a vault object
+    WHEN the info method is called
+    THEN the vault info is printed
+    """
+    vault = Vault(config=test_vault)

    vault.info()
@@ -407,11 +568,13 @@ def test_info(test_vault, capsys):


def test_list_editable_notes(test_vault, capsys) -> None:
-    """Test listing editable notes."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
+    """Test list_editable_notes() method.
+
+    GIVEN a vault object
+    WHEN the list_editable_notes() method is called
+    THEN the editable notes in scope are printed
+    """
+    vault = Vault(config=test_vault)

    vault.list_editable_notes()
    captured = capsys.readouterr()
@@ -419,17 +582,29 @@ def test_list_editable_notes(test_vault, capsys) -> None:
    assert captured.out == Regex(r"\d +test1\.md")


-def test_rename_inline_tag(test_vault) -> None:
-    """Test renaming an inline tag."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
def test_move_inline_metadata_1(test_vault) -> None:
    """Test move_inline_metadata() method.

-    assert vault.rename_inline_tag("no tag", "new_tag") == 0
-    assert vault.rename_inline_tag("intext_tag2", "new_tag") == 2
    GIVEN a vault with inline metadata.
    WHEN the move_inline_metadata() method is called.
    THEN the inline metadata is moved to the top of the file.
    """
    vault = Vault(config=test_vault)

    assert vault.move_inline_metadata(location=InsertLocation.TOP) == 1


def test_rename_tag_1(test_vault) -> None:
    """Test rename_tag() method.

    GIVEN a vault object
    WHEN the rename_tag() method is called with a tag that is found
    THEN the inline tag is renamed
    """
    vault = Vault(config=test_vault)

    assert vault.rename_tag("intext_tag2", "new_tag") == 1
    assert vault.metadata.tags == [
        "ignored_file_tag2",
        "inline_tag_bottom1",
        "inline_tag_bottom2",
        "inline_tag_top1",
@@ -440,33 +615,165 @@ def test_rename_inline_tag(test_vault) -> None:
    ]


-def test_rename_metadata(test_vault) -> None:
-    """Test renaming a metadata key/value."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
def test_rename_tag_2(test_vault) -> None:
    """Test rename_tag() method.

    GIVEN a vault object
    WHEN the rename_tag() method is called with a tag that is not found
    THEN the inline tag is not renamed
    """
    vault = Vault(config=test_vault)

    assert vault.rename_tag("no tag", "new_tag") == 0


def test_rename_metadata_1(test_vault) -> None:
    """Test rename_metadata() method.

    GIVEN a vault object
    WHEN the rename_metadata() method is called with a key or key/value that is not found
    THEN the metadata is not renamed
    """
    vault = Vault(config=test_vault)

    assert vault.rename_metadata("no key", "new_key") == 0
    assert vault.rename_metadata("tags", "nonexistent_value", "new_vaule") == 0

-    assert vault.rename_metadata("tags", "frontmatter_tag1", "new_vaule") == 2

def test_rename_metadata_2(test_vault) -> None:
    """Test rename_metadata() method.

    GIVEN a vault object
    WHEN the rename_metadata() method is called with a key and no value
    THEN the metadata key is renamed
    """
    vault = Vault(config=test_vault)

    assert vault.rename_metadata("tags", "new_key") == 1
    assert "tags" not in vault.metadata.dict
    assert vault.metadata.dict["new_key"] == [
        "frontmatter_tag1",
        "frontmatter_tag2",
        "shared_tag",
        "📅/frontmatter_tag3",
    ]


def test_rename_metadata_3(test_vault) -> None:
    """Test rename_metadata() method.

    GIVEN a vault object
    WHEN the rename_metadata() method is called with a key and value
    THEN the metadata key/value is renamed
    """
    vault = Vault(config=test_vault)

    assert vault.rename_metadata("tags", "frontmatter_tag1", "new_vaule") == 1
    assert vault.metadata.dict["tags"] == [
        "frontmatter_tag2",
        "frontmatter_tag3",
        "ignored_file_tag1",
        "new_vaule",
        "shared_tag",
        "📅/frontmatter_tag3",
    ]

-    assert vault.rename_metadata("tags", "new_key") == 2
-    assert "tags" not in vault.metadata.dict
-    assert vault.metadata.dict["new_key"] == [
-        "frontmatter_tag2",
-        "frontmatter_tag3",
-        "ignored_file_tag1",
-        "new_vaule",
-        "shared_tag",
-        "📅/frontmatter_tag3",
-    ]

def test_transpose_metadata(test_vault) -> None:
    """Test transpose_metadata() method.

    GIVEN a vault object
    WHEN the transpose_metadata() method is called
    THEN the metadata is transposed
    """
    vault = Vault(config=test_vault)

    assert vault.transpose_metadata(begin=MetadataType.INLINE, end=MetadataType.FRONTMATTER) == 1

    assert vault.metadata.inline_metadata == {}
    assert vault.metadata.frontmatter == {
        "bottom_key1": ["bottom_key1_value"],
        "bottom_key2": ["bottom_key2_value"],
        "date_created": ["2022-12-22"],
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "intext_key": ["intext_value"],
        "key📅": ["📅_key_value"],
        "shared_key1": [
            "shared_key1_value",
            "shared_key1_value2",
            "shared_key1_value3",
        ],
        "shared_key2": ["shared_key2_value1", "shared_key2_value2"],
        "tags": [
            "frontmatter_tag1",
            "frontmatter_tag2",
            "shared_tag",
            "📅/frontmatter_tag3",
        ],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value_as_link"],
    }

    assert (
        vault.transpose_metadata(
            begin=MetadataType.INLINE, end=MetadataType.FRONTMATTER, location=InsertLocation.TOP
        )
        == 0
    )


def test_update_from_dict_1(test_vault):
    """Test update_from_dict() method.

    GIVEN a vault object and an update dictionary
    WHEN no dictionary keys match paths in the vault
    THEN no notes are updated and 0 is returned
    """
    vault = Vault(config=test_vault)
    update_dict = {
        "path1": {"type": "frontmatter", "key": "new_key", "value": "new_value"},
        "path2": {"type": "frontmatter", "key": "new_key", "value": "new_value"},
    }

    assert vault.update_from_dict(update_dict) == 0
    assert vault.get_changed_notes() == []


def test_update_from_dict_2(test_vault):
    """Test update_from_dict() method.

    GIVEN a vault object and an update dictionary
    WHEN the dictionary is empty
    THEN no notes are updated and 0 is returned
    """
    vault = Vault(config=test_vault)
    update_dict = {}

    assert vault.update_from_dict(update_dict) == 0
    assert vault.get_changed_notes() == []


def test_update_from_dict_3(test_vault):
    """Test update_from_dict() method.

    GIVEN a vault object and an update dictionary
    WHEN a dictionary key matches a path in the vault
    THEN the note is updated to match the dictionary values
    """
    vault = Vault(config=test_vault)

    update_dict = {
        "test1.md": [
            {"type": "frontmatter", "key": "new_key", "value": "new_value"},
            {"type": "inline_metadata", "key": "new_key2", "value": "new_value"},
            {"type": "tag", "key": "", "value": "new_tag"},
        ]
    }
    assert vault.update_from_dict(update_dict) == 1
    assert vault.get_changed_notes()[0].note_path.name == "test1.md"
    assert vault.get_changed_notes()[0].frontmatter.dict == {"new_key": ["new_value"]}
    assert vault.get_changed_notes()[0].inline_metadata.dict == {"new_key2": ["new_value"]}
    assert vault.get_changed_notes()[0].tags.list == ["new_tag"]
    assert vault.metadata.frontmatter == {"new_key": ["new_value"]}
    assert vault.metadata.inline_metadata == {"new_key2": ["new_value"]}
    assert vault.metadata.tags == ["new_tag"]
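Note how the dictionary that validate_csv_bulk_imports() returns in the utilities tests has exactly the shape update_from_dict() consumes here, so a bulk import reduces to validate, update, commit. A hypothetical glue function showing that composition (names taken from the tests above; the actual CLI wiring may differ):

from pathlib import Path

def bulk_import_sketch(vault, csv_path: Path) -> int:
    """Validate a CSV of metadata edits, apply it to the vault, and commit."""
    note_paths = [note.note_path.name for note in vault.all_notes]
    csv_dict = validate_csv_bulk_imports(csv_path=csv_path, note_paths=note_paths)
    num_changed = vault.update_from_dict(csv_dict)
    vault.commit_changes()
    return num_changed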