mirror of
https://github.com/natelandau/obsidian-metadata.git
synced 2025-11-16 08:53:48 -05:00
Compare commits
13 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
c75d18200e | ||
|
|
ffdac91537 | ||
|
|
e8f408ee33 | ||
|
|
1dd3ddfb22 | ||
|
|
8968127c95 | ||
|
|
4bf1acb775 | ||
|
|
98fa996462 | ||
|
|
fdb1b8b5bc | ||
|
|
08999cb055 | ||
|
|
4e053bda29 | ||
|
|
fa568de369 | ||
|
|
696e19f3e2 | ||
|
|
7b762f1a11 |
24
CHANGELOG.md
24
CHANGELOG.md
@@ -1,3 +1,27 @@
|
||||
## v0.11.0 (2023-03-24)
|
||||
|
||||
### Feat
|
||||
|
||||
- add `--import-csv` option to cli
|
||||
|
||||
## v0.10.0 (2023-03-21)
|
||||
|
||||
### Feat
|
||||
|
||||
- add `--export-template` cli option
|
||||
|
||||
### Fix
|
||||
|
||||
- `--export-template` correctly exports all notes
|
||||
- `--export-csv` exports csv not json
|
||||
- **csv-import**: fail if `type` does not validate
|
||||
|
||||
### Refactor
|
||||
|
||||
- pave the way for non-regex key/value deletions
|
||||
- remove unused code
|
||||
- cleanup rename and delete from dict functions
|
||||
|
||||
## v0.9.0 (2023-03-20)
|
||||
|
||||
### Feat
|
||||
|
||||
25
README.md
25
README.md
@@ -25,8 +25,10 @@ pip install obsidian-metadata
|
||||
|
||||
- `--config-file`: Specify a custom configuration file location
|
||||
- `--dry-run`: Make no destructive changes
|
||||
- `--import-csv`: Import a CSV file with bulk updates
|
||||
- `--export-csv`: Specify a path and create a CSV export of all metadata
|
||||
- `--export-json`: Specify a path and create a JSON export of all metadata
|
||||
- `--export-template`: Specify a path and export all notes with their associated metadata to a CSV file for use as a bulk import template
|
||||
- `--help`: Shows interactive help and exits
|
||||
- `--log-file`: Specify a log file location
|
||||
- `--log-to-file`: Will log to a file
|
||||
@@ -64,7 +66,7 @@ Once installed, run `obsidian-metadata` in your terminal to enter an interactive
|
||||
- **List and clear filters**: List all current filters and clear one or all
|
||||
- **List notes in scope**: List notes that will be processed.
|
||||
|
||||
**Bulk Edit Metadata** from a CSV file (See the _making bulk edits_ section below)
|
||||
**Bulk Edit Metadata** from a CSV file (See the _[Make Bulk Updates](https://github.com/natelandau/obsidian-metadata#make-bulk-updates)_ section below)
|
||||
|
||||
**Add Metadata**: Add new metadata to your vault.
|
||||
|
||||
@@ -139,7 +141,7 @@ Below is an example with two vaults.
|
||||
|
||||
To bypass the configuration file and specify a vault to use at runtime use the `--vault-path` option.
|
||||
|
||||
### Making bulk edits
|
||||
### Make Bulk Updates
|
||||
|
||||
Bulk edits are supported by importing a CSV file containing the following columns. Column headers must be lowercase.
|
||||
|
||||
@@ -162,12 +164,17 @@ folder 1/note1.md,tag,,tag2
|
||||
|
||||
How bulk imports work:
|
||||
|
||||
- Only notes which match the path in the CSV file are updated
|
||||
- Affected notes will have ALL of their metadata changed to reflect the values in the CSV file
|
||||
- Existing metadata in an affected note will be rewritten. This may result in its location and/or formatting within the note being changed
|
||||
- inline tags ignore any value added to the `key` column
|
||||
- **Only notes which match the path in the CSV file are updated**
|
||||
- **Affected notes will have ALL of their metadata changed** to reflect the values in the CSV file
|
||||
- **Existing metadata in a matching note will be rewritten**. This may result in its location and/or formatting within the note being changed
|
||||
- Inline tags ignore any value added to the `key` column
|
||||
|
||||
You can export all your notes with their associated metadata in this format from the "Export Metadata" section of the script to be used as a template for your bulk changes.
|
||||
Create a CSV template for making bulk updates containing all your notes and their associated metadata by
|
||||
|
||||
1. Using the `--export-template` cli command; or
|
||||
2. Selecting the `Metadata by note` option within the `Export Metadata` section of the app
|
||||
|
||||
Once you have a template created you can import it using the `--import-csv` flag or by navigating to the `Import bulk changes from CSV` option.
|
||||
|
||||
# Contributing
|
||||
|
||||
@@ -200,7 +207,3 @@ There are two ways to contribute to this project.
|
||||
- Run `poetry add {package}` from within the development environment to install a run time dependency and add it to `pyproject.toml` and `poetry.lock`.
|
||||
- Run `poetry remove {package}` from within the development environment to uninstall a run time dependency and remove it from `pyproject.toml` and `poetry.lock`.
|
||||
- Run `poetry update` from within the development environment to upgrade all dependencies to the latest versions allowed by `pyproject.toml`.
|
||||
|
||||
```
|
||||
|
||||
```
|
||||
|
||||
238
poetry.lock
generated
238
poetry.lock
generated
@@ -1,10 +1,10 @@
|
||||
# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand.
|
||||
# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand.
|
||||
|
||||
[[package]]
|
||||
name = "argcomplete"
|
||||
version = "2.0.6"
|
||||
description = "Bash tab completion for argparse"
|
||||
category = "main"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
@@ -100,7 +100,7 @@ files = [
|
||||
name = "charset-normalizer"
|
||||
version = "2.1.1"
|
||||
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
|
||||
category = "main"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6.0"
|
||||
files = [
|
||||
@@ -142,7 +142,7 @@ files = [
|
||||
name = "commitizen"
|
||||
version = "2.42.1"
|
||||
description = "Python commitizen client tool"
|
||||
category = "main"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6.2,<4.0.0"
|
||||
files = [
|
||||
@@ -231,7 +231,7 @@ toml = ["tomli"]
|
||||
name = "decli"
|
||||
version = "0.5.2"
|
||||
description = "Minimal, easy-to-use, declarative cli tool"
|
||||
category = "main"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
@@ -283,19 +283,19 @@ testing = ["pre-commit"]
|
||||
|
||||
[[package]]
|
||||
name = "filelock"
|
||||
version = "3.10.0"
|
||||
version = "3.10.4"
|
||||
description = "A platform independent file lock."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "filelock-3.10.0-py3-none-any.whl", hash = "sha256:e90b34656470756edf8b19656785c5fea73afa1953f3e1b0d645cef11cab3182"},
|
||||
{file = "filelock-3.10.0.tar.gz", hash = "sha256:3199fd0d3faea8b911be52b663dfccceb84c95949dd13179aa21436d1a79c4ce"},
|
||||
{file = "filelock-3.10.4-py3-none-any.whl", hash = "sha256:6d332dc5c896f18ba93a21d987155e97c434a96d3fe4042ca70d0b3b46e3b470"},
|
||||
{file = "filelock-3.10.4.tar.gz", hash = "sha256:9fc1734dbddcdcd4aaa02c160dd94db5272b92dfa859b44ec8df28e160b751f0"},
|
||||
]
|
||||
|
||||
[package.extras]
|
||||
docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
|
||||
testing = ["covdefaults (>=2.3)", "coverage (>=7.2.1)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
|
||||
testing = ["covdefaults (>=2.3)", "coverage (>=7.2.2)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
|
||||
|
||||
[[package]]
|
||||
name = "identify"
|
||||
@@ -354,7 +354,7 @@ tests = ["pytest", "pytest-cov", "pytest-mock"]
|
||||
name = "jinja2"
|
||||
version = "3.1.2"
|
||||
description = "A very fast and expressive template engine."
|
||||
category = "main"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
@@ -416,7 +416,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
|
||||
name = "markupsafe"
|
||||
version = "2.1.2"
|
||||
description = "Safely add untrusted strings to HTML/XML markup."
|
||||
category = "main"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
@@ -562,7 +562,7 @@ setuptools = "*"
|
||||
name = "packaging"
|
||||
version = "23.0"
|
||||
description = "Core utilities for Python packages"
|
||||
category = "main"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
@@ -596,14 +596,14 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "pdoc"
|
||||
version = "13.0.0"
|
||||
version = "13.0.1"
|
||||
description = "API Documentation for Python Projects"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "pdoc-13.0.0-py3-none-any.whl", hash = "sha256:f9088b1c10f3296f46a08796e05e307470af5f4253f71d536781f6c305baf912"},
|
||||
{file = "pdoc-13.0.0.tar.gz", hash = "sha256:aadbf6c757c6e65c4754d6c26c4eb6c1bf8c7a9fb893f1fbe5a7b879dde59e46"},
|
||||
{file = "pdoc-13.0.1-py3-none-any.whl", hash = "sha256:16a24914280ed318896ad798674e2b0d11832297fdea95632fa472e3d171e247"},
|
||||
{file = "pdoc-13.0.1.tar.gz", hash = "sha256:4d84056847728203b8789ca8a8d0c8003f25002b3caef3365f6f21a1e4228a1b"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -648,14 +648,14 @@ testing = ["pytest", "pytest-benchmark"]
|
||||
|
||||
[[package]]
|
||||
name = "poethepoet"
|
||||
version = "0.18.1"
|
||||
version = "0.19.0"
|
||||
description = "A task runner that works well with poetry."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "poethepoet-0.18.1-py3-none-any.whl", hash = "sha256:e85727bf6f4a10bf6c1a43026bdeb40df689bea3c4682d03cbe531cabc8f2ba6"},
|
||||
{file = "poethepoet-0.18.1.tar.gz", hash = "sha256:5f3566b14c2f5dccdfbc3bb26f0096006b38dc0b9c74bd4f8dd1eba7b0e29f6a"},
|
||||
{file = "poethepoet-0.19.0-py3-none-any.whl", hash = "sha256:87038be589077e4b407050a9da644d9cd9e4076ccfc8abc7f855cf6870d5c6c2"},
|
||||
{file = "poethepoet-0.19.0.tar.gz", hash = "sha256:897eb85ec15876d79befc7d19d4c80ce7c8b214d1bb0dcfec640abd81616bfed"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -839,7 +839,7 @@ testing = ["filelock"]
|
||||
name = "pyyaml"
|
||||
version = "6.0"
|
||||
description = "YAML parser and emitter for Python"
|
||||
category = "main"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
files = [
|
||||
@@ -905,100 +905,72 @@ docs = ["Sphinx (>=3.3,<4.0)", "sphinx-autobuild (>=2020.9.1,<2021.0.0)", "sphin
|
||||
|
||||
[[package]]
|
||||
name = "regex"
|
||||
version = "2022.10.31"
|
||||
version = "2023.3.23"
|
||||
description = "Alternative regular expression module, to replace re."
|
||||
category = "main"
|
||||
optional = false
|
||||
python-versions = ">=3.6"
|
||||
python-versions = ">=3.8"
|
||||
files = [
|
||||
{file = "regex-2022.10.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8ff454ef0bb061e37df03557afda9d785c905dab15584860f982e88be73015f"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eba476b1b242620c266edf6325b443a2e22b633217a9835a52d8da2b5c051f9"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0e5af9a9effb88535a472e19169e09ce750c3d442fb222254a276d77808620b"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d03fe67b2325cb3f09be029fd5da8df9e6974f0cde2c2ac6a79d2634e791dd57"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9d0b68ac1743964755ae2d89772c7e6fb0118acd4d0b7464eaf3921c6b49dd4"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a45b6514861916c429e6059a55cf7db74670eaed2052a648e3e4d04f070e001"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b0886885f7323beea6f552c28bff62cbe0983b9fbb94126531693ea6c5ebb90"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5aefb84a301327ad115e9d346c8e2760009131d9d4b4c6b213648d02e2abe144"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:702d8fc6f25bbf412ee706bd73019da5e44a8400861dfff7ff31eb5b4a1276dc"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a3c1ebd4ed8e76e886507c9eddb1a891673686c813adf889b864a17fafcf6d66"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:50921c140561d3db2ab9f5b11c5184846cde686bb5a9dc64cae442926e86f3af"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7db345956ecce0c99b97b042b4ca7326feeec6b75facd8390af73b18e2650ffc"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:763b64853b0a8f4f9cfb41a76a4a85a9bcda7fdda5cb057016e7706fde928e66"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-win32.whl", hash = "sha256:44136355e2f5e06bf6b23d337a75386371ba742ffa771440b85bed367c1318d1"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-win_amd64.whl", hash = "sha256:bfff48c7bd23c6e2aec6454aaf6edc44444b229e94743b34bdcdda2e35126cf5"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b4b1fe58cd102d75ef0552cf17242705ce0759f9695334a56644ad2d83903fe"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:542e3e306d1669b25936b64917285cdffcd4f5c6f0247636fec037187bd93542"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c27cc1e4b197092e50ddbf0118c788d9977f3f8f35bfbbd3e76c1846a3443df7"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8e38472739028e5f2c3a4aded0ab7eadc447f0d84f310c7a8bb697ec417229e"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76c598ca73ec73a2f568e2a72ba46c3b6c8690ad9a07092b18e48ceb936e9f0c"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c28d3309ebd6d6b2cf82969b5179bed5fefe6142c70f354ece94324fa11bf6a1"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9af69f6746120998cd9c355e9c3c6aec7dff70d47247188feb4f829502be8ab4"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a5f9505efd574d1e5b4a76ac9dd92a12acb2b309551e9aa874c13c11caefbe4f"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5ff525698de226c0ca743bfa71fc6b378cda2ddcf0d22d7c37b1cc925c9650a5"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4fe7fda2fe7c8890d454f2cbc91d6c01baf206fbc96d89a80241a02985118c0c"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2cdc55ca07b4e70dda898d2ab7150ecf17c990076d3acd7a5f3b25cb23a69f1c"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:44a6c2f6374e0033873e9ed577a54a3602b4f609867794c1a3ebba65e4c93ee7"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-win32.whl", hash = "sha256:d8716f82502997b3d0895d1c64c3b834181b1eaca28f3f6336a71777e437c2af"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-win_amd64.whl", hash = "sha256:61edbca89aa3f5ef7ecac8c23d975fe7261c12665f1d90a6b1af527bba86ce61"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a069c8483466806ab94ea9068c34b200b8bfc66b6762f45a831c4baaa9e8cdd"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d26166acf62f731f50bdd885b04b38828436d74e8e362bfcb8df221d868b5d9b"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac741bf78b9bb432e2d314439275235f41656e189856b11fb4e774d9f7246d81"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75f591b2055523fc02a4bbe598aa867df9e953255f0b7f7715d2a36a9c30065c"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bddd61d2a3261f025ad0f9ee2586988c6a00c780a2fb0a92cea2aa702c54"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef4163770525257876f10e8ece1cf25b71468316f61451ded1a6f44273eedeb5"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7b280948d00bd3973c1998f92e22aa3ecb76682e3a4255f33e1020bd32adf443"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d0213671691e341f6849bf33cd9fad21f7b1cb88b89e024f33370733fec58742"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:22e7ebc231d28393dfdc19b185d97e14a0f178bedd78e85aad660e93b646604e"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8ad241da7fac963d7573cc67a064c57c58766b62a9a20c452ca1f21050868dfa"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:586b36ebda81e6c1a9c5a5d0bfdc236399ba6595e1397842fd4a45648c30f35e"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0653d012b3bf45f194e5e6a41df9258811ac8fc395579fa82958a8b76286bea4"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-win32.whl", hash = "sha256:144486e029793a733e43b2e37df16a16df4ceb62102636ff3db6033994711066"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-win_amd64.whl", hash = "sha256:c14b63c9d7bab795d17392c7c1f9aaabbffd4cf4387725a0ac69109fb3b550c6"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4cac3405d8dda8bc6ed499557625585544dd5cbf32072dcc72b5a176cb1271c8"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23cbb932cc53a86ebde0fb72e7e645f9a5eec1a5af7aa9ce333e46286caef783"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74bcab50a13960f2a610cdcd066e25f1fd59e23b69637c92ad470784a51b1347"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d680ef3e4d405f36f0d6d1ea54e740366f061645930072d39bca16a10d8c93"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6910b56b700bea7be82c54ddf2e0ed792a577dfaa4a76b9af07d550af435c6"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:659175b2144d199560d99a8d13b2228b85e6019b6e09e556209dfb8c37b78a11"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1ddf14031a3882f684b8642cb74eea3af93a2be68893901b2b387c5fd92a03ec"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b683e5fd7f74fb66e89a1ed16076dbab3f8e9f34c18b1979ded614fe10cdc4d9"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2bde29cc44fa81c0a0c8686992c3080b37c488df167a371500b2a43ce9f026d1"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4919899577ba37f505aaebdf6e7dc812d55e8f097331312db7f1aab18767cce8"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:9c94f7cc91ab16b36ba5ce476f1904c91d6c92441f01cd61a8e2729442d6fcf5"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae1e96785696b543394a4e3f15f3f225d44f3c55dafe3f206493031419fedf95"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-win32.whl", hash = "sha256:c670f4773f2f6f1957ff8a3962c7dd12e4be54d05839b216cb7fd70b5a1df394"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-win_amd64.whl", hash = "sha256:8e0caeff18b96ea90fc0eb6e3bdb2b10ab5b01a95128dfeccb64a7238decf5f0"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:131d4be09bea7ce2577f9623e415cab287a3c8e0624f778c1d955ec7c281bd4d"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e613a98ead2005c4ce037c7b061f2409a1a4e45099edb0ef3200ee26ed2a69a8"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052b670fafbe30966bbe5d025e90b2a491f85dfe5b2583a163b5e60a85a321ad"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa62a07ac93b7cb6b7d0389d8ef57ffc321d78f60c037b19dfa78d6b17c928ee"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5352bea8a8f84b89d45ccc503f390a6be77917932b1c98c4cdc3565137acc714"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f61c9944f0be2dc2b75689ba409938c14876c19d02f7585af4460b6a21403e"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29c04741b9ae13d1e94cf93fca257730b97ce6ea64cfe1eba11cf9ac4e85afb6"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:543883e3496c8b6d58bd036c99486c3c8387c2fc01f7a342b760c1ea3158a318"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7a8b43ee64ca8f4befa2bea4083f7c52c92864d8518244bfa6e88c751fa8fff"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6a9a19bea8495bb419dc5d38c4519567781cd8d571c72efc6aa959473d10221a"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6ffd55b5aedc6f25fd8d9f905c9376ca44fcf768673ffb9d160dd6f409bfda73"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4bdd56ee719a8f751cf5a593476a441c4e56c9b64dc1f0f30902858c4ef8771d"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ca88da1bd78990b536c4a7765f719803eb4f8f9971cc22d6ca965c10a7f2c4c"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-win32.whl", hash = "sha256:5a260758454580f11dd8743fa98319bb046037dfab4f7828008909d0aa5292bc"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-win_amd64.whl", hash = "sha256:5e6a5567078b3eaed93558842346c9d678e116ab0135e22eb72db8325e90b453"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5217c25229b6a85049416a5c1e6451e9060a1edcf988641e309dbe3ab26d3e49"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4bf41b8b0a80708f7e0384519795e80dcb44d7199a35d52c15cc674d10b3081b"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf0da36a212978be2c2e2e2d04bdff46f850108fccc1851332bcae51c8907cc"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d403d781b0e06d2922435ce3b8d2376579f0c217ae491e273bab8d092727d244"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a37d51fa9a00d265cf73f3de3930fa9c41548177ba4f0faf76e61d512c774690"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4f781ffedd17b0b834c8731b75cce2639d5a8afe961c1e58ee7f1f20b3af185"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d243b36fbf3d73c25e48014961e83c19c9cc92530516ce3c43050ea6276a2ab7"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:370f6e97d02bf2dd20d7468ce4f38e173a124e769762d00beadec3bc2f4b3bc4"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:597f899f4ed42a38df7b0e46714880fb4e19a25c2f66e5c908805466721760f5"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7dbdce0c534bbf52274b94768b3498abdf675a691fec5f751b6057b3030f34c1"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:22960019a842777a9fa5134c2364efaed5fbf9610ddc5c904bd3a400973b0eb8"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7f5a3ffc731494f1a57bd91c47dc483a1e10048131ffb52d901bfe2beb6102e8"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7ef6b5942e6bfc5706301a18a62300c60db9af7f6368042227ccb7eeb22d0892"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-win32.whl", hash = "sha256:395161bbdbd04a8333b9ff9763a05e9ceb4fe210e3c7690f5e68cedd3d65d8e1"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-win_amd64.whl", hash = "sha256:957403a978e10fb3ca42572a23e6f7badff39aa1ce2f4ade68ee452dc6807692"},
|
||||
{file = "regex-2022.10.31.tar.gz", hash = "sha256:a3a98921da9a1bf8457aeee6a551948a83601689e5ecdd736894ea9bbec77e83"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:845a5e2d84389c4ddada1a9b95c055320070f18bb76512608374aca00d22eca8"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87d9951f5a538dd1d016bdc0dcae59241d15fa94860964833a54d18197fcd134"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37ae17d3be44c0b3f782c28ae9edd8b47c1f1776d4cabe87edc0b98e1f12b021"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0b8eb1e3bca6b48dc721818a60ae83b8264d4089a4a41d62be6d05316ec38e15"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df45fac182ebc3c494460c644e853515cc24f5ad9da05f8ffb91da891bfee879"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7006105b10b59971d3b248ad75acc3651c7e4cf54d81694df5a5130a3c3f7ea"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93f3f1aa608380fe294aa4cb82e2afda07a7598e828d0341e124b8fd9327c715"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:787954f541ab95d8195d97b0b8cf1dc304424adb1e07365967e656b92b38a699"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:20abe0bdf03630fe92ccafc45a599bca8b3501f48d1de4f7d121153350a2f77d"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11d00c31aeab9a6e0503bc77e73ed9f4527b3984279d997eb145d7c7be6268fd"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d5bbe0e1511b844794a3be43d6c145001626ba9a6c1db8f84bdc724e91131d9d"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ea3c0cb56eadbf4ab2277e7a095676370b3e46dbfc74d5c383bd87b0d6317910"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d895b4c863059a4934d3e874b90998df774644a41b349ebb330f85f11b4ef2c0"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-win32.whl", hash = "sha256:9d764514d19b4edcc75fd8cb1423448ef393e8b6cbd94f38cab983ab1b75855d"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-win_amd64.whl", hash = "sha256:11d1f2b7a0696dc0310de0efb51b1f4d813ad4401fe368e83c0c62f344429f98"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8a9c63cde0eaa345795c0fdeb19dc62d22e378c50b0bc67bf4667cd5b482d98b"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dd7200b4c27b68cf9c9646da01647141c6db09f48cc5b51bc588deaf8e98a797"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22720024b90a6ba673a725dcc62e10fb1111b889305d7c6b887ac7466b74bedb"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b190a339090e6af25f4a5fd9e77591f6d911cc7b96ecbb2114890b061be0ac1"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e76b6fc0d8e9efa39100369a9b3379ce35e20f6c75365653cf58d282ad290f6f"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7868b8f218bf69a2a15402fde08b08712213a1f4b85a156d90473a6fb6b12b09"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2472428efc4127374f494e570e36b30bb5e6b37d9a754f7667f7073e43b0abdd"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c37df2a060cb476d94c047b18572ee2b37c31f831df126c0da3cd9227b39253d"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4479f9e2abc03362df4045b1332d4a2b7885b245a30d4f4b051c4083b97d95d8"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2396e0678167f2d0c197da942b0b3fb48fee2f0b5915a0feb84d11b6686afe6"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75f288c60232a5339e0ff2fa05779a5e9c74e9fc085c81e931d4a264501e745b"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c869260aa62cee21c5eb171a466c0572b5e809213612ef8d495268cd2e34f20d"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-win32.whl", hash = "sha256:25f0532fd0c53e96bad84664171969de9673b4131f2297f1db850d3918d58858"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-win_amd64.whl", hash = "sha256:5ccfafd98473e007cebf7da10c1411035b7844f0f204015efd050601906dbb53"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6572ff287176c0fb96568adb292674b421fa762153ed074d94b1d939ed92c253"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a610e0adfcb0fc84ea25f6ea685e39e74cbcd9245a72a9a7aab85ff755a5ed27"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086afe222d58b88b62847bdbd92079b4699350b4acab892f88a935db5707c790"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79e29fd62fa2f597a6754b247356bda14b866131a22444d67f907d6d341e10f3"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c07ce8e9eee878a48ebeb32ee661b49504b85e164b05bebf25420705709fdd31"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b036f401895e854de9fefe061518e78d506d8a919cc250dc3416bca03f6f9a"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78ac8dd8e18800bb1f97aad0d73f68916592dddf233b99d2b5cabc562088503a"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:539dd010dc35af935b32f248099e38447bbffc10b59c2b542bceead2bed5c325"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9bf4a5626f2a0ea006bf81e8963f498a57a47d58907eaa58f4b3e13be68759d8"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf86b4328c204c3f315074a61bc1c06f8a75a8e102359f18ce99fbcbbf1951f0"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2848bf76673c83314068241c8d5b7fa9ad9bed866c979875a0e84039349e8fa7"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c125a02d22c555e68f7433bac8449992fa1cead525399f14e47c2d98f2f0e467"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cd1671e9d5ac05ce6aa86874dd8dfa048824d1dbe73060851b310c6c1a201a96"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-win32.whl", hash = "sha256:fffe57312a358be6ec6baeb43d253c36e5790e436b7bf5b7a38df360363e88e9"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-win_amd64.whl", hash = "sha256:dbb3f87e15d3dd76996d604af8678316ad2d7d20faa394e92d9394dfd621fd0c"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c88e8c226473b5549fe9616980ea7ca09289246cfbdf469241edf4741a620004"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6560776ec19c83f3645bbc5db64a7a5816c9d8fb7ed7201c5bcd269323d88072"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b1fc2632c01f42e06173d8dd9bb2e74ab9b0afa1d698058c867288d2c7a31f3"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdf7ad455f1916b8ea5cdbc482d379f6daf93f3867b4232d14699867a5a13af7"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5fc33b27b1d800fc5b78d7f7d0f287e35079ecabe68e83d46930cf45690e1c8c"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c49552dc938e3588f63f8a78c86f3c9c75301e813bca0bef13bdb4b87ccf364"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e152461e9a0aedec7d37fc66ec0fa635eca984777d3d3c3e36f53bf3d3ceb16e"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:db034255e72d2995cf581b14bb3fc9c00bdbe6822b49fcd4eef79e1d5f232618"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:55ae114da21b7a790b90255ea52d2aa3a0d121a646deb2d3c6a3194e722fc762"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ef3f528fe1cc3d139508fe1b22523745aa77b9d6cb5b0bf277f48788ee0b993f"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a81c9ec59ca2303acd1ccd7b9ac409f1e478e40e96f8f79b943be476c5fdb8bb"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cde09c4fdd070772aa2596d97e942eb775a478b32459e042e1be71b739d08b77"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3cd9f5dd7b821f141d3a6ca0d5d9359b9221e4f051ca3139320adea9f1679691"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-win32.whl", hash = "sha256:7304863f3a652dab5e68e6fb1725d05ebab36ec0390676d1736e0571ebb713ef"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-win_amd64.whl", hash = "sha256:54c3fa855a3f7438149de3211738dd9b5f0c733f48b54ae05aa7fce83d48d858"},
|
||||
{file = "regex-2023.3.23.tar.gz", hash = "sha256:dc80df325b43ffea5cdea2e3eaa97a44f3dd298262b1c7fe9dbb2a9522b956a7"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1087,29 +1059,29 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "ruff"
|
||||
version = "0.0.257"
|
||||
version = "0.0.259"
|
||||
description = "An extremely fast Python linter, written in Rust."
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
{file = "ruff-0.0.257-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:7280640690c1d0046b20e0eb924319a89d8e22925d7d232180ce31196e7478f8"},
|
||||
{file = "ruff-0.0.257-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:4582b73da61ab410ffda35b2987a6eacb33f18263e1c91810f0b9779ec4f41a9"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5acae9878f1136893e266348acdb9d30dfae23c296d3012043816432a5abdd51"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d9f0912d045eee15e8e02e335c16d7a7f9fb6821aa5eb1628eeb5bbfa3d88908"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a9542c34ee5298b31be6c6ba304f14b672dcf104846ee65adb2466d3e325870"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3464f1ad4cea6c4b9325da13ae306bd22bf15d226e18d19c52db191b1f4355ac"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a54bfd559e558ee0df2a2f3756423fe6a9de7307bc290d807c3cdf351cb4c24"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3438fd38446e1a0915316f4085405c9feca20fe00a4b614995ab7034dbfaa7ff"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:358cc2b547bd6451dcf2427b22a9c29a2d9c34e66576c693a6381c5f2ed3011d"},
|
||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:783390f1e94a168c79d7004426dae3e4ae2999cc85f7d00fdd86c62262b71854"},
|
||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:aaa3b5b6929c63a854b6bcea7a229453b455ab26337100b2905fae4523ca5667"},
|
||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4ecd7a84db4816df2dcd0f11c5365a9a2cf4fa70a19b3ac161b7b0bfa592959d"},
|
||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3db8d77d5651a2c0d307102d717627a025d4488d406f54c2764b21cfbe11d822"},
|
||||
{file = "ruff-0.0.257-py3-none-win32.whl", hash = "sha256:d2c8755fa4f6c5e5ec032ad341ca3beeecd16786e12c3f26e6b0cc40418ae998"},
|
||||
{file = "ruff-0.0.257-py3-none-win_amd64.whl", hash = "sha256:3cec07d6fecb1ebbc45ea8eeb1047b929caa2f7dfb8dd4b0e1869ff789326da5"},
|
||||
{file = "ruff-0.0.257-py3-none-win_arm64.whl", hash = "sha256:352f1bdb9b433b3b389aee512ffb0b82226ae1e25b3d92e4eaf0e7be6b1b6f6a"},
|
||||
{file = "ruff-0.0.257.tar.gz", hash = "sha256:fedfd06a37ddc17449203c3e38fc83fb68de7f20b5daa0ee4e60d3599b38bab0"},
|
||||
{file = "ruff-0.0.259-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:f3938dc45e2a3f818e9cbd53007265c22246fbfded8837b2c563bf0ebde1a226"},
|
||||
{file = "ruff-0.0.259-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:22e1e35bf5f12072cd644d22afd9203641ccf258bc14ff91aa1c43dc14f6047d"},
|
||||
{file = "ruff-0.0.259-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2fb20e89e85d147c85caa807707a1488bccc1f3854dc3d53533e89b52a0c5ff"},
|
||||
{file = "ruff-0.0.259-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:49e903bcda19f6bb0725a962c058eb5d61f40d84ef52ed53b61939b69402ab4e"},
|
||||
{file = "ruff-0.0.259-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71f0ef1985e9a6696fa97da8459917fa34bdaa2c16bd33bd5edead585b7d44f7"},
|
||||
{file = "ruff-0.0.259-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7cfef26619cba184d59aa7fa17b48af5891d51fc0b755a9bc533478a10d4d066"},
|
||||
{file = "ruff-0.0.259-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79b02fa17ec1fd8d306ae302cb47fb614b71e1f539997858243769bcbe78c6d9"},
|
||||
{file = "ruff-0.0.259-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:428507fb321b386dda70d66cd1a8aa0abf51d7c197983d83bb9e4fa5ee60300b"},
|
||||
{file = "ruff-0.0.259-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5fbaea9167f1852757f02133e5daacdb8c75b3431343205395da5b10499927a"},
|
||||
{file = "ruff-0.0.259-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:40ae87f2638484b7e8a7567b04a7af719f1c484c5bf132038b702bb32e1f6577"},
|
||||
{file = "ruff-0.0.259-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:29e2b77b7d5da6a7dd5cf9b738b511355c5734ece56f78e500d4b5bffd58c1a0"},
|
||||
{file = "ruff-0.0.259-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b3c1beacf6037e7f0781d4699d9a2dd4ba2462f475be5b1f45cf84c4ba3c69d"},
|
||||
{file = "ruff-0.0.259-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:daaea322e7e85f4c13d82be9536309e1c4b8b9851bb0cbc7eeb15d490fd46bf9"},
|
||||
{file = "ruff-0.0.259-py3-none-win32.whl", hash = "sha256:38704f151323aa5858370a2f792e122cc25e5d1aabe7d42ceeab83da18f0b456"},
|
||||
{file = "ruff-0.0.259-py3-none-win_amd64.whl", hash = "sha256:aa9449b898287e621942cc71b9327eceb8f0c357e4065fecefb707ef2d978df8"},
|
||||
{file = "ruff-0.0.259-py3-none-win_arm64.whl", hash = "sha256:e4f39e18702de69faaaee3969934b92d7467285627f99a5b6ecd55a7d9f5d086"},
|
||||
{file = "ruff-0.0.259.tar.gz", hash = "sha256:8b56496063ab3bfdf72339a5fbebb8bd46e5c5fee25ef11a9f03b208fa0562ec"},
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -1172,7 +1144,7 @@ widechars = ["wcwidth"]
|
||||
name = "termcolor"
|
||||
version = "2.2.0"
|
||||
description = "ANSI color formatting for output in terminal"
|
||||
category = "main"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
@@ -1221,14 +1193,14 @@ files = [
|
||||
|
||||
[[package]]
|
||||
name = "typeguard"
|
||||
version = "3.0.1"
|
||||
version = "3.0.2"
|
||||
description = "Run-time type checker for Python"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7.4"
|
||||
files = [
|
||||
{file = "typeguard-3.0.1-py3-none-any.whl", hash = "sha256:15628045c830abf68533247afd2cb04683b5ce6f4e30d5401a5ef6f5182280de"},
|
||||
{file = "typeguard-3.0.1.tar.gz", hash = "sha256:beb0e67c5dc76eea4a6d00a6606d444d899589908362960769d0c4a1d32bca70"},
|
||||
{file = "typeguard-3.0.2-py3-none-any.whl", hash = "sha256:bbe993854385284ab42fd5bd3bee6f6556577ce8b50696d6cb956d704f286c8e"},
|
||||
{file = "typeguard-3.0.2.tar.gz", hash = "sha256:fee5297fdb28f8e9efcb8142b5ee219e02375509cd77ea9d270b5af826358d5a"},
|
||||
]
|
||||
|
||||
[package.dependencies]
|
||||
@@ -1275,7 +1247,7 @@ files = [
|
||||
name = "typing-extensions"
|
||||
version = "4.5.0"
|
||||
description = "Backported and Experimental Type Hints for Python 3.7+"
|
||||
category = "main"
|
||||
category = "dev"
|
||||
optional = false
|
||||
python-versions = ">=3.7"
|
||||
files = [
|
||||
@@ -1349,4 +1321,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
|
||||
[metadata]
|
||||
lock-version = "2.0"
|
||||
python-versions = "^3.10"
|
||||
content-hash = "e9e2ff35a5ae15991d1d123dffa9f15fdf5afaf00624c26577412555d0464eaf"
|
||||
content-hash = "45fc32e73a5670e7a8060985528c690a0739d76293e92e82ba1376f58f038638"
|
||||
|
||||
@@ -11,7 +11,7 @@
|
||||
name = "obsidian-metadata"
|
||||
readme = "README.md"
|
||||
repository = "https://github.com/natelandau/obsidian-metadata"
|
||||
version = "0.9.0"
|
||||
version = "0.11.0"
|
||||
|
||||
[tool.poetry.scripts] # https://python-poetry.org/docs/pyproject/#scripts
|
||||
obsidian-metadata = "obsidian_metadata.cli:app"
|
||||
@@ -20,13 +20,12 @@
|
||||
loguru = "^0.6.0"
|
||||
python = "^3.10"
|
||||
questionary = "^1.10.0"
|
||||
regex = "^2022.10.31"
|
||||
regex = "^2023.3.23"
|
||||
rich = "^13.3.2"
|
||||
ruamel-yaml = "^0.17.21"
|
||||
shellingham = "^1.5.0.post1"
|
||||
tomlkit = "^0.11.6"
|
||||
typer = "^0.7.0"
|
||||
commitizen = "^2.42.1"
|
||||
|
||||
[tool.poetry.group.test.dependencies]
|
||||
pytest = "^7.2.2"
|
||||
@@ -41,14 +40,14 @@
|
||||
coverage = "^7.2.2"
|
||||
interrogate = "^1.5.0"
|
||||
mypy = "^1.1.1"
|
||||
pdoc = "^13.0.0"
|
||||
poethepoet = "^0.18.1"
|
||||
pdoc = "^13.0.1"
|
||||
poethepoet = "^0.19.0"
|
||||
pre-commit = "^3.2.0"
|
||||
ruff = "0.0.257"
|
||||
typeguard = "^3.0.1"
|
||||
ruff = "^0.0.259"
|
||||
sh = "2.0.3"
|
||||
typeguard = "^3.0.2"
|
||||
types-python-dateutil = "^2.8.19.10"
|
||||
vulture = "^2.7"
|
||||
sh = "2.0.3"
|
||||
|
||||
[tool.black]
|
||||
line-length = 100
|
||||
@@ -58,7 +57,7 @@
|
||||
changelog_incremental = true
|
||||
tag_format = "v$version"
|
||||
update_changelog_on_bump = true
|
||||
version = "0.9.0"
|
||||
version = "0.11.0"
|
||||
version_files = ["pyproject.toml:version", "src/obsidian_metadata/__version__.py:__version__"]
|
||||
|
||||
[tool.coverage.report] # https://coverage.readthedocs.io/en/latest/config.html#report
|
||||
|
||||
@@ -143,7 +143,7 @@ for group in groups:
|
||||
notice(
|
||||
f"Updating {p} from {packages[p]['current_version']} to {packages[p]['new_version']}"
|
||||
)
|
||||
sh.poetry("add", f"{p}@{packages[p]['new_version']}", "--group", group, _fg=True)
|
||||
sh.poetry("add", f"{p}@latest", "--group", group, _fg=True)
|
||||
|
||||
sh.poetry("update", _fg=True)
|
||||
success("All dependencies are up to date")
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
"""obsidian-metadata version."""
|
||||
__version__ = "0.9.0"
|
||||
__version__ = "0.11.0"
|
||||
|
||||
@@ -5,12 +5,14 @@ from obsidian_metadata._utils.alerts import LoggerManager
|
||||
from obsidian_metadata._utils.utilities import (
|
||||
clean_dictionary,
|
||||
clear_screen,
|
||||
delete_from_dict,
|
||||
dict_contains,
|
||||
dict_keys_to_lower,
|
||||
dict_values_to_lists_strings,
|
||||
docstring_parameter,
|
||||
merge_dictionaries,
|
||||
remove_markdown_sections,
|
||||
rename_in_dict,
|
||||
validate_csv_bulk_imports,
|
||||
version_callback,
|
||||
)
|
||||
@@ -19,12 +21,14 @@ __all__ = [
|
||||
"alerts",
|
||||
"clean_dictionary",
|
||||
"clear_screen",
|
||||
"delete_from_dict",
|
||||
"dict_contains",
|
||||
"dict_keys_to_lower",
|
||||
"dict_values_to_lists_strings",
|
||||
"docstring_parameter",
|
||||
"LoggerManager",
|
||||
"merge_dictionaries",
|
||||
"rename_in_dict",
|
||||
"remove_markdown_sections",
|
||||
"validate_csv_bulk_imports",
|
||||
"version_callback",
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
"""Utility functions."""
|
||||
import copy
|
||||
import csv
|
||||
import re
|
||||
from os import name, system
|
||||
@@ -8,8 +9,6 @@ from typing import Any
|
||||
import typer
|
||||
|
||||
from obsidian_metadata.__version__ import __version__
|
||||
from obsidian_metadata._utils import alerts
|
||||
from obsidian_metadata._utils.alerts import logger as log
|
||||
from obsidian_metadata._utils.console import console
|
||||
|
||||
|
||||
@@ -22,24 +21,26 @@ def clean_dictionary(dictionary: dict[str, Any]) -> dict[str, Any]:
|
||||
Returns:
|
||||
dict: Cleaned dictionary
|
||||
"""
|
||||
new_dict = {key.strip(): value for key, value in dictionary.items()}
|
||||
new_dict = {key.strip("*[]#"): value for key, value in new_dict.items()}
|
||||
new_dict = copy.deepcopy(dictionary)
|
||||
new_dict = {key.strip("*[]# "): value for key, value in new_dict.items()}
|
||||
for key, value in new_dict.items():
|
||||
new_dict[key] = [s.strip("*[]#") for s in value if isinstance(value, list)]
|
||||
if isinstance(value, list):
|
||||
new_dict[key] = [s.strip("*[]# ") for s in value if isinstance(value, list)]
|
||||
elif isinstance(value, str):
|
||||
new_dict[key] = value.strip("*[]# ")
|
||||
|
||||
return new_dict
|
||||
|
||||
|
||||
def clear_screen() -> None: # pragma: no cover
|
||||
"""Clear the screen."""
|
||||
# for windows
|
||||
_ = system("cls") if name == "nt" else system("clear")
|
||||
|
||||
|
||||
def dict_contains(
|
||||
dictionary: dict[str, list[str]], key: str, value: str = None, is_regex: bool = False
|
||||
) -> bool:
|
||||
"""Check if a dictionary contains a key or if a specified key contains a value.
|
||||
"""Check if a dictionary contains a key or if a key contains a value.
|
||||
|
||||
Args:
|
||||
dictionary (dict): Dictionary to check
|
||||
@@ -48,7 +49,7 @@ def dict_contains(
|
||||
is_regex (bool, optional): Whether the key is a regex. Defaults to False.
|
||||
|
||||
Returns:
|
||||
bool: Whether the dictionary contains the key
|
||||
bool: Whether the dictionary contains the key or value
|
||||
"""
|
||||
if value is None:
|
||||
if is_regex:
|
||||
@@ -56,13 +57,11 @@ def dict_contains(
|
||||
return key in dictionary
|
||||
|
||||
if is_regex:
|
||||
found_keys = []
|
||||
for _key in dictionary:
|
||||
if re.search(key, str(_key)):
|
||||
found_keys.append(
|
||||
any(re.search(value, _v) for _v in dictionary[_key]),
|
||||
)
|
||||
return any(found_keys)
|
||||
if re.search(key, str(_key)) and any(re.search(value, _v) for _v in dictionary[_key]):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
return key in dictionary and value in dictionary[key]
|
||||
|
||||
@@ -94,6 +93,7 @@ def dict_values_to_lists_strings(
|
||||
|
||||
{key: sorted(new_dict[key]) for key in sorted(new_dict)}
|
||||
"""
|
||||
dictionary = copy.deepcopy(dictionary)
|
||||
new_dict = {}
|
||||
|
||||
if strip_null_values:
|
||||
@@ -101,7 +101,7 @@ def dict_values_to_lists_strings(
|
||||
if isinstance(value, list):
|
||||
new_dict[key] = sorted([str(item) for item in value if item is not None])
|
||||
elif isinstance(value, dict):
|
||||
new_dict[key] = dict_values_to_lists_strings(value) # type: ignore[assignment]
|
||||
new_dict[key] = dict_values_to_lists_strings(value, strip_null_values=True) # type: ignore[assignment]
|
||||
elif value is None or value == "None" or not value:
|
||||
new_dict[key] = []
|
||||
else:
|
||||
@@ -111,15 +111,57 @@ def dict_values_to_lists_strings(
|
||||
|
||||
for key, value in dictionary.items():
|
||||
if isinstance(value, list):
|
||||
new_dict[key] = sorted([str(item) for item in value])
|
||||
new_dict[key] = sorted([str(item) if item is not None else "" for item in value])
|
||||
elif isinstance(value, dict):
|
||||
new_dict[key] = dict_values_to_lists_strings(value) # type: ignore[assignment]
|
||||
else:
|
||||
new_dict[key] = [str(value)]
|
||||
new_dict[key] = [str(value) if value is not None else ""]
|
||||
|
||||
return new_dict
|
||||
|
||||
|
||||
def delete_from_dict( # noqa: C901
|
||||
dictionary: dict, key: str, value: str = None, is_regex: bool = False
|
||||
) -> dict:
|
||||
"""Delete a key or a value from a dictionary.
|
||||
|
||||
Args:
|
||||
dictionary (dict): Dictionary to delete from
|
||||
is_regex (bool, optional): Whether the key is a regex. Defaults to False.
|
||||
key (str): Key to delete
|
||||
value (str, optional): Value to delete. Defaults to None.
|
||||
|
||||
Returns:
|
||||
dict: Dictionary without the key
|
||||
"""
|
||||
dictionary = copy.deepcopy(dictionary)
|
||||
|
||||
if value is None:
|
||||
if is_regex:
|
||||
return {k: v for k, v in dictionary.items() if not re.search(key, str(k))}
|
||||
|
||||
return {k: v for k, v in dictionary.items() if k != key}
|
||||
|
||||
if is_regex:
|
||||
keys_to_delete = []
|
||||
for _key in dictionary:
|
||||
if re.search(key, str(_key)):
|
||||
if isinstance(dictionary[_key], list):
|
||||
dictionary[_key] = [v for v in dictionary[_key] if not re.search(value, v)]
|
||||
elif isinstance(dictionary[_key], str) and re.search(value, dictionary[_key]):
|
||||
keys_to_delete.append(_key)
|
||||
|
||||
for key in keys_to_delete:
|
||||
dictionary.pop(key)
|
||||
|
||||
elif key in dictionary and isinstance(dictionary[key], list):
|
||||
dictionary[key] = [v for v in dictionary[key] if v != value]
|
||||
elif key in dictionary and dictionary[key] == value:
|
||||
dictionary.pop(key)
|
||||
|
||||
return dictionary
|
||||
|
||||
|
||||
def docstring_parameter(*sub: Any) -> Any:
|
||||
"""Replace variables within docstrings.
|
||||
|
||||
@@ -151,22 +193,49 @@ def merge_dictionaries(dict1: dict, dict2: dict) -> dict:
|
||||
Returns:
|
||||
dict: Merged dictionary.
|
||||
"""
|
||||
for k, v in dict2.items():
|
||||
if k in dict1:
|
||||
if isinstance(v, list):
|
||||
dict1[k].extend(v)
|
||||
d1 = copy.deepcopy(dict1)
|
||||
d2 = copy.deepcopy(dict2)
|
||||
|
||||
for _key in d1:
|
||||
if not isinstance(d1[_key], list):
|
||||
raise TypeError(f"Key {_key} is not a list.")
|
||||
for _key in d2:
|
||||
if not isinstance(d2[_key], list):
|
||||
raise TypeError(f"Key {_key} is not a list.")
|
||||
|
||||
for k, v in d2.items():
|
||||
if k in d1:
|
||||
d1[k].extend(v)
|
||||
d1[k] = sorted(set(d1[k]))
|
||||
else:
|
||||
dict1[k] = v
|
||||
d1[k] = sorted(set(v))
|
||||
|
||||
for k, v in dict1.items():
|
||||
if isinstance(v, list):
|
||||
dict1[k] = sorted(set(v))
|
||||
elif isinstance(v, dict): # pragma: no cover
|
||||
for kk, vv in v.items():
|
||||
if isinstance(vv, list):
|
||||
v[kk] = sorted(set(vv))
|
||||
return dict(sorted(d1.items()))
|
||||
|
||||
return dict(sorted(dict1.items()))
|
||||
|
||||
def rename_in_dict(
|
||||
dictionary: dict[str, list[str]], key: str, value_1: str, value_2: str = None
|
||||
) -> dict:
|
||||
"""Rename a key or a value in a dictionary who's values are lists of strings.
|
||||
|
||||
Args:
|
||||
dictionary (dict): Dictionary to rename in.
|
||||
key (str): Key to check.
|
||||
value_1 (str): `With value_2` this is the value to rename. If `value_2` is None this is the renamed key
|
||||
value_2 (str, Optional): New value.
|
||||
|
||||
Returns:
|
||||
dict: Dictionary with renamed key or value
|
||||
"""
|
||||
dictionary = copy.deepcopy(dictionary)
|
||||
|
||||
if value_2 is None:
|
||||
if key in dictionary and value_1 not in dictionary:
|
||||
dictionary[value_1] = dictionary.pop(key)
|
||||
elif key in dictionary and value_1 in dictionary[key]:
|
||||
dictionary[key] = sorted({value_2 if x == value_1 else x for x in dictionary[key]})
|
||||
|
||||
return dictionary
|
||||
|
||||
|
||||
def remove_markdown_sections(
|
||||
@@ -175,7 +244,7 @@ def remove_markdown_sections(
|
||||
strip_inlinecode: bool = False,
|
||||
strip_frontmatter: bool = False,
|
||||
) -> str:
|
||||
"""Strip markdown sections from text.
|
||||
"""Strip unwanted markdown sections from text. This is used to remove code blocks and frontmatter from the body of notes before tags and inline metadata are processed.
|
||||
|
||||
Args:
|
||||
text (str): Text to remove code blocks from
|
||||
@@ -190,7 +259,7 @@ def remove_markdown_sections(
|
||||
text = re.sub(r"`{3}.*?`{3}", "", text, flags=re.DOTALL)
|
||||
|
||||
if strip_inlinecode:
|
||||
text = re.sub(r"`.*?`", "", text)
|
||||
text = re.sub(r"(?<!`{2})`[^`]+?`", "", text)
|
||||
|
||||
if strip_frontmatter:
|
||||
text = re.sub(r"^\s*---.*?---", "", text, flags=re.DOTALL)
|
||||
@@ -224,6 +293,11 @@ def validate_csv_bulk_imports(csv_path: Path, note_paths: list) -> dict[str, lis
|
||||
raise typer.BadParameter("Missing 'value' column in CSV file")
|
||||
row_num += 1
|
||||
|
||||
if row_num > 0 and row["type"] not in ["tag", "frontmatter", "inline_metadata"]:
|
||||
raise typer.BadParameter(
|
||||
f"Invalid type '{row['type']}' in CSV file. Must be one of 'tag', 'frontmatter', 'inline_metadata'"
|
||||
)
|
||||
|
||||
if row["path"] not in csv_dict:
|
||||
csv_dict[row["path"]] = []
|
||||
|
||||
@@ -237,12 +311,9 @@ def validate_csv_bulk_imports(csv_path: Path, note_paths: list) -> dict[str, lis
|
||||
paths_to_remove = [x for x in csv_dict if x not in note_paths]
|
||||
|
||||
for _path in paths_to_remove:
|
||||
alerts.warning(f"'{_path}' does not exist in vault. Skipping...")
|
||||
del csv_dict[_path]
|
||||
|
||||
if len(csv_dict) == 0:
|
||||
log.error("No paths in the CSV file matched paths in the vault")
|
||||
raise typer.Exit(1)
|
||||
raise typer.BadParameter(
|
||||
f"'{_path}' in CSV does not exist in vault. Ensure all paths are relative to the vault root."
|
||||
)
|
||||
|
||||
return csv_dict
|
||||
|
||||
|
||||
@@ -34,14 +34,28 @@ def main(
|
||||
),
|
||||
export_csv: Path = typer.Option(
|
||||
None,
|
||||
help="Exports all metadata to a specified CSV file and exits. (Will overwrite any existing file)",
|
||||
help="Exports all metadata to a specified CSV file and exits.",
|
||||
show_default=False,
|
||||
dir_okay=False,
|
||||
file_okay=True,
|
||||
),
|
||||
export_json: Path = typer.Option(
|
||||
None,
|
||||
help="Exports all metadata to a specified JSON file and exits. (Will overwrite any existing file)",
|
||||
help="Exports all metadata to a specified JSON file and exits.",
|
||||
show_default=False,
|
||||
dir_okay=False,
|
||||
file_okay=True,
|
||||
),
|
||||
export_template: Path = typer.Option(
|
||||
None,
|
||||
help="Exports all notes and their metadata to a specified CSV file and exits. Use to create a template for batch updates.",
|
||||
show_default=False,
|
||||
dir_okay=False,
|
||||
file_okay=True,
|
||||
),
|
||||
import_csv: Path = typer.Option(
|
||||
None,
|
||||
help="Import a CSV file with bulk updates to metadata.",
|
||||
show_default=False,
|
||||
dir_okay=False,
|
||||
file_okay=True,
|
||||
@@ -142,6 +156,14 @@ def main(
|
||||
path = Path(export_json).expanduser().resolve()
|
||||
application.noninteractive_export_csv(path)
|
||||
raise typer.Exit(code=0)
|
||||
if export_template is not None:
|
||||
path = Path(export_template).expanduser().resolve()
|
||||
application.noninteractive_export_template(path)
|
||||
raise typer.Exit(code=0)
|
||||
if import_csv is not None:
|
||||
path = Path(import_csv).expanduser().resolve()
|
||||
application.noninteractive_bulk_import(path)
|
||||
raise typer.Exit(code=0)
|
||||
|
||||
application.application_main()
|
||||
|
||||
|
||||
@@ -129,7 +129,7 @@ class Application:
|
||||
|
||||
choices = [
|
||||
questionary.Separator(),
|
||||
{"name": "Delete inline tag", "value": "delete_inline_tag"},
|
||||
{"name": "Delete inline tag", "value": "delete_tag"},
|
||||
{"name": "Delete key", "value": "delete_key"},
|
||||
{"name": "Delete value", "value": "delete_value"},
|
||||
questionary.Separator(),
|
||||
@@ -142,8 +142,8 @@ class Application:
|
||||
self.delete_key()
|
||||
case "delete_value":
|
||||
self.delete_value()
|
||||
case "delete_inline_tag":
|
||||
self.delete_inline_tag()
|
||||
case "delete_tag":
|
||||
self.delete_tag()
|
||||
case _: # pragma: no cover
|
||||
return
|
||||
|
||||
@@ -153,7 +153,7 @@ class Application:
|
||||
|
||||
choices = [
|
||||
questionary.Separator(),
|
||||
{"name": "Rename inline tag", "value": "rename_inline_tag"},
|
||||
{"name": "Rename inline tag", "value": "rename_tag"},
|
||||
{"name": "Rename key", "value": "rename_key"},
|
||||
{"name": "Rename value", "value": "rename_value"},
|
||||
questionary.Separator(),
|
||||
@@ -166,8 +166,8 @@ class Application:
|
||||
self.rename_key()
|
||||
case "rename_value":
|
||||
self.rename_value()
|
||||
case "rename_inline_tag":
|
||||
self.rename_inline_tag()
|
||||
case "rename_tag":
|
||||
self.rename_tag()
|
||||
case _: # pragma: no cover
|
||||
return
|
||||
|
||||
@@ -213,7 +213,7 @@ class Application:
|
||||
self._load_vault()
|
||||
|
||||
case "apply_tag_filter":
|
||||
tag = self.questions.ask_existing_inline_tag()
|
||||
tag = self.questions.ask_existing_tag()
|
||||
if tag is None or not tag:
|
||||
return
|
||||
|
||||
@@ -482,11 +482,11 @@ class Application:
|
||||
|
||||
return True
|
||||
|
||||
def delete_inline_tag(self) -> None:
|
||||
def delete_tag(self) -> None:
|
||||
"""Delete an inline tag."""
|
||||
tag = self.questions.ask_existing_inline_tag(question="Which tag would you like to delete?")
|
||||
tag = self.questions.ask_existing_tag(question="Which tag would you like to delete?")
|
||||
|
||||
num_changed = self.vault.delete_inline_tag(tag)
|
||||
num_changed = self.vault.delete_tag(tag)
|
||||
if num_changed == 0:
|
||||
alerts.warning("No notes were changed")
|
||||
return
|
||||
@@ -502,7 +502,9 @@ class Application:
|
||||
if key_to_delete is None: # pragma: no cover
|
||||
return
|
||||
|
||||
num_changed = self.vault.delete_metadata(key_to_delete)
|
||||
num_changed = self.vault.delete_metadata(
|
||||
key=key_to_delete, area=MetadataType.ALL, is_regex=True
|
||||
)
|
||||
if num_changed == 0:
|
||||
alerts.warning(f"No notes found with a key matching: [reverse]{key_to_delete}[/]")
|
||||
return
|
||||
@@ -524,7 +526,9 @@ class Application:
|
||||
if value is None: # pragma: no cover
|
||||
return
|
||||
|
||||
num_changed = self.vault.delete_metadata(key, value)
|
||||
num_changed = self.vault.delete_metadata(
|
||||
key=key, value=value, area=MetadataType.ALL, is_regex=True
|
||||
)
|
||||
if num_changed == 0:
|
||||
alerts.warning(f"No notes found matching: {key}: {value}")
|
||||
return
|
||||
@@ -544,10 +548,45 @@ class Application:
|
||||
|
||||
alerts.success(f"Moved inline metadata to {location.value} in {num_changed} notes")
|
||||
|
||||
def noninteractive_bulk_import(self, path: Path) -> None:
|
||||
"""Bulk update metadata from a CSV from the command line.
|
||||
|
||||
Args:
|
||||
path: Path to the CSV file containing the metadata to update.
|
||||
"""
|
||||
self._load_vault()
|
||||
note_paths = [
|
||||
str(n.note_path.relative_to(self.vault.vault_path)) for n in self.vault.all_notes
|
||||
]
|
||||
dict_from_csv = validate_csv_bulk_imports(path, note_paths)
|
||||
num_changed = self.vault.update_from_dict(dict_from_csv)
|
||||
if num_changed == 0:
|
||||
alerts.warning("No notes were changed")
|
||||
return
|
||||
|
||||
alerts.success(f"{num_changed} notes specified in '{path}'")
|
||||
alerts.info("Review changes and commit.")
|
||||
while True:
|
||||
self.vault.info()
|
||||
|
||||
match self.questions.ask_application_main():
|
||||
case "vault_actions":
|
||||
self.application_vault()
|
||||
case "inspect_metadata":
|
||||
self.application_inspect_metadata()
|
||||
case "review_changes":
|
||||
self.review_changes()
|
||||
case "commit_changes":
|
||||
self.commit_changes()
|
||||
case _:
|
||||
break
|
||||
|
||||
console.print("Done!")
|
||||
|
||||
def noninteractive_export_csv(self, path: Path) -> None:
|
||||
"""Export the vault metadata to CSV."""
|
||||
self._load_vault()
|
||||
self.vault.export_metadata(export_format="json", path=str(path))
|
||||
self.vault.export_metadata(export_format="csv", path=str(path))
|
||||
alerts.success(f"Exported metadata to {path}")
|
||||
|
||||
def noninteractive_export_json(self, path: Path) -> None:
|
||||
@@ -556,6 +595,16 @@ class Application:
|
||||
self.vault.export_metadata(export_format="json", path=str(path))
|
||||
alerts.success(f"Exported metadata to {path}")
|
||||
|
||||
def noninteractive_export_template(self, path: Path) -> None:
|
||||
"""Export the vault metadata to CSV."""
|
||||
self._load_vault()
|
||||
with console.status(
|
||||
"Preparing export... [dim](Can take a while for large vaults)[/]",
|
||||
spinner="bouncingBall",
|
||||
):
|
||||
self.vault.export_notes_to_csv(path=str(path))
|
||||
alerts.success(f"Exported metadata to {path}")
|
||||
|
||||
def rename_key(self) -> None:
|
||||
"""Rename a key in the vault."""
|
||||
original_key = self.questions.ask_existing_key(
|
||||
@@ -577,9 +626,9 @@ class Application:
|
||||
f"Renamed [reverse]{original_key}[/] to [reverse]{new_key}[/] in {num_changed} notes"
|
||||
)
|
||||
|
||||
def rename_inline_tag(self) -> None:
|
||||
def rename_tag(self) -> None:
|
||||
"""Rename an inline tag."""
|
||||
original_tag = self.questions.ask_existing_inline_tag(question="Which tag to rename?")
|
||||
original_tag = self.questions.ask_existing_tag(question="Which tag to rename?")
|
||||
if original_tag is None: # pragma: no cover
|
||||
return
|
||||
|
||||
@@ -587,7 +636,7 @@ class Application:
|
||||
if new_tag is None: # pragma: no cover
|
||||
return
|
||||
|
||||
num_changed = self.vault.rename_inline_tag(original_tag, new_tag)
|
||||
num_changed = self.vault.rename_tag(original_tag, new_tag)
|
||||
if num_changed == 0:
|
||||
alerts.warning("No notes were changed")
|
||||
return
|
||||
|
||||
@@ -10,10 +10,12 @@ from ruamel.yaml import YAML
|
||||
|
||||
from obsidian_metadata._utils import (
|
||||
clean_dictionary,
|
||||
delete_from_dict,
|
||||
dict_contains,
|
||||
dict_values_to_lists_strings,
|
||||
merge_dictionaries,
|
||||
remove_markdown_sections,
|
||||
rename_in_dict,
|
||||
)
|
||||
from obsidian_metadata._utils.console import console
|
||||
from obsidian_metadata.models import Patterns # isort: ignore
|
||||
@@ -24,7 +26,14 @@ INLINE_TAG_KEY: str = "inline_tag"
|
||||
|
||||
|
||||
class VaultMetadata:
|
||||
"""Representation of all Metadata in the Vault."""
|
||||
"""Representation of all Metadata in the Vault.
|
||||
|
||||
Attributes:
|
||||
dict (dict): Dictionary of all frontmatter and inline metadata. Does not include tags.
|
||||
frontmatter (dict): Dictionary of all frontmatter metadata.
|
||||
inline_metadata (dict): Dictionary of all inline metadata.
|
||||
tags (list): List of all tags.
|
||||
"""
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.dict: dict[str, list[str]] = {}
|
||||
@@ -59,7 +68,7 @@ class VaultMetadata:
|
||||
self.tags.extend(metadata)
|
||||
self.tags = sorted({s.strip("#") for s in self.tags})
|
||||
|
||||
def contains( # noqa: PLR0911
|
||||
def contains(
|
||||
self, area: MetadataType, key: str = None, value: str = None, is_regex: bool = False
|
||||
) -> bool:
|
||||
"""Check if a key and/or a value exists in the metadata.
|
||||
@@ -82,13 +91,7 @@ class VaultMetadata:
|
||||
|
||||
match area:
|
||||
case MetadataType.ALL:
|
||||
if dict_contains(self.dict, key, value, is_regex):
|
||||
return True
|
||||
if key is None and value is not None:
|
||||
if is_regex:
|
||||
return any(re.search(value, tag) for tag in self.tags)
|
||||
return value in self.tags
|
||||
|
||||
return dict_contains(self.dict, key, value, is_regex)
|
||||
case MetadataType.FRONTMATTER:
|
||||
return dict_contains(self.frontmatter, key, value, is_regex)
|
||||
case MetadataType.INLINE:
|
||||
@@ -102,10 +105,8 @@ class VaultMetadata:
|
||||
return any(re.search(value, tag) for tag in self.tags)
|
||||
return value in self.tags
|
||||
|
||||
return False
|
||||
|
||||
def delete(self, key: str, value_to_delete: str = None) -> bool:
|
||||
"""Delete a key or a key's value from the metadata. Regex is supported to allow deleting more than one key or value.
|
||||
"""Delete a key or a value from the VaultMetadata dict object. Regex is supported to allow deleting more than one key or value.
|
||||
|
||||
Args:
|
||||
key (str): Key to check.
|
||||
@@ -114,17 +115,12 @@ class VaultMetadata:
|
||||
Returns:
|
||||
bool: True if a value was deleted
|
||||
"""
|
||||
new_dict = copy.deepcopy(self.dict)
|
||||
|
||||
if value_to_delete is None:
|
||||
for _k in list(new_dict):
|
||||
if re.search(key, _k):
|
||||
del new_dict[_k]
|
||||
else:
|
||||
for _k, _v in new_dict.items():
|
||||
if re.search(key, _k):
|
||||
new_values = [x for x in _v if not re.search(value_to_delete, x)]
|
||||
new_dict[_k] = sorted(new_values)
|
||||
new_dict = delete_from_dict(
|
||||
dictionary=self.dict,
|
||||
key=key,
|
||||
value=value_to_delete,
|
||||
is_regex=True,
|
||||
)
|
||||
|
||||
if new_dict != self.dict:
|
||||
self.dict = dict(new_dict)
|
||||
@@ -138,28 +134,24 @@ class VaultMetadata:
|
||||
Args:
|
||||
area (MetadataType): Type of metadata to print
|
||||
"""
|
||||
dict_to_print: dict[str, list[str]] = None
|
||||
list_to_print: list[str] = None
|
||||
dict_to_print = None
|
||||
list_to_print = None
|
||||
match area:
|
||||
case MetadataType.INLINE:
|
||||
dict_to_print = self.inline_metadata.copy()
|
||||
dict_to_print = self.inline_metadata
|
||||
header = "All inline metadata"
|
||||
case MetadataType.FRONTMATTER:
|
||||
dict_to_print = self.frontmatter.copy()
|
||||
dict_to_print = self.frontmatter
|
||||
header = "All frontmatter"
|
||||
case MetadataType.TAGS:
|
||||
list_to_print = []
|
||||
for tag in self.tags:
|
||||
list_to_print.append(f"#{tag}")
|
||||
list_to_print = [f"#{x}" for x in self.tags]
|
||||
header = "All inline tags"
|
||||
case MetadataType.KEYS:
|
||||
list_to_print = sorted(self.dict.keys())
|
||||
header = "All Keys"
|
||||
case MetadataType.ALL:
|
||||
dict_to_print = self.dict.copy()
|
||||
list_to_print = []
|
||||
for tag in self.tags:
|
||||
list_to_print.append(f"#{tag}")
|
||||
dict_to_print = self.dict
|
||||
list_to_print = [f"#{x}" for x in self.tags]
|
||||
header = "All metadata"
|
||||
|
||||
if dict_to_print is not None:
|
||||
@@ -189,19 +181,14 @@ class VaultMetadata:
|
||||
key (str): Key to check.
|
||||
value_1 (str): `With value_2` this is the value to rename. If `value_2` is None this is the renamed key
|
||||
value_2 (str, Optional): New value.
|
||||
bypass_check (bool, optional): Bypass the check if the key exists. Defaults to False.
|
||||
|
||||
Returns:
|
||||
bool: True if a value was renamed
|
||||
"""
|
||||
if value_2 is None:
|
||||
if key in self.dict and value_1 not in self.dict:
|
||||
self.dict[value_1] = self.dict.pop(key)
|
||||
return True
|
||||
return False
|
||||
new_dict = rename_in_dict(dictionary=self.dict, key=key, value_1=value_1, value_2=value_2)
|
||||
|
||||
if key in self.dict and value_1 in self.dict[key]:
|
||||
self.dict[key] = sorted({value_2 if x == value_1 else x for x in self.dict[key]})
|
||||
if new_dict != self.dict:
|
||||
self.dict = dict(new_dict)
|
||||
return True
|
||||
|
||||
return False
|
||||
@@ -302,27 +289,23 @@ class Frontmatter:
|
||||
"""
|
||||
return dict_contains(self.dict, key, value, is_regex)
|
||||
|
||||
def delete(self, key: str, value_to_delete: str = None) -> bool:
|
||||
def delete(self, key: str, value_to_delete: str = None, is_regex: bool = False) -> bool:
|
||||
"""Delete a value or key in the frontmatter. Regex is supported to allow deleting more than one key or value.
|
||||
|
||||
Args:
|
||||
is_regex (bool, optional): Use regex to check. Defaults to False.
|
||||
key (str): If no value, key to delete. If value, key containing the value.
|
||||
value_to_delete (str, optional): Value to delete.
|
||||
|
||||
Returns:
|
||||
bool: True if a value was deleted
|
||||
"""
|
||||
new_dict = copy.deepcopy(self.dict)
|
||||
|
||||
if value_to_delete is None:
|
||||
for _k in list(new_dict):
|
||||
if re.search(key, _k):
|
||||
del new_dict[_k]
|
||||
else:
|
||||
for _k, _v in new_dict.items():
|
||||
if re.search(key, _k):
|
||||
new_values = [x for x in _v if not re.search(value_to_delete, x)]
|
||||
new_dict[_k] = sorted(new_values)
|
||||
new_dict = delete_from_dict(
|
||||
dictionary=self.dict,
|
||||
key=key,
|
||||
value=value_to_delete,
|
||||
is_regex=is_regex,
|
||||
)
|
||||
|
||||
if new_dict != self.dict:
|
||||
self.dict = dict(new_dict)
|
||||
@@ -353,14 +336,10 @@ class Frontmatter:
|
||||
Returns:
|
||||
bool: True if a value was renamed
|
||||
"""
|
||||
if value_2 is None:
|
||||
if key in self.dict and value_1 not in self.dict:
|
||||
self.dict[value_1] = self.dict.pop(key)
|
||||
return True
|
||||
return False
|
||||
new_dict = rename_in_dict(dictionary=self.dict, key=key, value_1=value_1, value_2=value_2)
|
||||
|
||||
if key in self.dict and value_1 in self.dict[key]:
|
||||
self.dict[key] = sorted({value_2 if x == value_1 else x for x in self.dict[key]})
|
||||
if new_dict != self.dict:
|
||||
self.dict = dict(new_dict)
|
||||
return True
|
||||
|
||||
return False
|
||||
@@ -481,27 +460,23 @@ class InlineMetadata:
|
||||
"""
|
||||
return dict_contains(self.dict, key, value, is_regex)
|
||||
|
||||
def delete(self, key: str, value_to_delete: str = None) -> bool:
|
||||
def delete(self, key: str, value_to_delete: str = None, is_regex: bool = False) -> bool:
|
||||
"""Delete a value or key in the inline metadata. Regex is supported to allow deleting more than one key or value.
|
||||
|
||||
Args:
|
||||
is_regex (bool, optional): If True, key and value are treated as regex. Defaults to False.
|
||||
key (str): If no value, key to delete. If value, key containing the value.
|
||||
value_to_delete (str, optional): Value to delete.
|
||||
|
||||
Returns:
|
||||
bool: True if a value was deleted
|
||||
"""
|
||||
new_dict = dict(self.dict)
|
||||
|
||||
if value_to_delete is None:
|
||||
for _k in list(new_dict):
|
||||
if re.search(key, _k):
|
||||
del new_dict[_k]
|
||||
else:
|
||||
for _k, _v in new_dict.items():
|
||||
if re.search(key, _k):
|
||||
new_values = [x for x in _v if not re.search(value_to_delete, x)]
|
||||
new_dict[_k] = sorted(new_values)
|
||||
new_dict = delete_from_dict(
|
||||
dictionary=self.dict,
|
||||
key=key,
|
||||
value=value_to_delete,
|
||||
is_regex=is_regex,
|
||||
)
|
||||
|
||||
if new_dict != self.dict:
|
||||
self.dict = dict(new_dict)
|
||||
@@ -528,14 +503,10 @@ class InlineMetadata:
|
||||
Returns:
|
||||
bool: True if a value was renamed
|
||||
"""
|
||||
if value_2 is None:
|
||||
if key in self.dict and value_1 not in self.dict:
|
||||
self.dict[value_1] = self.dict.pop(key)
|
||||
return True
|
||||
return False
|
||||
new_dict = rename_in_dict(dictionary=self.dict, key=key, value_1=value_1, value_2=value_2)
|
||||
|
||||
if key in self.dict and value_1 in self.dict[key]:
|
||||
self.dict[key] = sorted({value_2 if x == value_1 else x for x in self.dict[key]})
|
||||
if new_dict != self.dict:
|
||||
self.dict = dict(new_dict)
|
||||
return True
|
||||
|
||||
return False
|
||||
@@ -580,32 +551,34 @@ class InlineTags:
|
||||
"""Add a new inline tag.
|
||||
|
||||
Args:
|
||||
new_tag (str): Tag to add.
|
||||
new_tag (str, list[str]): Tag to add.
|
||||
|
||||
Returns:
|
||||
bool: True if a tag was added.
|
||||
"""
|
||||
added_tag = False
|
||||
if isinstance(new_tag, list):
|
||||
for _tag in new_tag:
|
||||
if _tag.startswith("#"):
|
||||
_tag = _tag[1:]
|
||||
if _tag in self.list:
|
||||
return False
|
||||
new_list = self.list.copy()
|
||||
new_list.append(_tag)
|
||||
self.list = sorted(new_list)
|
||||
return True
|
||||
else:
|
||||
if new_tag.startswith("#"):
|
||||
new_tag = new_tag[1:]
|
||||
if new_tag in self.list:
|
||||
return False
|
||||
new_list = self.list.copy()
|
||||
new_list.append(new_tag)
|
||||
self.list = sorted(new_list)
|
||||
return True
|
||||
continue
|
||||
self.list.append(_tag)
|
||||
added_tag = True
|
||||
|
||||
return False
|
||||
if added_tag:
|
||||
self.list = sorted(self.list)
|
||||
return True
|
||||
return False
|
||||
|
||||
if new_tag.startswith("#"):
|
||||
new_tag = new_tag[1:]
|
||||
if new_tag in self.list:
|
||||
return False
|
||||
new_list = self.list.copy()
|
||||
new_list.append(new_tag)
|
||||
self.list = sorted(new_list)
|
||||
return True
|
||||
|
||||
def contains(self, tag: str, is_regex: bool = False) -> bool:
|
||||
"""Check if a tag exists in the metadata.
|
||||
@@ -653,13 +626,13 @@ class InlineTags:
|
||||
"""Replace an inline tag with another string.
|
||||
|
||||
Args:
|
||||
old_tag (str): `With value_2` this is the value to rename. If `value_2` is None this is the renamed key
|
||||
new_tag (str, Optional): New value.
|
||||
old_tag (str): `With value_2` this is the value to rename.
|
||||
new_tag (str): New value
|
||||
|
||||
Returns:
|
||||
bool: True if a value was renamed
|
||||
"""
|
||||
if old_tag in self.list:
|
||||
self.list = sorted([new_tag if i == old_tag else i for i in self.list])
|
||||
if old_tag in self.list and new_tag is not None and new_tag:
|
||||
self.list = sorted({new_tag if i == old_tag else i for i in self.list})
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -37,8 +37,9 @@ class Note:
|
||||
dry_run (bool): Whether to run in dry-run mode.
|
||||
file_content (str): Total contents of the note file (frontmatter and content).
|
||||
frontmatter (dict): Frontmatter of the note.
|
||||
inline_tags (list): List of inline tags in the note.
|
||||
tags (list): List of inline tags in the note.
|
||||
inline_metadata (dict): Dictionary of inline metadata in the note.
|
||||
original_file_content (str): Original contents of the note file (frontmatter and content)
|
||||
"""
|
||||
|
||||
def __init__(self, note_path: Path, dry_run: bool = False) -> None:
|
||||
@@ -59,7 +60,7 @@ class Note:
|
||||
alerts.error(f"Note {self.note_path} has invalid frontmatter.\n{e}")
|
||||
raise typer.Exit(code=1) from e
|
||||
|
||||
self.inline_tags: InlineTags = InlineTags(self.file_content)
|
||||
self.tags: InlineTags = InlineTags(self.file_content)
|
||||
self.inline_metadata: InlineMetadata = InlineMetadata(self.file_content)
|
||||
self.original_file_content: str = self.file_content
|
||||
|
||||
@@ -68,7 +69,7 @@ class Note:
|
||||
yield "note_path", self.note_path
|
||||
yield "dry_run", self.dry_run
|
||||
yield "frontmatter", self.frontmatter
|
||||
yield "inline_tags", self.inline_tags
|
||||
yield "tags", self.tags
|
||||
yield "inline_metadata", self.inline_metadata
|
||||
|
||||
def add_metadata( # noqa: C901
|
||||
@@ -114,8 +115,8 @@ class Note:
|
||||
case MetadataType.TAGS:
|
||||
new_values = []
|
||||
if isinstance(value, list):
|
||||
new_values = [_v for _v in value if self.inline_tags.add(_v)]
|
||||
elif self.inline_tags.add(value):
|
||||
new_values = [_v for _v in value if self.tags.add(_v)]
|
||||
elif self.tags.add(value):
|
||||
new_values = [value]
|
||||
|
||||
if new_values:
|
||||
@@ -153,7 +154,7 @@ class Note:
|
||||
alerts.error(f"Note {p} not found. Exiting")
|
||||
raise typer.Exit(code=1) from e
|
||||
|
||||
def contains_inline_tag(self, tag: str, is_regex: bool = False) -> bool:
|
||||
def contains_tag(self, tag: str, is_regex: bool = False) -> bool:
|
||||
"""Check if a note contains the specified inline tag.
|
||||
|
||||
Args:
|
||||
@@ -163,7 +164,7 @@ class Note:
|
||||
Returns:
|
||||
bool: Whether the note has inline tags.
|
||||
"""
|
||||
return self.inline_tags.contains(tag, is_regex=is_regex)
|
||||
return self.tags.contains(tag, is_regex=is_regex)
|
||||
|
||||
def contains_metadata(self, key: str, value: str = None, is_regex: bool = False) -> bool:
|
||||
"""Check if a note has a key or a key-value pair in its Frontmatter or InlineMetadata.
|
||||
@@ -195,14 +196,14 @@ class Note:
|
||||
for key in self.inline_metadata.dict:
|
||||
self.delete_metadata(key=key, area=MetadataType.INLINE)
|
||||
|
||||
for tag in self.inline_tags.list:
|
||||
self.delete_inline_tag(tag=tag)
|
||||
for tag in self.tags.list:
|
||||
self.delete_tag(tag=tag)
|
||||
|
||||
self.frontmatter.delete_all()
|
||||
self.write_frontmatter()
|
||||
|
||||
def delete_inline_tag(self, tag: str) -> bool:
|
||||
"""Delete an inline tag from the `inline_tags` attribute AND removes the tag from the text of the note if it exists.
|
||||
def delete_tag(self, tag: str) -> bool:
|
||||
"""Delete an inline tag from the `tags` attribute AND removes the tag from the text of the note if it exists.
|
||||
|
||||
Args:
|
||||
tag (str): Tag to delete.
|
||||
@@ -210,30 +211,35 @@ class Note:
|
||||
Returns:
|
||||
bool: Whether the tag was deleted.
|
||||
"""
|
||||
new_list = self.inline_tags.list.copy()
|
||||
new_list = self.tags.list.copy()
|
||||
|
||||
for _t in new_list:
|
||||
if re.search(tag, _t):
|
||||
_t = re.escape(_t)
|
||||
self.sub(rf"#{_t}([ \|,;:\*\(\)\[\]\\\.\n#&])", r"\1", is_regex=True)
|
||||
self.inline_tags.delete(tag)
|
||||
self.tags.delete(tag)
|
||||
|
||||
if new_list != self.inline_tags.list:
|
||||
if new_list != self.tags.list:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def delete_metadata(
|
||||
self, key: str, value: str = None, area: MetadataType = MetadataType.ALL
|
||||
self,
|
||||
key: str,
|
||||
value: str = None,
|
||||
area: MetadataType = MetadataType.ALL,
|
||||
is_regex: bool = False,
|
||||
) -> bool:
|
||||
"""Delete a key or key-value pair from the note's Metadata object and the content of the note. Regex is supported.
|
||||
|
||||
If no value is provided, will delete an entire specified key.
|
||||
|
||||
Args:
|
||||
area (MetadataType, optional): Area to delete metadata from. Defaults to MetadataType.ALL.
|
||||
is_regex (bool, optional): Whether to use regex to match the key/value.
|
||||
key (str): Key to delete.
|
||||
value (str, optional): Value to delete.
|
||||
area (MetadataType, optional): Area to delete metadata from. Defaults to MetadataType.ALL.
|
||||
|
||||
Returns:
|
||||
bool: Whether the key or key-value pair was deleted.
|
||||
@@ -242,15 +248,15 @@ class Note:
|
||||
|
||||
if (
|
||||
area == MetadataType.FRONTMATTER or area == MetadataType.ALL
|
||||
) and self.frontmatter.delete(key, value):
|
||||
) and self.frontmatter.delete(key=key, value_to_delete=value, is_regex=is_regex):
|
||||
self.write_frontmatter()
|
||||
changed_value = True
|
||||
|
||||
if (
|
||||
area == MetadataType.INLINE or area == MetadataType.ALL
|
||||
) and self.inline_metadata.contains(key, value):
|
||||
self.write_delete_inline_metadata(key, value)
|
||||
self.inline_metadata.delete(key, value)
|
||||
self.write_delete_inline_metadata(key=key, value=value, is_regex=is_regex)
|
||||
self.inline_metadata.delete(key=key, value_to_delete=value, is_regex=is_regex)
|
||||
changed_value = True
|
||||
|
||||
if changed_value:
|
||||
@@ -266,7 +272,7 @@ class Note:
|
||||
if self.frontmatter.has_changes():
|
||||
return True
|
||||
|
||||
if self.inline_tags.has_changes():
|
||||
if self.tags.has_changes():
|
||||
return True
|
||||
|
||||
if self.inline_metadata.has_changes():
|
||||
@@ -298,7 +304,7 @@ class Note:
|
||||
"""Print the note to the console."""
|
||||
console.print(self.file_content)
|
||||
|
||||
def rename_inline_tag(self, tag_1: str, tag_2: str) -> bool:
|
||||
def rename_tag(self, tag_1: str, tag_2: str) -> bool:
|
||||
"""Rename an inline tag. Updates the Metadata object and the text of the note.
|
||||
|
||||
Args:
|
||||
@@ -308,13 +314,13 @@ class Note:
|
||||
Returns:
|
||||
bool: Whether the tag was renamed.
|
||||
"""
|
||||
if tag_1 in self.inline_tags.list:
|
||||
if tag_1 in self.tags.list:
|
||||
self.sub(
|
||||
rf"#{tag_1}([ \|,;:\*\(\)\[\]\\\.\n#&])",
|
||||
rf"#{tag_2}\1",
|
||||
is_regex=True,
|
||||
)
|
||||
self.inline_tags.rename(tag_1, tag_2)
|
||||
self.tags.rename(tag_1, tag_2)
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -447,12 +453,15 @@ class Note:
|
||||
|
||||
return False
|
||||
|
||||
def write_delete_inline_metadata(self, key: str = None, value: str = None) -> bool:
|
||||
def write_delete_inline_metadata(
|
||||
self, key: str = None, value: str = None, is_regex: bool = False
|
||||
) -> bool:
|
||||
"""For a given inline metadata key and/or key-value pair, delete it from the text of the note. If no key is provided, will delete all inline metadata from the text of the note.
|
||||
|
||||
IMPORTANT: This method makes no changes to the InlineMetadata object.
|
||||
|
||||
Args:
|
||||
is_regex (bool, optional): Whether the key is a regex pattern or plain text. Defaults to False.
|
||||
key (str, optional): Key to delete.
|
||||
value (str, optional): Value to delete.
|
||||
|
||||
@@ -469,13 +478,15 @@ class Note:
|
||||
return True
|
||||
|
||||
for _k, _v in self.inline_metadata.dict.items():
|
||||
if re.search(key, _k):
|
||||
if (is_regex and re.search(key, _k)) or (not is_regex and key == _k):
|
||||
for _value in _v:
|
||||
if value is None:
|
||||
_k = re.escape(_k)
|
||||
_value = re.escape(_value)
|
||||
self.sub(rf"\[?{_k}:: \[?\[?{_value}\]?\]?", "", is_regex=True)
|
||||
elif re.search(value, _value):
|
||||
elif (is_regex and re.search(value, _value)) or (
|
||||
not is_regex and value == _value
|
||||
):
|
||||
_k = re.escape(_k)
|
||||
_value = re.escape(_value)
|
||||
self.sub(rf"\[?({_k}::) ?\[?\[?{_value}\]?\]?", r"\1", is_regex=True)
|
||||
|
||||
@@ -86,7 +86,7 @@ class Questions:
|
||||
self.vault = vault
|
||||
self.key = key
|
||||
|
||||
def _validate_existing_inline_tag(self, text: str) -> bool | str:
|
||||
def _validate_existing_tag(self, text: str) -> bool | str:
|
||||
"""Validate an existing inline tag.
|
||||
|
||||
Returns:
|
||||
@@ -297,7 +297,7 @@ class Questions:
|
||||
{"name": "Inspect Metadata", "value": "inspect_metadata"},
|
||||
{"name": "Filter Notes in Scope", "value": "filter_notes"},
|
||||
questionary.Separator("-------------------------------"),
|
||||
{"name": "Bulk changes from imported CSV", "value": "import_from_csv"},
|
||||
{"name": "Import bulk changes from CSV", "value": "import_from_csv"},
|
||||
{"name": "Add Metadata", "value": "add_metadata"},
|
||||
{"name": "Delete Metadata", "value": "delete_metadata"},
|
||||
{"name": "Rename Metadata", "value": "rename_metadata"},
|
||||
@@ -344,11 +344,11 @@ class Questions:
|
||||
question, default=default, style=self.style, qmark="INPUT |"
|
||||
).ask()
|
||||
|
||||
def ask_existing_inline_tag(self, question: str = "Enter a tag") -> str: # pragma: no cover
|
||||
def ask_existing_tag(self, question: str = "Enter a tag") -> str: # pragma: no cover
|
||||
"""Ask the user for an existing inline tag."""
|
||||
return questionary.text(
|
||||
question,
|
||||
validate=self._validate_existing_inline_tag,
|
||||
validate=self._validate_existing_tag,
|
||||
style=self.style,
|
||||
qmark="INPUT |",
|
||||
).ask()
|
||||
|
||||
@@ -11,7 +11,6 @@ from typing import Any
|
||||
import rich.repr
|
||||
import typer
|
||||
from rich import box
|
||||
from rich.progress import Progress, SpinnerColumn, TextColumn
|
||||
from rich.prompt import Confirm
|
||||
from rich.table import Table
|
||||
|
||||
@@ -64,12 +63,10 @@ class Vault:
|
||||
self.filters = filters
|
||||
self.all_note_paths = self._find_markdown_notes()
|
||||
|
||||
with Progress(
|
||||
SpinnerColumn(),
|
||||
TextColumn("[progress.description]{task.description}"),
|
||||
transient=True,
|
||||
) as progress:
|
||||
progress.add_task(description="Processing notes...", total=None)
|
||||
with console.status(
|
||||
"Processing notes... [dim](Can take a while for a large vault)[/]",
|
||||
spinner="bouncingBall",
|
||||
):
|
||||
self.all_notes: list[Note] = [
|
||||
Note(note_path=p, dry_run=self.dry_run) for p in self.all_note_paths
|
||||
]
|
||||
@@ -107,7 +104,7 @@ class Vault:
|
||||
]
|
||||
|
||||
if _filter.tag_filter is not None:
|
||||
notes_list = [n for n in notes_list if n.contains_inline_tag(_filter.tag_filter)]
|
||||
notes_list = [n for n in notes_list if n.contains_tag(_filter.tag_filter)]
|
||||
|
||||
if _filter.key_filter is not None and _filter.value_filter is not None:
|
||||
notes_list = [
|
||||
@@ -172,12 +169,10 @@ class Vault:
|
||||
def _rebuild_vault_metadata(self) -> None:
|
||||
"""Rebuild vault metadata."""
|
||||
self.metadata = VaultMetadata()
|
||||
with Progress(
|
||||
SpinnerColumn(),
|
||||
TextColumn("[progress.description]{task.description}"),
|
||||
transient=True,
|
||||
) as progress:
|
||||
progress.add_task(description="Processing notes...", total=None)
|
||||
with console.status(
|
||||
"Processing notes... [dim](Can take a while for a large vault)[/]",
|
||||
spinner="bouncingBall",
|
||||
):
|
||||
for _note in self.notes_in_scope:
|
||||
self.metadata.index_metadata(
|
||||
area=MetadataType.FRONTMATTER, metadata=_note.frontmatter.dict
|
||||
@@ -187,7 +182,7 @@ class Vault:
|
||||
)
|
||||
self.metadata.index_metadata(
|
||||
area=MetadataType.TAGS,
|
||||
metadata=_note.inline_tags.list,
|
||||
metadata=_note.tags.list,
|
||||
)
|
||||
|
||||
def add_metadata(
|
||||
@@ -273,7 +268,7 @@ class Vault:
|
||||
else:
|
||||
alerts.info("No backup found")
|
||||
|
||||
def delete_inline_tag(self, tag: str) -> int:
|
||||
def delete_tag(self, tag: str) -> int:
|
||||
"""Delete an inline tag in the vault.
|
||||
|
||||
Args:
|
||||
@@ -285,7 +280,7 @@ class Vault:
|
||||
num_changed = 0
|
||||
|
||||
for _note in self.notes_in_scope:
|
||||
if _note.delete_inline_tag(tag):
|
||||
if _note.delete_tag(tag):
|
||||
log.trace(f"Deleted tag from {_note.note_path}")
|
||||
num_changed += 1
|
||||
|
||||
@@ -294,10 +289,18 @@ class Vault:
|
||||
|
||||
return num_changed
|
||||
|
||||
def delete_metadata(self, key: str, value: str = None) -> int:
|
||||
def delete_metadata(
|
||||
self,
|
||||
key: str,
|
||||
value: str = None,
|
||||
area: MetadataType = MetadataType.ALL,
|
||||
is_regex: bool = False,
|
||||
) -> int:
|
||||
"""Delete metadata in the vault.
|
||||
|
||||
Args:
|
||||
area (MetadataType): Area of metadata to delete from.
|
||||
is_regex (bool): Whether to use regex for key and value. Defaults to False.
|
||||
key (str): Key to delete. Regex is supported
|
||||
value (str, optional): Value to delete. Regex is supported
|
||||
|
||||
@@ -307,7 +310,7 @@ class Vault:
|
||||
num_changed = 0
|
||||
|
||||
for _note in self.notes_in_scope:
|
||||
if _note.delete_metadata(key, value):
|
||||
if _note.delete_metadata(key=key, value=value, area=area, is_regex=is_regex):
|
||||
log.trace(f"Deleted metadata from {_note.note_path}")
|
||||
num_changed += 1
|
||||
|
||||
@@ -394,7 +397,7 @@ class Vault:
|
||||
]
|
||||
)
|
||||
|
||||
for tag in _note.inline_tags.list:
|
||||
for tag in _note.tags.list:
|
||||
writer.writerow(
|
||||
[_note.note_path.relative_to(self.vault_path), "tag", "", f"{tag}"]
|
||||
)
|
||||
@@ -463,7 +466,7 @@ class Vault:
|
||||
"""Count number of excluded notes."""
|
||||
return len(self.all_notes) - len(self.notes_in_scope)
|
||||
|
||||
def rename_inline_tag(self, old_tag: str, new_tag: str) -> int:
|
||||
def rename_tag(self, old_tag: str, new_tag: str) -> int:
|
||||
"""Rename an inline tag in the vault.
|
||||
|
||||
Args:
|
||||
@@ -476,7 +479,7 @@ class Vault:
|
||||
num_changed = 0
|
||||
|
||||
for _note in self.notes_in_scope:
|
||||
if _note.rename_inline_tag(old_tag, new_tag):
|
||||
if _note.rename_tag(old_tag, new_tag):
|
||||
log.trace(f"Renamed inline tag in {_note.note_path}")
|
||||
num_changed += 1
|
||||
|
||||
@@ -572,7 +575,7 @@ class Vault:
|
||||
for _note in self.all_notes:
|
||||
path = _note.note_path.relative_to(self.vault_path)
|
||||
if str(path) in dictionary:
|
||||
log.info(f"Updating metadata for '{path}'")
|
||||
log.debug(f"Bulk update metadata for '{path}'")
|
||||
num_changed += 1
|
||||
_note.delete_all_metadata()
|
||||
for row in dictionary[str(path)]:
|
||||
@@ -589,7 +592,7 @@ class Vault:
|
||||
location=self.insert_location,
|
||||
)
|
||||
|
||||
if row["type"].lower() == "tag" or row["type"].lower() == "tags":
|
||||
if row["type"].lower() == "tag":
|
||||
_note.add_metadata(
|
||||
area=MetadataType.TAGS,
|
||||
value=row["value"],
|
||||
|
||||
@@ -144,7 +144,7 @@ def test_add_metadata_tag(test_application, mocker, capsys) -> None:
|
||||
assert captured == Regex(r"SUCCESS +\| Added metadata to \d+ notes", re.DOTALL)
|
||||
|
||||
|
||||
def test_delete_inline_tag_1(test_application, mocker, capsys) -> None:
|
||||
def test_delete_tag_1(test_application, mocker, capsys) -> None:
|
||||
"""Test renaming an inline tag.
|
||||
|
||||
GIVEN an application
|
||||
@@ -159,10 +159,10 @@ def test_delete_inline_tag_1(test_application, mocker, capsys) -> None:
|
||||
)
|
||||
mocker.patch(
|
||||
"obsidian_metadata.models.application.Questions.ask_selection",
|
||||
side_effect=["delete_inline_tag", "back"],
|
||||
side_effect=["delete_tag", "back"],
|
||||
)
|
||||
mocker.patch(
|
||||
"obsidian_metadata.models.application.Questions.ask_existing_inline_tag",
|
||||
"obsidian_metadata.models.application.Questions.ask_existing_tag",
|
||||
return_value="breakfast",
|
||||
)
|
||||
|
||||
@@ -172,7 +172,7 @@ def test_delete_inline_tag_1(test_application, mocker, capsys) -> None:
|
||||
assert captured == Regex(r"SUCCESS +\| Deleted inline tag: breakfast in \d+ notes", re.DOTALL)
|
||||
|
||||
|
||||
def test_delete_inline_tag_2(test_application, mocker, capsys) -> None:
|
||||
def test_delete_tag_2(test_application, mocker, capsys) -> None:
|
||||
"""Test renaming an inline tag.
|
||||
|
||||
GIVEN an application
|
||||
@@ -187,10 +187,10 @@ def test_delete_inline_tag_2(test_application, mocker, capsys) -> None:
|
||||
)
|
||||
mocker.patch(
|
||||
"obsidian_metadata.models.application.Questions.ask_selection",
|
||||
side_effect=["delete_inline_tag", "back"],
|
||||
side_effect=["delete_tag", "back"],
|
||||
)
|
||||
mocker.patch(
|
||||
"obsidian_metadata.models.application.Questions.ask_existing_inline_tag",
|
||||
"obsidian_metadata.models.application.Questions.ask_existing_tag",
|
||||
return_value="not_a_tag_in_vault",
|
||||
)
|
||||
|
||||
@@ -388,7 +388,7 @@ def test_inspect_metadata_all(test_application, mocker, capsys) -> None:
|
||||
assert captured == Regex(r"type +│ article", re.DOTALL)
|
||||
|
||||
|
||||
def test_rename_inline_tag(test_application, mocker, capsys) -> None:
|
||||
def test_rename_tag(test_application, mocker, capsys) -> None:
|
||||
"""Test renaming an inline tag."""
|
||||
app = test_application
|
||||
app._load_vault()
|
||||
@@ -398,10 +398,10 @@ def test_rename_inline_tag(test_application, mocker, capsys) -> None:
|
||||
)
|
||||
mocker.patch(
|
||||
"obsidian_metadata.models.application.Questions.ask_selection",
|
||||
side_effect=["rename_inline_tag", "back"],
|
||||
side_effect=["rename_tag", "back"],
|
||||
)
|
||||
mocker.patch(
|
||||
"obsidian_metadata.models.application.Questions.ask_existing_inline_tag",
|
||||
"obsidian_metadata.models.application.Questions.ask_existing_tag",
|
||||
return_value="not_a_tag",
|
||||
)
|
||||
mocker.patch(
|
||||
@@ -420,10 +420,10 @@ def test_rename_inline_tag(test_application, mocker, capsys) -> None:
|
||||
)
|
||||
mocker.patch(
|
||||
"obsidian_metadata.models.application.Questions.ask_selection",
|
||||
side_effect=["rename_inline_tag", "back"],
|
||||
side_effect=["rename_tag", "back"],
|
||||
)
|
||||
mocker.patch(
|
||||
"obsidian_metadata.models.application.Questions.ask_existing_inline_tag",
|
||||
"obsidian_metadata.models.application.Questions.ask_existing_tag",
|
||||
return_value="breakfast",
|
||||
)
|
||||
mocker.patch(
|
||||
|
||||
@@ -17,7 +17,7 @@ def test_version() -> None:
|
||||
"""Test printing version and then exiting."""
|
||||
result = runner.invoke(app, ["--version"])
|
||||
assert result.exit_code == 0
|
||||
assert result.output == Regex(r"obsidian_metadata: v\d+\.\d+\.\d+$")
|
||||
assert "obsidian_metadata: v" in result.output
|
||||
|
||||
|
||||
def test_application(tmp_path) -> None:
|
||||
@@ -51,3 +51,25 @@ def test_application(tmp_path) -> None:
|
||||
|
||||
assert banner in result.output
|
||||
assert result.exit_code == 1
|
||||
|
||||
|
||||
def test_export_template(tmp_path) -> None:
|
||||
"""Test the export template command."""
|
||||
source_dir = Path(__file__).parent / "fixtures" / "test_vault"
|
||||
dest_dir = Path(tmp_path / "vault")
|
||||
|
||||
if not source_dir.exists():
|
||||
raise FileNotFoundError(f"Sample vault not found: {source_dir}")
|
||||
|
||||
shutil.copytree(source_dir, dest_dir)
|
||||
|
||||
config_path = tmp_path / "config.toml"
|
||||
export_path = tmp_path / "export_template.csv"
|
||||
result = runner.invoke(
|
||||
app,
|
||||
["--vault-path", dest_dir, "--config-file", config_path, "--export-template", export_path],
|
||||
)
|
||||
|
||||
assert "SUCCESS | Exported metadata to" in result.output
|
||||
assert result.exit_code == 0
|
||||
assert export_path.exists()
|
||||
|
||||
@@ -47,7 +47,7 @@ repeated_key:: repeated_key_value2
|
||||
"""
|
||||
|
||||
|
||||
def test_frontmatter_create_1() -> None:
|
||||
def test_create_1() -> None:
|
||||
"""Test frontmatter creation.
|
||||
|
||||
GIVEN valid frontmatter content
|
||||
@@ -72,7 +72,7 @@ def test_frontmatter_create_1() -> None:
|
||||
}
|
||||
|
||||
|
||||
def test_frontmatter_create_2() -> None:
|
||||
def test_create_2() -> None:
|
||||
"""Test frontmatter creation error.
|
||||
|
||||
GIVEN invalid frontmatter content
|
||||
@@ -88,7 +88,7 @@ invalid = = "content"
|
||||
Frontmatter(fn)
|
||||
|
||||
|
||||
def test_frontmatter_create_3():
|
||||
def test_create_3():
|
||||
"""Test frontmatter creation error.
|
||||
|
||||
GIVEN empty frontmatter content
|
||||
@@ -100,7 +100,7 @@ def test_frontmatter_create_3():
|
||||
assert frontmatter.dict == {}
|
||||
|
||||
|
||||
def test_frontmatter_create_4():
|
||||
def test_create_4():
|
||||
"""Test frontmatter creation error.
|
||||
|
||||
GIVEN empty frontmatter content with a yaml marker
|
||||
@@ -112,7 +112,7 @@ def test_frontmatter_create_4():
|
||||
assert frontmatter.dict == {}
|
||||
|
||||
|
||||
def test_frontmatter_add_1():
|
||||
def test_add_1():
|
||||
"""Test frontmatter add() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -124,7 +124,7 @@ def test_frontmatter_add_1():
|
||||
assert frontmatter.add("frontmatter_Key1") is False
|
||||
|
||||
|
||||
def test_frontmatter_add_2():
|
||||
def test_add_2():
|
||||
"""Test frontmatter add() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -135,7 +135,7 @@ def test_frontmatter_add_2():
|
||||
assert frontmatter.add("frontmatter_Key1", "frontmatter_Key1_value") is False
|
||||
|
||||
|
||||
def test_frontmatter_add_3():
|
||||
def test_add_3():
|
||||
"""Test frontmatter add() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -147,7 +147,7 @@ def test_frontmatter_add_3():
|
||||
assert "added_key" in frontmatter.dict
|
||||
|
||||
|
||||
def test_frontmatter_add_4():
|
||||
def test_add_4():
|
||||
"""Test frontmatter add() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -159,7 +159,7 @@ def test_frontmatter_add_4():
|
||||
assert frontmatter.dict["added_key"] == ["added_value"]
|
||||
|
||||
|
||||
def test_frontmatter_add_5():
|
||||
def test_add_5():
|
||||
"""Test frontmatter add() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -171,7 +171,7 @@ def test_frontmatter_add_5():
|
||||
assert frontmatter.dict["frontmatter_Key1"] == ["frontmatter_Key1_value", "new_value"]
|
||||
|
||||
|
||||
def test_frontmatter_add_6():
|
||||
def test_add_6():
|
||||
"""Test frontmatter add() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -187,7 +187,7 @@ def test_frontmatter_add_6():
|
||||
]
|
||||
|
||||
|
||||
def test_frontmatter_add_7():
|
||||
def test_add_7():
|
||||
"""Test frontmatter add() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -206,7 +206,7 @@ def test_frontmatter_add_7():
|
||||
]
|
||||
|
||||
|
||||
def test_frontmatter_contains_1():
|
||||
def test_contains_1():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -217,7 +217,7 @@ def test_frontmatter_contains_1():
|
||||
assert frontmatter.contains("frontmatter_Key1") is True
|
||||
|
||||
|
||||
def test_frontmatter_contains_2():
|
||||
def test_contains_2():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -228,7 +228,7 @@ def test_frontmatter_contains_2():
|
||||
assert frontmatter.contains("no_key") is False
|
||||
|
||||
|
||||
def test_frontmatter_contains_3():
|
||||
def test_contains_3():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -239,7 +239,7 @@ def test_frontmatter_contains_3():
|
||||
assert frontmatter.contains("frontmatter_Key2", "article") is True
|
||||
|
||||
|
||||
def test_frontmatter_contains_4():
|
||||
def test_contains_4():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -250,7 +250,7 @@ def test_frontmatter_contains_4():
|
||||
assert frontmatter.contains("frontmatter_Key2", "no value") is False
|
||||
|
||||
|
||||
def test_frontmatter_contains_5():
|
||||
def test_contains_5():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -261,7 +261,7 @@ def test_frontmatter_contains_5():
|
||||
assert frontmatter.contains(r"\d$", is_regex=True) is True
|
||||
|
||||
|
||||
def test_frontmatter_contains_6():
|
||||
def test_contains_6():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -272,7 +272,7 @@ def test_frontmatter_contains_6():
|
||||
assert frontmatter.contains(r"^\d", is_regex=True) is False
|
||||
|
||||
|
||||
def test_frontmatter_contains_7():
|
||||
def test_contains_7():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -283,7 +283,7 @@ def test_frontmatter_contains_7():
|
||||
assert frontmatter.contains("key", r"\w\d_", is_regex=True) is True
|
||||
|
||||
|
||||
def test_frontmatter_contains_8():
|
||||
def test_contains_8():
|
||||
"""Test frontmatter contains() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -294,7 +294,7 @@ def test_frontmatter_contains_8():
|
||||
assert frontmatter.contains("key", r"_\d", is_regex=True) is False
|
||||
|
||||
|
||||
def test_frontmatter_delete_1():
|
||||
def test_delete_1():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -305,7 +305,7 @@ def test_frontmatter_delete_1():
|
||||
assert frontmatter.delete("no key") is False
|
||||
|
||||
|
||||
def test_frontmatter_delete_2():
|
||||
def test_delete_2():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -316,7 +316,7 @@ def test_frontmatter_delete_2():
|
||||
assert frontmatter.delete("tags", "no value") is False
|
||||
|
||||
|
||||
def test_frontmatter_delete_3():
|
||||
def test_delete_3():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -324,10 +324,10 @@ def test_frontmatter_delete_3():
|
||||
THEN return False
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.delete(r"\d{3}") is False
|
||||
assert frontmatter.delete(r"\d{3}", is_regex=True) is False
|
||||
|
||||
|
||||
def test_frontmatter_delete_4():
|
||||
def test_delete_4():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -335,10 +335,10 @@ def test_frontmatter_delete_4():
|
||||
THEN return False
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.delete("tags", r"\d{5}") is False
|
||||
assert frontmatter.delete("tags", r"\d{5}", is_regex=True) is False
|
||||
|
||||
|
||||
def test_frontmatter_delete_5():
|
||||
def test_delete_5():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -351,7 +351,7 @@ def test_frontmatter_delete_5():
|
||||
assert "tags" in frontmatter.dict
|
||||
|
||||
|
||||
def test_frontmatter_delete_6():
|
||||
def test_delete_6():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -363,7 +363,7 @@ def test_frontmatter_delete_6():
|
||||
assert "tags" not in frontmatter.dict
|
||||
|
||||
|
||||
def test_frontmatter_delete_7():
|
||||
def test_delete_7():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -371,12 +371,12 @@ def test_frontmatter_delete_7():
|
||||
THEN return True and delete the matching keys from the dict
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.delete(r"front\w+") is True
|
||||
assert frontmatter.delete(r"front\w+", is_regex=True) is True
|
||||
assert "frontmatter_Key1" not in frontmatter.dict
|
||||
assert "frontmatter_Key2" not in frontmatter.dict
|
||||
|
||||
|
||||
def test_frontmatter_delete_8():
|
||||
def test_delete_8():
|
||||
"""Test frontmatter delete() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -384,13 +384,13 @@ def test_frontmatter_delete_8():
|
||||
THEN return True and delete the matching values
|
||||
"""
|
||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
||||
assert frontmatter.delete("tags", r"\w+_[23]") is True
|
||||
assert frontmatter.delete("tags", r"\w+_[23]", is_regex=True) is True
|
||||
assert "tag_2" not in frontmatter.dict["tags"]
|
||||
assert "📅/tag_3" not in frontmatter.dict["tags"]
|
||||
assert "tag_1" in frontmatter.dict["tags"]
|
||||
|
||||
|
||||
def test_frontmatter_delete_all():
|
||||
def test_delete_all():
|
||||
"""Test Frontmatter delete_all method.
|
||||
|
||||
GIVEN Frontmatter with multiple keys
|
||||
@@ -402,7 +402,7 @@ def test_frontmatter_delete_all():
|
||||
assert frontmatter.dict == {}
|
||||
|
||||
|
||||
def test_frontmatter_has_changes_1():
|
||||
def test_has_changes_1():
|
||||
"""Test frontmatter has_changes() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -413,7 +413,7 @@ def test_frontmatter_has_changes_1():
|
||||
assert frontmatter.has_changes() is False
|
||||
|
||||
|
||||
def test_frontmatter_has_changes_2():
|
||||
def test_has_changes_2():
|
||||
"""Test frontmatter has_changes() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -425,7 +425,7 @@ def test_frontmatter_has_changes_2():
|
||||
assert frontmatter.has_changes() is True
|
||||
|
||||
|
||||
def test_frontmatter_rename_1():
|
||||
def test_rename_1():
|
||||
"""Test frontmatter rename() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -436,7 +436,7 @@ def test_frontmatter_rename_1():
|
||||
assert frontmatter.rename("no key", "new key") is False
|
||||
|
||||
|
||||
def test_frontmatter_rename_2():
|
||||
def test_rename_2():
|
||||
"""Test frontmatter rename() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -447,7 +447,7 @@ def test_frontmatter_rename_2():
|
||||
assert frontmatter.rename("tags", "no tag", "new key") is False
|
||||
|
||||
|
||||
def test_frontmatter_rename_3():
|
||||
def test_rename_3():
|
||||
"""Test frontmatter rename() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -460,7 +460,7 @@ def test_frontmatter_rename_3():
|
||||
assert frontmatter.dict["new key"] == ["frontmatter_Key1_value"]
|
||||
|
||||
|
||||
def test_frontmatter_rename_4():
|
||||
def test_rename_4():
|
||||
"""Test frontmatter rename() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -473,7 +473,7 @@ def test_frontmatter_rename_4():
|
||||
assert "new tag" in frontmatter.dict["tags"]
|
||||
|
||||
|
||||
def test_frontmatter_rename_5():
|
||||
def test_rename_5():
|
||||
"""Test frontmatter rename() method.
|
||||
|
||||
GIVEN a Frontmatter object
|
||||
@@ -486,7 +486,7 @@ def test_frontmatter_rename_5():
|
||||
assert frontmatter.dict["tags"] == ["tag_2", "📅/tag_3"]
|
||||
|
||||
|
||||
def test_frontmatter_to_yaml_1():
|
||||
def test_to_yaml_1():
|
||||
"""Test Frontmatter to_yaml method.
|
||||
|
||||
GIVEN a dictionary
|
||||
@@ -508,7 +508,7 @@ shared_key1: shared_key1_value
|
||||
assert frontmatter.to_yaml() == new_frontmatter
|
||||
|
||||
|
||||
def test_frontmatter_to_yaml_2():
|
||||
def test_to_yaml_2():
|
||||
"""Test Frontmatter to_yaml method.
|
||||
|
||||
GIVEN a dictionary
|
||||
|
||||
@@ -77,7 +77,7 @@ def test__grab_inline_metadata_2():
|
||||
}
|
||||
|
||||
|
||||
def test_inline_metadata_add_1():
|
||||
def test_add_1():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -88,7 +88,7 @@ def test_inline_metadata_add_1():
|
||||
assert inline.add("key1") is False
|
||||
|
||||
|
||||
def test_inline_metadata_add_2():
|
||||
def test_add_2():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -99,7 +99,7 @@ def test_inline_metadata_add_2():
|
||||
assert inline.add("key1", "value1") is False
|
||||
|
||||
|
||||
def test_inline_metadata_add_3():
|
||||
def test_add_3():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -111,7 +111,7 @@ def test_inline_metadata_add_3():
|
||||
assert "added_key" in inline.dict
|
||||
|
||||
|
||||
def test_inline_metadata_add_4():
|
||||
def test_add_4():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -123,7 +123,7 @@ def test_inline_metadata_add_4():
|
||||
assert inline.dict["added_key"] == ["added_value"]
|
||||
|
||||
|
||||
def test_inline_metadata_add_5():
|
||||
def test_add_5():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -135,7 +135,7 @@ def test_inline_metadata_add_5():
|
||||
assert inline.dict["key1"] == ["value1", "value2", "value3", "new_value"]
|
||||
|
||||
|
||||
def test_inline_metadata_add_6():
|
||||
def test_add_6():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -147,7 +147,7 @@ def test_inline_metadata_add_6():
|
||||
assert inline.dict["key2"] == ["new_value", "new_value2", "value1"]
|
||||
|
||||
|
||||
def test_inline_metadata_add_7():
|
||||
def test_add_7():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -159,7 +159,19 @@ def test_inline_metadata_add_7():
|
||||
assert inline.dict["key1"] == ["new_value", "new_value2", "value1", "value2", "value3"]
|
||||
|
||||
|
||||
def test_inline_metadata_contains_1():
|
||||
def test_add_8():
|
||||
"""Test InlineMetadata add() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
WHEN the add() method is called with a new key and a list of values
|
||||
THEN return True and add the new values to the dict
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.add("new_key", ["value1", "new_value", "new_value2"]) is True
|
||||
assert inline.dict["new_key"] == ["value1", "new_value", "new_value2"]
|
||||
|
||||
|
||||
def test_contains_1():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -170,7 +182,7 @@ def test_inline_metadata_contains_1():
|
||||
assert inline.contains("key1") is True
|
||||
|
||||
|
||||
def test_inline_metadata_contains_2():
|
||||
def test_contains_2():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -181,7 +193,7 @@ def test_inline_metadata_contains_2():
|
||||
assert inline.contains("no_key") is False
|
||||
|
||||
|
||||
def test_inline_metadata_contains_3():
|
||||
def test_contains_3():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -192,7 +204,7 @@ def test_inline_metadata_contains_3():
|
||||
assert inline.contains("key1", "value1") is True
|
||||
|
||||
|
||||
def test_inline_metadata_contains_4():
|
||||
def test_contains_4():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -203,7 +215,7 @@ def test_inline_metadata_contains_4():
|
||||
assert inline.contains("key1", "no value") is False
|
||||
|
||||
|
||||
def test_inline_metadata_contains_5():
|
||||
def test_contains_5():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -214,7 +226,7 @@ def test_inline_metadata_contains_5():
|
||||
assert inline.contains(r"\d$", is_regex=True) is True
|
||||
|
||||
|
||||
def test_inline_metadata_contains_6():
|
||||
def test_contains_6():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -225,7 +237,7 @@ def test_inline_metadata_contains_6():
|
||||
assert inline.contains(r"^\d", is_regex=True) is False
|
||||
|
||||
|
||||
def test_inline_metadata_contains_7():
|
||||
def test_contains_7():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -236,7 +248,7 @@ def test_inline_metadata_contains_7():
|
||||
assert inline.contains(r"key\d", r"\w\d", is_regex=True) is True
|
||||
|
||||
|
||||
def test_inline_metadata_contains_8():
|
||||
def test_contains_8():
|
||||
"""Test InlineMetadata contains() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -247,7 +259,7 @@ def test_inline_metadata_contains_8():
|
||||
assert inline.contains("key1", r"_\d", is_regex=True) is False
|
||||
|
||||
|
||||
def test_inline_metadata_delete_1():
|
||||
def test_delete_1():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -258,7 +270,7 @@ def test_inline_metadata_delete_1():
|
||||
assert inline.delete("no key") is False
|
||||
|
||||
|
||||
def test_inline_metadata_delete_2():
|
||||
def test_delete_2():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -269,7 +281,7 @@ def test_inline_metadata_delete_2():
|
||||
assert inline.delete("key1", "no value") is False
|
||||
|
||||
|
||||
def test_inline_metadata_delete_3():
|
||||
def test_delete_3():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -277,10 +289,10 @@ def test_inline_metadata_delete_3():
|
||||
THEN return False
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.delete(r"\d{3}") is False
|
||||
assert inline.delete(r"\d{3}", is_regex=True) is False
|
||||
|
||||
|
||||
def test_inline_metadata_delete_4():
|
||||
def test_delete_4():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -288,10 +300,10 @@ def test_inline_metadata_delete_4():
|
||||
THEN return False
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.delete("key1", r"\d{5}") is False
|
||||
assert inline.delete("key1", r"\d{5}", is_regex=True) is False
|
||||
|
||||
|
||||
def test_inline_metadata_delete_5():
|
||||
def test_delete_5():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -304,7 +316,7 @@ def test_inline_metadata_delete_5():
|
||||
assert "key1" in inline.dict
|
||||
|
||||
|
||||
def test_inline_metadata_delete_6():
|
||||
def test_delete_6():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -316,7 +328,7 @@ def test_inline_metadata_delete_6():
|
||||
assert "key1" not in inline.dict
|
||||
|
||||
|
||||
def test_inline_metadata_delete_7():
|
||||
def test_delete_7():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -324,12 +336,12 @@ def test_inline_metadata_delete_7():
|
||||
THEN return True and delete the matching keys from the dict
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.delete(r"key\w+") is True
|
||||
assert inline.delete(r"key\w+", is_regex=True) is True
|
||||
assert "key1" not in inline.dict
|
||||
assert "key2" not in inline.dict
|
||||
|
||||
|
||||
def test_inline_metadata_delete_8():
|
||||
def test_delete_8():
|
||||
"""Test InlineMetadata delete() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -337,13 +349,13 @@ def test_inline_metadata_delete_8():
|
||||
THEN return True and delete the matching values
|
||||
"""
|
||||
inline = InlineMetadata(INLINE_CONTENT)
|
||||
assert inline.delete("key1", r"\w+\d") is True
|
||||
assert inline.delete("key1", r"\w+\d", is_regex=True) is True
|
||||
assert "value1" not in inline.dict["key1"]
|
||||
assert "value2" not in inline.dict["key1"]
|
||||
assert "value3" not in inline.dict["key1"]
|
||||
|
||||
|
||||
def test_inline_metadata_has_changes_1():
|
||||
def test_has_changes_1():
|
||||
"""Test InlineMetadata has_changes() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -354,7 +366,7 @@ def test_inline_metadata_has_changes_1():
|
||||
assert inline.has_changes() is False
|
||||
|
||||
|
||||
def test_inline_metadata_has_changes_2():
|
||||
def test_has_changes_2():
|
||||
"""Test InlineMetadata has_changes() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -366,7 +378,7 @@ def test_inline_metadata_has_changes_2():
|
||||
assert inline.has_changes() is True
|
||||
|
||||
|
||||
def test_inline_metadata_rename_1():
|
||||
def test_rename_1():
|
||||
"""Test InlineMetadata rename() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -377,7 +389,7 @@ def test_inline_metadata_rename_1():
|
||||
assert inline.rename("no key", "new key") is False
|
||||
|
||||
|
||||
def test_inline_metadata_rename_2():
|
||||
def test_rename_2():
|
||||
"""Test InlineMetadata rename() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -388,7 +400,7 @@ def test_inline_metadata_rename_2():
|
||||
assert inline.rename("key1", "no value", "new value") is False
|
||||
|
||||
|
||||
def test_inline_metadata_rename_3():
|
||||
def test_rename_3():
|
||||
"""Test InlineMetadata rename() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -401,7 +413,7 @@ def test_inline_metadata_rename_3():
|
||||
assert inline.dict["new key"] == ["value1", "value2", "value3"]
|
||||
|
||||
|
||||
def test_inline_metadata_rename_4():
|
||||
def test_rename_4():
|
||||
"""Test InlineMetadata rename() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
@@ -414,7 +426,7 @@ def test_inline_metadata_rename_4():
|
||||
assert "new value" in inline.dict["key1"]
|
||||
|
||||
|
||||
def test_inline_metadata_rename_5():
|
||||
def test_rename_5():
|
||||
"""Test InlineMetadata rename() method.
|
||||
|
||||
GIVEN a InlineMetadata object
|
||||
|
||||
367
tests/metadata_tags_test.py
Normal file
367
tests/metadata_tags_test.py
Normal file
@@ -0,0 +1,367 @@
|
||||
# type: ignore
|
||||
"""Test inline tags from metadata.py."""
|
||||
|
||||
from obsidian_metadata.models.metadata import InlineTags
|
||||
|
||||
CONTENT = """\
|
||||
#tag1 #tag2
|
||||
> #tag3
|
||||
**#tag4**
|
||||
I am a sentence with #tag5 and #tag6 in the middle
|
||||
#tag🙈7
|
||||
#tag/8
|
||||
#tag/👋/9
|
||||
"""
|
||||
|
||||
|
||||
def test__grab_inline_tags_1() -> None:
|
||||
"""Test _grab_inline_tags() method.
|
||||
|
||||
GIVEN a string with a codeblock
|
||||
WHEN the method is called
|
||||
THEN the codeblock is ignored
|
||||
"""
|
||||
content = """
|
||||
some text
|
||||
|
||||
```python
|
||||
#tag1
|
||||
#tag2
|
||||
```
|
||||
|
||||
```
|
||||
#tag3
|
||||
#tag4
|
||||
```
|
||||
"""
|
||||
tags = InlineTags(content)
|
||||
assert tags.list == []
|
||||
assert tags.list_original == []
|
||||
|
||||
|
||||
def test__grab_inline_tags_2() -> None:
|
||||
"""Test _grab_inline_tags() method.
|
||||
|
||||
GIVEN a string with tags
|
||||
WHEN the method is called
|
||||
THEN the tags are extracted
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.list == [
|
||||
"tag/8",
|
||||
"tag/👋/9",
|
||||
"tag1",
|
||||
"tag2",
|
||||
"tag3",
|
||||
"tag4",
|
||||
"tag5",
|
||||
"tag6",
|
||||
"tag🙈7",
|
||||
]
|
||||
assert tags.list_original == [
|
||||
"tag/8",
|
||||
"tag/👋/9",
|
||||
"tag1",
|
||||
"tag2",
|
||||
"tag3",
|
||||
"tag4",
|
||||
"tag5",
|
||||
"tag6",
|
||||
"tag🙈7",
|
||||
]
|
||||
|
||||
|
||||
def test_add_1():
|
||||
"""Test add() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the add() method is called with a tag that exists in the list
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.add("tag1") is False
|
||||
|
||||
|
||||
def test_add_2():
|
||||
"""Test add() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the add() method is called with a new tag
|
||||
THEN return True and add the tag to the list
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.add("new_tag") is True
|
||||
assert "new_tag" in tags.list
|
||||
|
||||
|
||||
def test_add_3():
|
||||
"""Test add() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the add() method is called with a list of new tags
|
||||
THEN return True and add the tags to the list
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
new_tags = ["new_tag1", "new_tag2"]
|
||||
assert tags.add(new_tags) is True
|
||||
assert "new_tag1" in tags.list
|
||||
assert "new_tag2" in tags.list
|
||||
|
||||
|
||||
def test_add_4():
|
||||
"""Test add() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the add() method is called with a list of tags, some of which already exist
|
||||
THEN return True and add only the new tags to the list
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
new_tags = ["new_tag1", "new_tag2", "tag1", "tag2"]
|
||||
assert tags.add(new_tags) is True
|
||||
assert tags.list == [
|
||||
"new_tag1",
|
||||
"new_tag2",
|
||||
"tag/8",
|
||||
"tag/👋/9",
|
||||
"tag1",
|
||||
"tag2",
|
||||
"tag3",
|
||||
"tag4",
|
||||
"tag5",
|
||||
"tag6",
|
||||
"tag🙈7",
|
||||
]
|
||||
|
||||
|
||||
def test_add_5():
|
||||
"""Test add() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the add() method is called with a list of tags which are already in the list
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
new_tags = ["tag1", "tag2"]
|
||||
assert tags.add(new_tags) is False
|
||||
assert "tag1" in tags.list
|
||||
assert "tag2" in tags.list
|
||||
|
||||
|
||||
def test_add_6():
|
||||
"""Test add() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the add() method is called with a list of tags which have a # in the name
|
||||
THEN strip the # from the tag name
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
new_tags = ["#tag1", "#tag2", "#new_tag"]
|
||||
assert tags.add(new_tags) is True
|
||||
assert tags.list == [
|
||||
"new_tag",
|
||||
"tag/8",
|
||||
"tag/👋/9",
|
||||
"tag1",
|
||||
"tag2",
|
||||
"tag3",
|
||||
"tag4",
|
||||
"tag5",
|
||||
"tag6",
|
||||
"tag🙈7",
|
||||
]
|
||||
|
||||
|
||||
def test_add_7():
|
||||
"""Test add() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the add() method is called with a tag which has a # in the name
|
||||
THEN strip the # from the tag name
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.add("#tag1") is False
|
||||
assert tags.add("#new_tag") is True
|
||||
assert "new_tag" in tags.list
|
||||
|
||||
|
||||
def test_contains_1():
|
||||
"""Test contains() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the contains() method is called with a tag that exists in the list
|
||||
THEN return True
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.contains("tag1") is True
|
||||
|
||||
|
||||
def test_contains_2():
|
||||
"""Test contains() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the contains() method is called with a tag that does not exist in the list
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.contains("no_tag") is False
|
||||
|
||||
|
||||
def test_contains_3():
|
||||
"""Test contains() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the contains() method is called with a regex that matches a tag in the list
|
||||
THEN return True
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.contains(r"tag\d", is_regex=True) is True
|
||||
|
||||
|
||||
def test_contains_4():
|
||||
"""Test contains() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the contains() method is called with a regex that does not match any tags in the list
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.contains(r"tag\d\d", is_regex=True) is False
|
||||
|
||||
|
||||
def test_delete_1():
|
||||
"""Test delete() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the delete() method is called with a tag that exists in the list
|
||||
THEN return True and remove the tag from the list
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.delete("tag1") is True
|
||||
assert "tag1" not in tags.list
|
||||
|
||||
|
||||
def test_delete_2():
|
||||
"""Test delete() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the delete() method is called with a tag that does not exist in the list
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.delete("no_tag") is False
|
||||
|
||||
|
||||
def test_delete_3():
|
||||
"""Test delete() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the delete() method is called with a regex that matches a tag in the list
|
||||
THEN return True and remove the tag from the list
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.delete(r"tag\d") is True
|
||||
assert tags.list == ["tag/8", "tag/👋/9", "tag🙈7"]
|
||||
|
||||
|
||||
def test_delete_4():
|
||||
"""Test delete() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the delete() method is called with a regex that does not match any tags in the list
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.delete(r"tag\d\d") is False
|
||||
|
||||
|
||||
def test_has_changes_1():
|
||||
"""Test has_changes() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the has_changes() method is called
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.has_changes() is False
|
||||
|
||||
|
||||
def test_has_changes_2():
|
||||
"""Test has_changes() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the has_changes() method after the list has been updated
|
||||
THEN return True
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
tags.list = ["new_tag"]
|
||||
assert tags.has_changes() is True
|
||||
|
||||
|
||||
def test_rename_1():
|
||||
"""Test rename() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the rename() method is called with a tag that exists in the list
|
||||
THEN return True and rename the tag in the list
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.rename("tag1", "new_tag") is True
|
||||
assert "tag1" not in tags.list
|
||||
assert "new_tag" in tags.list
|
||||
|
||||
|
||||
def test_rename_2():
|
||||
"""Test rename() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the rename() method is called with a tag that does not exist in the list
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.rename("no_tag", "new_tag") is False
|
||||
assert "new_tag" not in tags.list
|
||||
|
||||
|
||||
def test_rename_3():
|
||||
"""Test rename() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the rename() method is called with a tag that exists and the new tag name already exists in the list
|
||||
THEN return True and ensure the new tag name is only in the list once
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.rename(r"tag1", "tag2") is True
|
||||
assert tags.list == [
|
||||
"tag/8",
|
||||
"tag/👋/9",
|
||||
"tag2",
|
||||
"tag3",
|
||||
"tag4",
|
||||
"tag5",
|
||||
"tag6",
|
||||
"tag🙈7",
|
||||
]
|
||||
|
||||
|
||||
def test_rename_4():
|
||||
"""Test rename() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the rename() method is called with a new tag value that is None
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.rename("tag1", None) is False
|
||||
assert "tag1" in tags.list
|
||||
|
||||
|
||||
def test_rename_5():
|
||||
"""Test rename() method.
|
||||
|
||||
GIVEN a InlineTag object
|
||||
WHEN the rename() method is called with a new tag value that is empty
|
||||
THEN return False
|
||||
"""
|
||||
tags = InlineTags(CONTENT)
|
||||
assert tags.rename("tag1", "") is False
|
||||
assert "tag1" in tags.list
|
||||
@@ -1,393 +0,0 @@
|
||||
# type: ignore
|
||||
"""Test metadata.py."""
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from obsidian_metadata.models.enums import MetadataType
|
||||
from obsidian_metadata.models.metadata import (
|
||||
InlineTags,
|
||||
VaultMetadata,
|
||||
)
|
||||
from tests.helpers import Regex, remove_ansi
|
||||
|
||||
FILE_CONTENT: str = Path("tests/fixtures/test_vault/test1.md").read_text()
|
||||
TAG_LIST: list[str] = ["tag 1", "tag 2", "tag 3"]
|
||||
METADATA: dict[str, list[str]] = {
|
||||
"frontmatter_Key1": ["author name"],
|
||||
"frontmatter_Key2": ["note", "article"],
|
||||
"shared_key1": ["shared_key1_value"],
|
||||
"shared_key2": ["shared_key2_value"],
|
||||
"tags": ["tag 2", "tag 1", "tag 3"],
|
||||
"top_key1": ["top_key1_value"],
|
||||
"top_key2": ["top_key2_value"],
|
||||
"top_key3": ["top_key3_value"],
|
||||
"intext_key": ["intext_key_value"],
|
||||
}
|
||||
METADATA_2: dict[str, list[str]] = {"key1": ["value1"], "key2": ["value2", "value3"]}
|
||||
FRONTMATTER_CONTENT: str = """
|
||||
---
|
||||
tags:
|
||||
- tag_1
|
||||
- tag_2
|
||||
-
|
||||
- 📅/tag_3
|
||||
frontmatter_Key1: "frontmatter_Key1_value"
|
||||
frontmatter_Key2: ["note", "article"]
|
||||
shared_key1: "shared_key1_value"
|
||||
---
|
||||
more content
|
||||
|
||||
---
|
||||
horizontal: rule
|
||||
---
|
||||
"""
|
||||
INLINE_CONTENT = """\
|
||||
repeated_key:: repeated_key_value1
|
||||
#inline_tag_top1,#inline_tag_top2
|
||||
**bold_key1**:: bold_key1_value
|
||||
**bold_key2:: bold_key2_value**
|
||||
link_key:: [[link_key_value]]
|
||||
tag_key:: #tag_key_value
|
||||
emoji_📅_key:: emoji_📅_key_value
|
||||
**#bold_tag**
|
||||
|
||||
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. [in_text_key1:: in_text_key1_value] Ut enim ad minim veniam, quis nostrud exercitation [in_text_key2:: in_text_key2_value] ullamco laboris nisi ut aliquip ex ea commodo consequat. #in_text_tag Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
||||
|
||||
```python
|
||||
#ffffff
|
||||
# This is sample text [no_key:: value]with tags and metadata
|
||||
#in_codeblock_tag1
|
||||
#ffffff;
|
||||
in_codeblock_key:: in_codeblock_value
|
||||
The quick brown fox jumped over the #in_codeblock_tag2
|
||||
```
|
||||
repeated_key:: repeated_key_value2
|
||||
"""
|
||||
|
||||
|
||||
def test_inline_tags_add() -> None:
|
||||
"""Test inline tags add."""
|
||||
tags = InlineTags(INLINE_CONTENT)
|
||||
|
||||
assert tags.add("bold_tag") is False
|
||||
assert tags.add("new_tag") is True
|
||||
assert tags.list == [
|
||||
"bold_tag",
|
||||
"in_text_tag",
|
||||
"inline_tag_top1",
|
||||
"inline_tag_top2",
|
||||
"new_tag",
|
||||
"tag_key_value",
|
||||
]
|
||||
|
||||
|
||||
def test_inline_tags_contains() -> None:
|
||||
"""Test inline tags contains."""
|
||||
tags = InlineTags(INLINE_CONTENT)
|
||||
assert tags.contains("bold_tag") is True
|
||||
assert tags.contains("no tag") is False
|
||||
|
||||
assert tags.contains(r"\w_\w", is_regex=True) is True
|
||||
assert tags.contains(r"\d_\d", is_regex=True) is False
|
||||
|
||||
|
||||
def test_inline_tags_create() -> None:
|
||||
"""Test inline tags creation."""
|
||||
tags = InlineTags(FRONTMATTER_CONTENT)
|
||||
tags.metadata_key
|
||||
assert tags.list == []
|
||||
|
||||
tags = InlineTags(INLINE_CONTENT)
|
||||
assert tags.list == [
|
||||
"bold_tag",
|
||||
"in_text_tag",
|
||||
"inline_tag_top1",
|
||||
"inline_tag_top2",
|
||||
"tag_key_value",
|
||||
]
|
||||
assert tags.list_original == [
|
||||
"bold_tag",
|
||||
"in_text_tag",
|
||||
"inline_tag_top1",
|
||||
"inline_tag_top2",
|
||||
"tag_key_value",
|
||||
]
|
||||
|
||||
|
||||
def test_inline_tags_delete() -> None:
    """Deleting accepts literal tags or regex patterns and tracks dirty state."""
    inline = InlineTags(INLINE_CONTENT)
    assert inline.list == [
        "bold_tag",
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "tag_key_value",
    ]

    # An unknown tag deletes nothing and leaves the object unchanged.
    assert inline.delete("no tag") is False
    assert inline.has_changes() is False

    # Deleting an existing tag succeeds and marks the object dirty.
    assert inline.delete("bold_tag") is True
    assert inline.list == [
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "tag_key_value",
    ]
    assert inline.has_changes() is True

    # Regex deletes: a non-matching pattern is False, matches are removed.
    assert inline.delete(r"\d{3}") is False
    assert inline.delete(r"inline_tag_top\d") is True
    assert inline.list == ["in_text_tag", "tag_key_value"]
|
||||
|
||||
|
||||
def test_inline_tags_rename() -> None:
    """Renaming replaces a tag in place and keeps the list sorted."""
    inline = InlineTags(INLINE_CONTENT)
    assert inline.list == [
        "bold_tag",
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "tag_key_value",
    ]

    # A missing source tag is a no-op.
    assert inline.rename("no tag", "new tag") is False
    assert inline.has_changes() is False

    # Renaming an existing tag succeeds and marks the object dirty.
    assert inline.rename("bold_tag", "new tag") is True
    assert inline.list == [
        "in_text_tag",
        "inline_tag_top1",
        "inline_tag_top2",
        "new tag",
        "tag_key_value",
    ]
    assert inline.has_changes() is True
|
||||
|
||||
|
||||
def test_vault_metadata() -> None:
    """Indexing merges frontmatter, inline metadata, and tags into the vault view."""
    vm = VaultMetadata()
    assert vm.dict == {}

    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=METADATA)
    vm.index_metadata(area=MetadataType.INLINE, metadata=METADATA_2)
    vm.index_metadata(area=MetadataType.TAGS, metadata=TAG_LIST)

    frontmatter_expected = {
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "intext_key": ["intext_key_value"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }
    inline_expected = {"key1": ["value1"], "key2": ["value2", "value3"]}

    # The combined dict holds every key from both areas.
    assert vm.dict == {**frontmatter_expected, **inline_expected}
    assert vm.frontmatter == frontmatter_expected
    assert vm.inline_metadata == inline_expected
    assert vm.tags == ["tag 1", "tag 2", "tag 3"]

    # Re-indexing merges new keys/values into the existing structures.
    vm.index_metadata(
        area=MetadataType.FRONTMATTER,
        metadata={"added_key": ["added_value"], "frontmatter_Key2": ["new_value"]},
    )
    vm.index_metadata(area=MetadataType.TAGS, metadata=["tag 4", "tag 5"])

    frontmatter_expected["added_key"] = ["added_value"]
    frontmatter_expected["frontmatter_Key2"] = ["article", "new_value", "note"]
    assert vm.dict == {**frontmatter_expected, **inline_expected}
    assert vm.frontmatter == frontmatter_expected
    assert vm.inline_metadata == inline_expected
    assert vm.tags == ["tag 1", "tag 2", "tag 3", "tag 4", "tag 5"]
|
||||
|
||||
|
||||
def test_vault_metadata_print(capsys) -> None:
    """Each print area renders the expected tables and tag listings."""
    vm = VaultMetadata()
    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=METADATA)
    vm.index_metadata(area=MetadataType.INLINE, metadata=METADATA_2)
    vm.index_metadata(area=MetadataType.TAGS, metadata=TAG_LIST)

    def printed(area):
        """Print the requested area and return the captured output sans ANSI codes."""
        vm.print_metadata(area=area)
        return remove_ansi(capsys.readouterr().out)

    out = printed(MetadataType.ALL)
    assert "All metadata" in out
    assert "All inline tags" in out
    assert "┃ Keys ┃ Values ┃" in out
    assert "│ shared_key1 │ shared_key1_value │" in out
    assert out == Regex("#tag 1 +#tag 2")

    out = printed(MetadataType.FRONTMATTER)
    assert "All frontmatter" in out
    assert "┃ Keys ┃ Values ┃" in out
    assert "│ shared_key1 │ shared_key1_value │" in out
    # Inline-only values are excluded from the frontmatter view.
    assert "value1" not in out

    out = printed(MetadataType.INLINE)
    assert "All inline" in out
    assert "┃ Keys ┃ Values ┃" in out
    # Frontmatter-only keys are excluded from the inline view.
    assert "shared_key1" not in out
    assert "│ key1 │ value1 │" in out

    out = printed(MetadataType.TAGS)
    assert "All inline tags " in out
    assert "┃ Keys ┃ Values ┃" not in out
    assert out == Regex("#tag 1 +#tag 2")

    out = printed(MetadataType.KEYS)
    assert "All Keys " in out
    assert "┃ Keys ┃ Values ┃" not in out
    assert out != Regex("#tag 1 +#tag 2")
    assert out == Regex("frontmatter_Key1 +frontmatter_Key2")
|
||||
|
||||
|
||||
def test_vault_metadata_contains() -> None:
    """contains() searches each area by key/value, literally or by regex."""
    vm = VaultMetadata()
    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=METADATA)
    vm.index_metadata(area=MetadataType.INLINE, metadata=METADATA_2)
    vm.index_metadata(area=MetadataType.TAGS, metadata=TAG_LIST)

    frontmatter_expected = {
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "intext_key": ["intext_key_value"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }
    inline_expected = {"key1": ["value1"], "key2": ["value2", "value3"]}
    assert vm.dict == {**frontmatter_expected, **inline_expected}
    assert vm.frontmatter == frontmatter_expected
    assert vm.inline_metadata == inline_expected
    assert vm.tags == ["tag 1", "tag 2", "tag 3"]

    # ALL requires a key; a value alone is rejected.
    with pytest.raises(ValueError):
        vm.contains(area=MetadataType.ALL, value="key1")
    assert vm.contains(area=MetadataType.ALL, key="no_key") is False
    assert vm.contains(area=MetadataType.ALL, key="key1") is True
    assert vm.contains(area=MetadataType.ALL, key="frontmatter_Key2", value="article") is True
    assert vm.contains(area=MetadataType.ALL, key="frontmatter_Key2", value="none") is False
    assert vm.contains(area=MetadataType.ALL, key="1$", is_regex=True) is True
    assert vm.contains(area=MetadataType.ALL, key=r"\d\d", is_regex=True) is False

    # FRONTMATTER lookups by key, key+value, and key regex.
    assert vm.contains(area=MetadataType.FRONTMATTER, key="no_key") is False
    assert vm.contains(area=MetadataType.FRONTMATTER, key="frontmatter_Key1") is True
    assert (
        vm.contains(area=MetadataType.FRONTMATTER, key="frontmatter_Key2", value="article") is True
    )
    assert vm.contains(area=MetadataType.FRONTMATTER, key="frontmatter_Key2", value="none") is False
    assert vm.contains(area=MetadataType.FRONTMATTER, key="1$", is_regex=True) is True
    assert vm.contains(area=MetadataType.FRONTMATTER, key=r"\d\d", is_regex=True) is False

    # INLINE lookups by key, key+value, and key regex.
    assert vm.contains(area=MetadataType.INLINE, key="no_key") is False
    assert vm.contains(area=MetadataType.INLINE, key="key1") is True
    assert vm.contains(area=MetadataType.INLINE, key="key2", value="value3") is True
    assert vm.contains(area=MetadataType.INLINE, key="key2", value="none") is False
    assert vm.contains(area=MetadataType.INLINE, key="1$", is_regex=True) is True
    assert vm.contains(area=MetadataType.INLINE, key=r"\d\d", is_regex=True) is False

    # TAGS searches values only and rejects key-only lookups.
    assert vm.contains(area=MetadataType.TAGS, value="no_tag") is False
    assert vm.contains(area=MetadataType.TAGS, value="tag 1") is True
    assert vm.contains(area=MetadataType.TAGS, value=r"\w+ \d$", is_regex=True) is True
    assert vm.contains(area=MetadataType.TAGS, value=r"\w+ \d\d$", is_regex=True) is False
    with pytest.raises(ValueError):
        vm.contains(area=MetadataType.TAGS, key="key1")
|
||||
|
||||
|
||||
def test_vault_metadata_delete() -> None:
    """delete() removes a single value or an entire key from the vault dict."""
    vm = VaultMetadata()
    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=METADATA)
    assert vm.dict == {
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "intext_key": ["intext_key_value"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }

    # Unknown keys and values are no-ops.
    assert vm.delete("no key") is False
    assert vm.delete("tags", "no value") is False

    # Delete one value from a key, then the whole key.
    assert vm.delete("tags", "tag 2") is True
    assert vm.dict["tags"] == ["tag 1", "tag 3"]
    assert vm.delete("tags") is True
    assert "tags" not in vm.dict
|
||||
|
||||
|
||||
def test_vault_metadata_rename() -> None:
    """rename() renames a value within a key, or the key itself."""
    vm = VaultMetadata()
    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=METADATA)
    assert vm.dict == {
        "frontmatter_Key1": ["author name"],
        "frontmatter_Key2": ["article", "note"],
        "intext_key": ["intext_key_value"],
        "shared_key1": ["shared_key1_value"],
        "shared_key2": ["shared_key2_value"],
        "tags": ["tag 1", "tag 2", "tag 3"],
        "top_key1": ["top_key1_value"],
        "top_key2": ["top_key2_value"],
        "top_key3": ["top_key3_value"],
    }

    # Unknown keys and values are no-ops.
    assert vm.rename("no key", "new key") is False
    assert vm.rename("tags", "no tag", "new key") is False

    # Rename a value within a key, then the key itself.
    assert vm.rename("tags", "tag 2", "new tag") is True
    assert vm.dict["tags"] == ["new tag", "tag 1", "tag 3"]
    assert vm.rename("tags", "old_tags") is True
    assert vm.dict["old_tags"] == ["new tag", "tag 1", "tag 3"]
    assert "tags" not in vm.dict
|
||||
--- New file: tests/metadata_vault_test.py (814 lines added, @@ -0,0 +1,814 @@) ---
|
||||
# type: ignore
|
||||
"""Test VaultMetadata object from metadata.py."""
|
||||
import pytest
|
||||
|
||||
from obsidian_metadata.models.enums import MetadataType
|
||||
from obsidian_metadata.models.metadata import (
|
||||
VaultMetadata,
|
||||
)
|
||||
from tests.helpers import Regex, remove_ansi
|
||||
|
||||
|
||||
def test_vault_metadata__init_1() -> None:
    """A freshly constructed VaultMetadata starts with empty containers."""
    metadata = VaultMetadata()
    assert metadata.dict == {}
    assert metadata.frontmatter == {}
    assert metadata.inline_metadata == {}
    assert metadata.tags == []
|
||||
|
||||
|
||||
def test_index_metadata_1():
    """Test index_metadata() method.

    GIVEN a dictionary to add
    WHEN the target area is FRONTMATTER and the old dictionary is empty
    THEN the new dictionary is added to the target area
    """
    metadata = VaultMetadata()
    incoming = {"key1": ["value1"], "key2": ["value2", "value3"]}
    metadata.index_metadata(area=MetadataType.FRONTMATTER, metadata=incoming)
    assert metadata.dict == incoming
    assert metadata.frontmatter == incoming


def test_index_metadata_2():
    """Test index_metadata() method.

    GIVEN a dictionary to add
    WHEN the target area is FRONTMATTER and the old dictionary is not empty
    THEN the new dictionary is merged with the old dictionary
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"], "other_key": ["value1"]}
    metadata.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}

    incoming = {"key1": ["value1"], "key2": ["value1", "value3"], "key3": ["value1"]}
    metadata.index_metadata(area=MetadataType.FRONTMATTER, metadata=incoming)

    merged = {
        "key1": ["value1"],
        "key2": ["value1", "value2", "value3"],
        "key3": ["value1"],
    }
    # The combined dict keeps keys from other areas; the area dict does not.
    assert metadata.dict == {**merged, "other_key": ["value1"]}
    assert metadata.frontmatter == merged


def test_index_metadata_3():
    """Test index_metadata() method.

    GIVEN a dictionary to add
    WHEN the target area is INLINE and the old dictionary is empty
    THEN the new dictionary is added to the target area
    """
    metadata = VaultMetadata()
    incoming = {"key1": ["value1"], "key2": ["value2", "value3"]}
    metadata.index_metadata(area=MetadataType.INLINE, metadata=incoming)
    assert metadata.dict == incoming
    assert metadata.inline_metadata == incoming


def test_index_metadata_4():
    """Test index_metadata() method.

    GIVEN a dictionary to add
    WHEN the target area is INLINE and the old dictionary is not empty
    THEN the new dictionary is merged with the old dictionary
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"], "other_key": ["value1"]}
    metadata.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}

    incoming = {"key1": ["value1"], "key2": ["value1", "value3"], "key3": ["value1"]}
    metadata.index_metadata(area=MetadataType.INLINE, metadata=incoming)

    merged = {
        "key1": ["value1"],
        "key2": ["value1", "value2", "value3"],
        "key3": ["value1"],
    }
    # The combined dict keeps keys from other areas; the area dict does not.
    assert metadata.dict == {**merged, "other_key": ["value1"]}
    assert metadata.inline_metadata == merged


def test_index_metadata_5():
    """Test index_metadata() method.

    GIVEN a list of tags to add
    WHEN the target area is TAGS and the old list is empty
    THEN the new list is added to the target area
    """
    metadata = VaultMetadata()
    incoming = ["tag1", "tag2", "tag3"]
    metadata.index_metadata(area=MetadataType.TAGS, metadata=incoming)
    # Tags do not populate the key/value dict.
    assert metadata.dict == {}
    assert metadata.tags == incoming


def test_index_metadata_6():
    """Test index_metadata() method.

    GIVEN a list of tags to add
    WHEN the target area is TAGS and the old list is not empty
    THEN the new list is merged with the old list
    """
    metadata = VaultMetadata()
    metadata.tags = ["tag1", "tag2", "tag3"]
    metadata.index_metadata(area=MetadataType.TAGS, metadata=["tag1", "tag2", "tag4", "tag5"])
    assert metadata.dict == {}
    assert metadata.tags == ["tag1", "tag2", "tag3", "tag4", "tag5"]
|
||||
|
||||
|
||||
def test_contains_1():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key that exists
    THEN True is returned
    """
    metadata = VaultMetadata()
    metadata.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.FRONTMATTER, key="key1") is True


def test_contains_2():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key that does not exist
    THEN False is returned
    """
    metadata = VaultMetadata()
    metadata.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.FRONTMATTER, key="key3") is False


def test_contains_3():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key and value that exists
    THEN True is returned
    """
    metadata = VaultMetadata()
    metadata.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.FRONTMATTER, key="key2", value="value1") is True


def test_contains_4():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key and value that does not exist
    THEN False is returned
    """
    metadata = VaultMetadata()
    metadata.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.FRONTMATTER, key="key2", value="value3") is False


def test_contains_5():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key that exists with regex
    THEN True is returned
    """
    metadata = VaultMetadata()
    metadata.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.FRONTMATTER, key=r"\w+\d", is_regex=True) is True


def test_contains_6():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key that does not exist with regex
    THEN False is returned
    """
    metadata = VaultMetadata()
    metadata.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.FRONTMATTER, key=r"^\d", is_regex=True) is False


def test_contains_7():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key and value that exists with regex
    THEN True is returned
    """
    metadata = VaultMetadata()
    metadata.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert (
        metadata.contains(area=MetadataType.FRONTMATTER, key="key2", value=r"\w\d", is_regex=True)
        is True
    )


def test_contains_8():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key and value that does not exist with regex
    THEN False is returned
    """
    metadata = VaultMetadata()
    metadata.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert (
        metadata.contains(area=MetadataType.FRONTMATTER, key="key2", value=r"^\d", is_regex=True)
        is False
    )


def test_contains_9():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked with a key is None
    THEN raise a ValueError
    """
    metadata = VaultMetadata()
    metadata.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    with pytest.raises(ValueError, match="Key must be provided"):
        metadata.contains(area=MetadataType.FRONTMATTER, value="value1")
|
||||
|
||||
|
||||
def test_contains_10():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key that exists
    THEN True is returned
    """
    metadata = VaultMetadata()
    metadata.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.INLINE, key="key1") is True


def test_contains_11():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key that does not exist
    THEN False is returned
    """
    metadata = VaultMetadata()
    metadata.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.INLINE, key="key3") is False


def test_contains_12():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key and value that exists
    THEN True is returned
    """
    metadata = VaultMetadata()
    metadata.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.INLINE, key="key2", value="value1") is True


def test_contains_13():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key and value that does not exist
    THEN False is returned
    """
    metadata = VaultMetadata()
    metadata.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.INLINE, key="key2", value="value3") is False


def test_contains_14():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key that exists with regex
    THEN True is returned
    """
    metadata = VaultMetadata()
    metadata.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.INLINE, key=r"\w+\d", is_regex=True) is True


def test_contains_15():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key that does not exist with regex
    THEN False is returned
    """
    metadata = VaultMetadata()
    metadata.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.INLINE, key=r"^\d", is_regex=True) is False


def test_contains_16():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key and value that exists with regex
    THEN True is returned
    """
    metadata = VaultMetadata()
    metadata.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert (
        metadata.contains(area=MetadataType.INLINE, key="key2", value=r"\w\d", is_regex=True)
        is True
    )


def test_contains_17():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key and value that does not exist with regex
    THEN False is returned
    """
    metadata = VaultMetadata()
    metadata.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert (
        metadata.contains(area=MetadataType.INLINE, key="key2", value=r"^\d", is_regex=True)
        is False
    )


def test_contains_18():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked with a key is None
    THEN raise a ValueError
    """
    metadata = VaultMetadata()
    metadata.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    with pytest.raises(ValueError, match="Key must be provided"):
        metadata.contains(area=MetadataType.INLINE, value="value1")
|
||||
|
||||
|
||||
def test_contains_19():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a key but not a value
    THEN raise a ValueError
    """
    metadata = VaultMetadata()
    metadata.tags = ["tag1", "tag2", "tag3"]
    with pytest.raises(ValueError, match="Value must be provided"):
        metadata.contains(area=MetadataType.TAGS, key="key1")


def test_contains_20():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a value that exists
    THEN True is returned
    """
    metadata = VaultMetadata()
    metadata.tags = ["tag1", "tag2", "tag3"]
    assert metadata.contains(area=MetadataType.TAGS, value="tag1") is True


def test_contains_21():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a value that does not exist
    THEN False is returned
    """
    metadata = VaultMetadata()
    metadata.tags = ["tag1", "tag2", "tag3"]
    assert metadata.contains(area=MetadataType.TAGS, value="value1") is False


def test_contains_22():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a key regex but no value
    THEN raise a ValueError
    """
    metadata = VaultMetadata()
    metadata.tags = ["tag1", "tag2", "tag3"]
    with pytest.raises(ValueError, match="Value must be provided"):
        metadata.contains(area=MetadataType.TAGS, key=r"\w", is_regex=True)


def test_contains_23():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a value that does not exist with regex
    THEN False is returned
    """
    metadata = VaultMetadata()
    metadata.tags = ["tag1", "tag2", "tag3"]
    assert metadata.contains(area=MetadataType.TAGS, value=r"^\d", is_regex=True) is False


def test_contains_24():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a value that exists with regex
    THEN True is returned
    """
    metadata = VaultMetadata()
    metadata.tags = ["tag1", "tag2", "tag3"]
    assert metadata.contains(area=MetadataType.TAGS, value=r"^tag\d", is_regex=True) is True
|
||||
|
||||
|
||||
def test_contains_25():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key that exists
    THEN True is returned
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.ALL, key="key1") is True


def test_contains_26():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key that does not exist
    THEN False is returned
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.ALL, key="key3") is False


def test_contains_27():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key and value that exists
    THEN True is returned
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.ALL, key="key2", value="value1") is True


def test_contains_28():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key and value that does not exist
    THEN False is returned
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.ALL, key="key2", value="value3") is False


def test_contains_29():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key that exists with regex
    THEN True is returned
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.ALL, key=r"\w+\d", is_regex=True) is True


def test_contains_30():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key that does not exist with regex
    THEN False is returned
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.ALL, key=r"^\d", is_regex=True) is False


def test_contains_31():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key and value that exists with regex
    THEN True is returned
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.ALL, key="key2", value=r"\w\d", is_regex=True) is True


def test_contains_32():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key and value that does not exist with regex
    THEN False is returned
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.contains(area=MetadataType.ALL, key="key2", value=r"^\d", is_regex=True) is False


def test_contains_33():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked with a key is None
    THEN raise a ValueError
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    with pytest.raises(ValueError, match="Key must be provided"):
        metadata.contains(area=MetadataType.ALL, value="value1")
|
||||
|
||||
|
||||
def test_delete_1():
    """Test delete() method.

    GIVEN a VaultMetadata object
    WHEN a key is deleted
    THEN return True and the key is removed
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.delete(key="key1") is True
    assert metadata.dict == {"key2": ["value1", "value2"]}


def test_delete_2():
    """Test delete() method.

    GIVEN a VaultMetadata object
    WHEN a key is deleted that does not exist
    THEN return False and the key is not removed
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.delete(key="key3") is False
    assert metadata.dict == {"key1": ["value1"], "key2": ["value1", "value2"]}


def test_delete_3():
    """Test delete() method.

    GIVEN a VaultMetadata object
    WHEN a key and value are specified
    THEN return True and remove the value
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.delete(key="key2", value_to_delete="value1") is True
    assert metadata.dict == {"key1": ["value1"], "key2": ["value2"]}


def test_delete_4():
    """Test delete() method.

    GIVEN a VaultMetadata object
    WHEN a key and nonexistent value are specified
    THEN return False and leave the dict untouched
    """
    metadata = VaultMetadata()
    metadata.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert metadata.delete(key="key2", value_to_delete="value11") is False
    assert metadata.dict == {"key1": ["value1"], "key2": ["value1", "value2"]}
|
||||
|
||||
|
||||
def test_rename_1():
|
||||
"""Test VaultMetadata rename() method.
|
||||
|
||||
GIVEN a VaultMetadata object
|
||||
WHEN the rename() method is called with a key
|
||||
THEN return False if the key is not found
|
||||
"""
|
||||
vm = VaultMetadata()
|
||||
vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
|
||||
assert vm.rename("no key", "new key") is False
|
||||
|
||||
|
||||
def test_rename_2():
|
||||
"""Test VaultMetadata rename() method.
|
||||
|
||||
GIVEN a VaultMetadata object
|
||||
WHEN the rename() method is called with an existing key and non-existing value
|
||||
THEN return False
|
||||
"""
|
||||
vm = VaultMetadata()
|
||||
vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
|
||||
assert vm.rename("key1", "no value", "new value") is False
|
||||
|
||||
|
||||
def test_rename_3():
|
||||
"""Test VaultMetadata rename() method.
|
||||
|
||||
GIVEN a VaultMetadata object
|
||||
WHEN the rename() method is called with an existing key
|
||||
THEN return True and rename the key
|
||||
"""
|
||||
vm = VaultMetadata()
|
||||
vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
|
||||
assert vm.rename("key1", "new key") is True
|
||||
assert vm.dict == {"key2": ["value1", "value2"], "new key": ["value1"]}
|
||||
|
||||
|
||||
def test_rename_4():
|
||||
"""Test VaultMetadata rename() method.
|
||||
|
||||
GIVEN a VaultMetadata object
|
||||
WHEN the rename() method is called with an existing key and value
|
||||
THEN return True and rename the value
|
||||
"""
|
||||
vm = VaultMetadata()
|
||||
vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
|
||||
assert vm.rename("key1", "value1", "new value") is True
|
||||
assert vm.dict == {"key1": ["new value"], "key2": ["value1", "value2"]}
|
||||
|
||||
|
||||
def test_rename_5():
|
||||
"""Test VaultMetadata rename() method.
|
||||
|
||||
GIVEN a VaultMetadata object
|
||||
WHEN the rename() method is called with an existing key and value and the new value already exists
|
||||
THEN return True and remove the old value leaving one instance of the new value
|
||||
"""
|
||||
vm = VaultMetadata()
|
||||
vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
|
||||
assert vm.rename("key2", "value1", "value2") is True
|
||||
assert vm.dict == {"key1": ["value1"], "key2": ["value2"]}
|
||||
|
||||
|
||||
def test_print_metadata_1(capsys):
|
||||
"""Test print_metadata() method.
|
||||
|
||||
GIVEN calling print_metadata() with a VaultMetadata object
|
||||
WHEN ALL is specified
|
||||
THEN print all the metadata
|
||||
"""
|
||||
vm = VaultMetadata()
|
||||
vm.dict = {
|
||||
"key1": ["value1", "value2"],
|
||||
"key2": ["value1", "value2"],
|
||||
"key3": ["value1"],
|
||||
"key4": ["value1", "value2"],
|
||||
}
|
||||
vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
|
||||
vm.inline_metadata = {
|
||||
"key1": ["value1", "value2"],
|
||||
"key3": ["value1"],
|
||||
"key4": ["value1", "value2"],
|
||||
}
|
||||
vm.tags = ["tag1", "tag2", "tag3"]
|
||||
|
||||
vm.print_metadata(area=MetadataType.ALL)
|
||||
captured = remove_ansi(capsys.readouterr().out)
|
||||
assert "All metadata" in captured
|
||||
assert captured == Regex("┃ Keys +┃ Values +┃")
|
||||
assert captured == Regex("│ key1 +│ value1 +│")
|
||||
assert captured == Regex("│ key2 +│ value1 +│")
|
||||
assert captured == Regex("│ key4 +│ value1 +│")
|
||||
assert "All inline tags" in captured
|
||||
assert captured == Regex("#tag1 +#tag2")
|
||||
|
||||
|
||||
def test_print_metadata_2(capsys):
|
||||
"""Test print_metadata() method.
|
||||
|
||||
GIVEN calling print_metadata() with a VaultMetadata object
|
||||
WHEN FRONTMATTER is specified
|
||||
THEN print all the metadata
|
||||
"""
|
||||
vm = VaultMetadata()
|
||||
vm.dict = {
|
||||
"key1": ["value1", "value2"],
|
||||
"key2": ["value1", "value2"],
|
||||
"key3": ["value1"],
|
||||
"key4": ["value1", "value2"],
|
||||
}
|
||||
vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
|
||||
vm.inline_metadata = {
|
||||
"key1": ["value1", "value2"],
|
||||
"key3": ["value1"],
|
||||
"key4": ["value1", "value2"],
|
||||
}
|
||||
vm.tags = ["tag1", "tag2", "tag3"]
|
||||
|
||||
vm.print_metadata(area=MetadataType.FRONTMATTER)
|
||||
captured = remove_ansi(capsys.readouterr().out)
|
||||
assert "All frontmatter" in captured
|
||||
assert captured == Regex("┃ Keys +┃ Values +┃")
|
||||
assert captured == Regex("│ key1 +│ value1 +│")
|
||||
assert captured == Regex("│ key2 +│ value1 +│")
|
||||
assert captured != Regex("│ key4 +│ value1 +│")
|
||||
assert "All inline tags" not in captured
|
||||
assert captured != Regex("#tag1 +#tag2")
|
||||
|
||||
|
||||
def test_print_metadata_3(capsys):
|
||||
"""Test print_metadata() method.
|
||||
|
||||
GIVEN calling print_metadata() with a VaultMetadata object
|
||||
WHEN INLINE is specified
|
||||
THEN print all the metadata
|
||||
"""
|
||||
vm = VaultMetadata()
|
||||
vm.dict = {
|
||||
"key1": ["value1", "value2"],
|
||||
"key2": ["value1", "value2"],
|
||||
"key3": ["value1"],
|
||||
"key4": ["value1", "value2"],
|
||||
}
|
||||
vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
|
||||
vm.inline_metadata = {
|
||||
"key1": ["value1", "value2"],
|
||||
"key3": ["value1"],
|
||||
"key4": ["value1", "value2"],
|
||||
}
|
||||
vm.tags = ["tag1", "tag2", "tag3"]
|
||||
|
||||
vm.print_metadata(area=MetadataType.INLINE)
|
||||
captured = remove_ansi(capsys.readouterr().out)
|
||||
assert "All inline" in captured
|
||||
assert captured == Regex("┃ Keys +┃ Values +┃")
|
||||
assert captured == Regex("│ key1 +│ value1 +│")
|
||||
assert captured != Regex("│ key2 +│ value1 +│")
|
||||
assert captured == Regex("│ key4 +│ value1 +│")
|
||||
assert "All inline tags" not in captured
|
||||
assert captured != Regex("#tag1 +#tag2")
|
||||
|
||||
|
||||
def test_print_metadata_4(capsys):
|
||||
"""Test print_metadata() method.
|
||||
|
||||
GIVEN calling print_metadata() with a VaultMetadata object
|
||||
WHEN TAGS is specified
|
||||
THEN print all the tags
|
||||
"""
|
||||
vm = VaultMetadata()
|
||||
vm.dict = {
|
||||
"key1": ["value1", "value2"],
|
||||
"key2": ["value1", "value2"],
|
||||
"key3": ["value1"],
|
||||
"key4": ["value1", "value2"],
|
||||
}
|
||||
vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
|
||||
vm.inline_metadata = {
|
||||
"key1": ["value1", "value2"],
|
||||
"key3": ["value1"],
|
||||
"key4": ["value1", "value2"],
|
||||
}
|
||||
vm.tags = ["tag1", "tag2", "tag3"]
|
||||
|
||||
vm.print_metadata(area=MetadataType.TAGS)
|
||||
captured = remove_ansi(capsys.readouterr().out)
|
||||
assert "All inline tags" in captured
|
||||
assert captured != Regex("┃ Keys +┃ Values +┃")
|
||||
assert captured != Regex("│ key1 +│ value1 +│")
|
||||
assert captured != Regex("│ key2 +│ value1 +│")
|
||||
assert captured != Regex("│ key4 +│ value1 +│")
|
||||
assert captured == Regex("#tag1 +#tag2 +#tag3")
|
||||
|
||||
|
||||
def test_print_metadata_5(capsys):
|
||||
"""Test print_metadata() method.
|
||||
|
||||
GIVEN calling print_metadata() with a VaultMetadata object
|
||||
WHEN KEYS is specified
|
||||
THEN print all the tags
|
||||
"""
|
||||
vm = VaultMetadata()
|
||||
vm.dict = {
|
||||
"key1": ["value1", "value2"],
|
||||
"key2": ["value1", "value2"],
|
||||
"key3": ["value1"],
|
||||
"key4": ["value1", "value2"],
|
||||
}
|
||||
vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
|
||||
vm.inline_metadata = {
|
||||
"key1": ["value1", "value2"],
|
||||
"key3": ["value1"],
|
||||
"key4": ["value1", "value2"],
|
||||
}
|
||||
vm.tags = ["tag1", "tag2", "tag3"]
|
||||
|
||||
vm.print_metadata(area=MetadataType.KEYS)
|
||||
captured = remove_ansi(capsys.readouterr().out)
|
||||
assert "All Keys" in captured
|
||||
assert captured != Regex("┃ Keys +┃ Values +┃")
|
||||
assert captured != Regex("│ key1 +│ value1 +│")
|
||||
assert captured != Regex("│ key2 +│ value1 +│")
|
||||
assert captured != Regex("│ key4 +│ value1 +│")
|
||||
assert captured != Regex("#tag1 +#tag2 +#tag3")
|
||||
assert captured == Regex("key1 +key2 +key3 +key4")
|
||||
@@ -48,7 +48,7 @@ def test_create_note_1(sample_note):
|
||||
],
|
||||
}
|
||||
|
||||
assert note.inline_tags.list == [
|
||||
assert note.tags.list == [
|
||||
"inline_tag_bottom1",
|
||||
"inline_tag_bottom2",
|
||||
"inline_tag_top1",
|
||||
@@ -233,12 +233,12 @@ def test_add_metadata_method_10(sample_note):
|
||||
THEN the tag is added to the InlineTags object and the file content
|
||||
"""
|
||||
note = Note(note_path=sample_note)
|
||||
assert "new_tag2" not in note.inline_tags.list
|
||||
assert "new_tag2" not in note.tags.list
|
||||
assert (
|
||||
note.add_metadata(MetadataType.TAGS, value="new_tag2", location=InsertLocation.BOTTOM)
|
||||
is True
|
||||
)
|
||||
assert "new_tag2" in note.inline_tags.list
|
||||
assert "new_tag2" in note.tags.list
|
||||
assert "#new_tag2" in note.file_content
|
||||
|
||||
|
||||
@@ -279,19 +279,19 @@ def test_commit_2(sample_note) -> None:
|
||||
assert "Heading 1" in note.file_content
|
||||
|
||||
|
||||
def test_contains_inline_tag(sample_note) -> None:
|
||||
"""Test contains_inline_tag method.
|
||||
def test_contains_tag(sample_note) -> None:
|
||||
"""Test contains_tag method.
|
||||
|
||||
GIVEN a note object
|
||||
WHEN contains_inline_tag() is called
|
||||
WHEN contains_tag() is called
|
||||
THEN the method returns True if the tag is found and False if not
|
||||
|
||||
"""
|
||||
note = Note(note_path=sample_note)
|
||||
assert note.contains_inline_tag("intext_tag1") is True
|
||||
assert note.contains_inline_tag("nonexistent_tag") is False
|
||||
assert note.contains_inline_tag(r"\d$", is_regex=True) is True
|
||||
assert note.contains_inline_tag(r"^\d", is_regex=True) is False
|
||||
assert note.contains_tag("intext_tag1") is True
|
||||
assert note.contains_tag("nonexistent_tag") is False
|
||||
assert note.contains_tag(r"\d$", is_regex=True) is True
|
||||
assert note.contains_tag(r"^\d", is_regex=True) is False
|
||||
|
||||
|
||||
def test_contains_metadata(sample_note) -> None:
|
||||
@@ -323,7 +323,7 @@ def test_delete_all_metadata(sample_note):
|
||||
"""
|
||||
note = Note(note_path=sample_note)
|
||||
note.delete_all_metadata()
|
||||
assert note.inline_tags.list == []
|
||||
assert note.tags.list == []
|
||||
assert note.frontmatter.dict == {}
|
||||
assert note.inline_metadata.dict == {}
|
||||
assert note.file_content == Regex("consequat. Duis")
|
||||
@@ -332,17 +332,17 @@ def test_delete_all_metadata(sample_note):
|
||||
assert "---" not in note.file_content
|
||||
|
||||
|
||||
def test_delete_inline_tag(sample_note) -> None:
|
||||
"""Test delete_inline_tag method.
|
||||
def test_delete_tag(sample_note) -> None:
|
||||
"""Test delete_tag method.
|
||||
|
||||
GIVEN a note object
|
||||
WHEN delete_inline_tag() is called
|
||||
WHEN delete_tag() is called
|
||||
THEN the method returns True if the tag is found and deleted and False if not
|
||||
"""
|
||||
note = Note(note_path=sample_note)
|
||||
assert note.delete_inline_tag("not_a_tag") is False
|
||||
assert note.delete_inline_tag("intext_tag[1]") is True
|
||||
assert "intext_tag1" not in note.inline_tags.list
|
||||
assert note.delete_tag("not_a_tag") is False
|
||||
assert note.delete_tag("intext_tag[1]") is True
|
||||
assert "intext_tag1" not in note.tags.list
|
||||
assert note.file_content == Regex("consequat. Duis")
|
||||
|
||||
|
||||
@@ -454,7 +454,7 @@ def test_has_changes(sample_note) -> None:
|
||||
|
||||
note = Note(note_path=sample_note)
|
||||
assert note.has_changes() is False
|
||||
note.delete_inline_tag("intext_tag1")
|
||||
note.delete_tag("intext_tag1")
|
||||
assert note.has_changes() is True
|
||||
|
||||
|
||||
@@ -494,29 +494,29 @@ def test_print_note(sample_note, capsys) -> None:
|
||||
assert "#shared_tag" in captured.out
|
||||
|
||||
|
||||
def test_rename_inline_tag_1(sample_note) -> None:
|
||||
"""Test rename_inline_tag() method.
|
||||
def test_rename_tag_1(sample_note) -> None:
|
||||
"""Test rename_tag() method.
|
||||
|
||||
GIVEN a note object
|
||||
WHEN rename_inline_tag() is called with a tag that does not exist
|
||||
WHEN rename_tag() is called with a tag that does not exist
|
||||
THEN the method returns False
|
||||
"""
|
||||
note = Note(note_path=sample_note)
|
||||
assert note.rename_inline_tag("no_note_tag", "intext_tag2") is False
|
||||
assert note.rename_tag("no_note_tag", "intext_tag2") is False
|
||||
|
||||
|
||||
def test_rename_inline_tag_2(sample_note) -> None:
|
||||
"""Test rename_inline_tag() method.
|
||||
def test_rename_tag_2(sample_note) -> None:
|
||||
"""Test rename_tag() method.
|
||||
|
||||
GIVEN a note object
|
||||
WHEN rename_inline_tag() is called with a tag exists
|
||||
THEN the tag is renamed in the InlineTags object and the file content
|
||||
WHEN rename_tag() is called with a tag exists
|
||||
THEN the tag is renamed in the InlineTag object and the file content
|
||||
"""
|
||||
note = Note(note_path=sample_note)
|
||||
assert "intext_tag1" in note.inline_tags.list
|
||||
assert note.rename_inline_tag("intext_tag1", "intext_tag26") is True
|
||||
assert "intext_tag1" not in note.inline_tags.list
|
||||
assert "intext_tag26" in note.inline_tags.list
|
||||
assert "intext_tag1" in note.tags.list
|
||||
assert note.rename_tag("intext_tag1", "intext_tag26") is True
|
||||
assert "intext_tag1" not in note.tags.list
|
||||
assert "intext_tag26" in note.tags.list
|
||||
assert note.file_content == Regex(r"#intext_tag26")
|
||||
assert note.file_content != Regex(r"#intext_tag1")
|
||||
|
||||
@@ -846,7 +846,7 @@ def test_write_delete_inline_metadata_2(sample_note) -> None:
|
||||
|
||||
"""
|
||||
note = Note(note_path=sample_note)
|
||||
note.write_delete_inline_metadata("intext_key")
|
||||
note.write_delete_inline_metadata("intext_key", is_regex=False)
|
||||
assert note.file_content == Regex(r"dolore eu fugiat", re.DOTALL)
|
||||
|
||||
|
||||
@@ -858,7 +858,7 @@ def test_write_delete_inline_metadata_3(sample_note) -> None:
|
||||
THEN the key/value is removed from the note content
|
||||
"""
|
||||
note = Note(note_path=sample_note)
|
||||
note.write_delete_inline_metadata("bottom_key2", "bottom_key2_value")
|
||||
note.write_delete_inline_metadata("bottom_key2", "bottom_key2_value", is_regex=False)
|
||||
assert note.file_content != Regex(r"bottom_key2_value")
|
||||
assert note.file_content == Regex(r"bottom_key2::")
|
||||
note.write_delete_inline_metadata("bottom_key1")
|
||||
|
||||
@@ -68,12 +68,12 @@ def test_validate_number() -> None:
|
||||
assert questions._validate_number("1") is True
|
||||
|
||||
|
||||
def test_validate_existing_inline_tag() -> None:
|
||||
def test_validate_existing_tag() -> None:
|
||||
"""Test existing tag validation."""
|
||||
questions = Questions(vault=VAULT)
|
||||
assert "Tag cannot be empty" in questions._validate_existing_inline_tag("")
|
||||
assert "'test' does not exist" in questions._validate_existing_inline_tag("test")
|
||||
assert questions._validate_existing_inline_tag("shared_tag") is True
|
||||
assert "Tag cannot be empty" in questions._validate_existing_tag("")
|
||||
assert "'test' does not exist" in questions._validate_existing_tag("test")
|
||||
assert questions._validate_existing_tag("shared_tag") is True
|
||||
|
||||
|
||||
def test_validate_key_exists_regex() -> None:
|
||||
|
||||
@@ -6,28 +6,329 @@ import typer
|
||||
|
||||
from obsidian_metadata._utils import (
|
||||
clean_dictionary,
|
||||
delete_from_dict,
|
||||
dict_contains,
|
||||
dict_keys_to_lower,
|
||||
dict_values_to_lists_strings,
|
||||
merge_dictionaries,
|
||||
remove_markdown_sections,
|
||||
rename_in_dict,
|
||||
validate_csv_bulk_imports,
|
||||
)
|
||||
from tests.helpers import Regex, remove_ansi
|
||||
|
||||
|
||||
def test_dict_contains() -> None:
|
||||
"""Test dict_contains."""
|
||||
d = {"key1": ["value1", "value2"], "key2": ["value3", "value4"], "key3": ["value5", "value6"]}
|
||||
def test_clean_dictionary_1():
|
||||
"""Test clean_dictionary() function.
|
||||
|
||||
assert dict_contains(d, "key1") is True
|
||||
assert dict_contains(d, "key5") is False
|
||||
assert dict_contains(d, "key1", "value1") is True
|
||||
assert dict_contains(d, "key1", "value5") is False
|
||||
assert dict_contains(d, "key[1-2]", is_regex=True) is True
|
||||
assert dict_contains(d, "^1", is_regex=True) is False
|
||||
assert dict_contains(d, r"key\d", r"value\d", is_regex=True) is True
|
||||
assert dict_contains(d, "key1$", "^alue", is_regex=True) is False
|
||||
assert dict_contains(d, r"key\d", "value5", is_regex=True) is True
|
||||
GIVEN a dictionary passed to clean_dictionary()
|
||||
WHEN the dictionary is empty
|
||||
THEN return an empty dictionary
|
||||
"""
|
||||
assert clean_dictionary({}) == {}
|
||||
|
||||
|
||||
def test_clean_dictionary_2():
|
||||
"""Test clean_dictionary() function.
|
||||
|
||||
GIVEN a dictionary passed to clean_dictionary()
|
||||
WHEN keys contain leading/trailing spaces
|
||||
THEN remove the spaces from the keys
|
||||
"""
|
||||
assert clean_dictionary({" key 1 ": "value 1"}) == {"key 1": "value 1"}
|
||||
|
||||
|
||||
def test_clean_dictionary_3():
|
||||
"""Test clean_dictionary() function.
|
||||
|
||||
GIVEN a dictionary passed to clean_dictionary()
|
||||
WHEN values contain leading/trailing spaces
|
||||
THEN remove the spaces from the values
|
||||
"""
|
||||
assert clean_dictionary({"key 1": " value 1 "}) == {"key 1": "value 1"}
|
||||
|
||||
|
||||
def test_clean_dictionary_4():
|
||||
"""Test clean_dictionary() function.
|
||||
|
||||
GIVEN a dictionary passed to clean_dictionary()
|
||||
WHEN keys or values contain leading/trailing asterisks
|
||||
THEN remove the asterisks from the keys or values
|
||||
"""
|
||||
assert clean_dictionary({"**key_1**": ["**value 1**", "value 2"]}) == {
|
||||
"key_1": ["value 1", "value 2"]
|
||||
}
|
||||
|
||||
|
||||
def test_clean_dictionary_5():
|
||||
"""Test clean_dictionary() function.
|
||||
|
||||
GIVEN a dictionary passed to clean_dictionary()
|
||||
WHEN keys or values contain leading/trailing brackets
|
||||
THEN remove the brackets from the keys and values
|
||||
"""
|
||||
assert clean_dictionary({"[[key_1]]": ["[[value 1]]", "[value 2]"]}) == {
|
||||
"key_1": ["value 1", "value 2"]
|
||||
}
|
||||
|
||||
|
||||
def test_clean_dictionary_6():
|
||||
"""Test clean_dictionary() function.
|
||||
|
||||
GIVEN a dictionary passed to clean_dictionary()
|
||||
WHEN keys or values contain leading/trailing hashtags
|
||||
THEN remove the hashtags from the keys and values
|
||||
"""
|
||||
assert clean_dictionary({"#key_1": ["#value 1", "value 2#"]}) == {
|
||||
"key_1": ["value 1", "value 2"]
|
||||
}
|
||||
|
||||
|
||||
def test_delete_from_dict_1():
|
||||
"""Test delete_from_dict() function.
|
||||
|
||||
GIVEN a dictionary with values
|
||||
WHEN the delete_from_dict() function is called with a key that exists
|
||||
THEN the key should be deleted from the dictionary and the original dictionary should not be modified
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
||||
|
||||
assert delete_from_dict(dictionary=test_dict, key="key1") == {
|
||||
"key2": ["value2", "value3"],
|
||||
"key3": "value4",
|
||||
}
|
||||
assert test_dict == {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
||||
|
||||
|
||||
def test_delete_from_dict_2():
|
||||
"""Test delete_from_dict() function.
|
||||
|
||||
GIVEN a dictionary with values
|
||||
WHEN the delete_from_dict() function is called with a key that does not exist
|
||||
THEN the dictionary should not be modified
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
||||
|
||||
assert delete_from_dict(dictionary=test_dict, key="key5") == test_dict
|
||||
|
||||
|
||||
def test_delete_from_dict_3():
|
||||
"""Test delete_from_dict() function.
|
||||
|
||||
GIVEN a dictionary with values in a list
|
||||
WHEN the delete_from_dict() function is called with a key and value that exists
|
||||
THEN the value should be deleted from the specified key in dictionary
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
||||
|
||||
assert delete_from_dict(dictionary=test_dict, key="key2", value="value3") == {
|
||||
"key1": ["value1"],
|
||||
"key2": ["value2"],
|
||||
"key3": "value4",
|
||||
}
|
||||
|
||||
|
||||
def test_delete_from_dict_4():
|
||||
"""Test delete_from_dict() function.
|
||||
|
||||
GIVEN a dictionary with values as strings
|
||||
WHEN the delete_from_dict() function is called with a key and value that exists
|
||||
THEN the value and key should be deleted from the dictionary
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
||||
|
||||
assert delete_from_dict(dictionary=test_dict, key="key3", value="value4") == {
|
||||
"key1": ["value1"],
|
||||
"key2": ["value2", "value3"],
|
||||
}
|
||||
|
||||
|
||||
def test_delete_from_dict_5():
|
||||
"""Test delete_from_dict() function.
|
||||
|
||||
GIVEN a dictionary with values as strings
|
||||
WHEN the delete_from_dict() function is called with a key and value that does not exist
|
||||
THEN the dictionary should not be modified
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
||||
|
||||
assert delete_from_dict(dictionary=test_dict, key="key3", value="value5") == test_dict
|
||||
|
||||
|
||||
def test_delete_from_dict_6():
|
||||
"""Test delete_from_dict() function.
|
||||
|
||||
GIVEN a dictionary with values as strings
|
||||
WHEN the delete_from_dict() function is called with a key regex that matches
|
||||
THEN the matching keys should be deleted from the dictionary
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
||||
|
||||
assert delete_from_dict(dictionary=test_dict, key="key[23]", is_regex=True) == {
|
||||
"key1": ["value1"]
|
||||
}
|
||||
|
||||
|
||||
def test_delete_from_dict_7():
|
||||
"""Test delete_from_dict() function.
|
||||
|
||||
GIVEN a dictionary with values as strings
|
||||
WHEN the delete_from_dict() function is called with a key regex that does not match
|
||||
THEN no keys should be deleted from the dictionary
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
||||
|
||||
assert delete_from_dict(dictionary=test_dict, key=r"key\d\d", is_regex=True) == test_dict
|
||||
|
||||
|
||||
def test_delete_from_dict_8():
|
||||
"""Test delete_from_dict() function.
|
||||
|
||||
GIVEN a dictionary with values as strings
|
||||
WHEN the delete_from_dict() function is called with a key and value regex that matches
|
||||
THEN the matching keys should be deleted from the dictionary
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
||||
|
||||
assert delete_from_dict(dictionary=test_dict, key="key2", value=r"\w+", is_regex=True) == {
|
||||
"key1": ["value1"],
|
||||
"key2": [],
|
||||
"key3": "value4",
|
||||
}
|
||||
|
||||
|
||||
def test_delete_from_dict_9():
|
||||
"""Test delete_from_dict() function.
|
||||
|
||||
GIVEN a dictionary with values as strings
|
||||
WHEN the delete_from_dict() function is called with a key and value regex that does not match
|
||||
THEN no keys should be deleted from the dictionary
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
||||
|
||||
assert (
|
||||
delete_from_dict(dictionary=test_dict, key=r"key2", value=r"^\d", is_regex=True)
|
||||
== test_dict
|
||||
)
|
||||
|
||||
|
||||
def test_delete_from_dict_10():
|
||||
"""Test delete_from_dict() function.
|
||||
|
||||
GIVEN a dictionary with values as strings
|
||||
WHEN the delete_from_dict() function is called with a key and value regex that matches
|
||||
THEN the matching keys should be deleted from the dictionary
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
||||
|
||||
assert delete_from_dict(dictionary=test_dict, key="key3", value=r"\w+", is_regex=True) == {
|
||||
"key1": ["value1"],
|
||||
"key2": ["value2", "value3"],
|
||||
}
|
||||
|
||||
|
||||
def test_delete_from_dict_11():
|
||||
"""Test delete_from_dict() function.
|
||||
|
||||
GIVEN a dictionary with values as strings
|
||||
WHEN the delete_from_dict() function is called with a key regex that matches multiple and values that match
|
||||
THEN the values matching the associated keys should be deleted from the dictionary
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
||||
|
||||
assert delete_from_dict(
|
||||
dictionary=test_dict, key=r"key[23]", value=r"\w+[34]$", is_regex=True
|
||||
) == {"key1": ["value1"], "key2": ["value2"]}
|
||||
|
||||
|
||||
def test_dict_contains_1():
|
||||
"""Test dict_contains() function.
|
||||
|
||||
GIVEN calling dict_contains() with a dictionary
|
||||
WHEN the dictionary is empty
|
||||
THEN the function should return False
|
||||
"""
|
||||
assert dict_contains({}, "key1") is False
|
||||
|
||||
|
||||
def test_dict_contains_2():
|
||||
"""Test dict_contains() function.
|
||||
|
||||
GIVEN calling dict_contains() with a dictionary
|
||||
WHEN when the key is not in the dictionary
|
||||
THEN the function should return False
|
||||
"""
|
||||
assert dict_contains({"key1": "value1"}, "key2") is False
|
||||
|
||||
|
||||
def test_dict_contains_3():
|
||||
"""Test dict_contains() function.
|
||||
|
||||
GIVEN calling dict_contains() with a dictionary
|
||||
WHEN when the key is in the dictionary
|
||||
THEN the function should return True
|
||||
"""
|
||||
assert dict_contains({"key1": "value1"}, "key1") is True
|
||||
|
||||
|
||||
def test_dict_contains_4():
|
||||
"""Test dict_contains() function.
|
||||
|
||||
GIVEN calling dict_contains() with a dictionary
|
||||
WHEN when the key and value are in the dictionary
|
||||
THEN the function should return True
|
||||
"""
|
||||
assert dict_contains({"key1": "value1"}, "key1", "value1") is True
|
||||
|
||||
|
||||
def test_dict_contains_5():
|
||||
"""Test dict_contains() function.
|
||||
|
||||
GIVEN calling dict_contains() with a dictionary
|
||||
WHEN when the key and value are not in the dictionary
|
||||
THEN the function should return False
|
||||
"""
|
||||
assert dict_contains({"key1": "value1"}, "key1", "value2") is False
|
||||
|
||||
|
||||
def test_dict_contains_6():
|
||||
"""Test dict_contains() function.
|
||||
|
||||
GIVEN calling dict_contains() with a dictionary
|
||||
WHEN a regex is used for the key and the key is in the dictionary
|
||||
THEN the function should return True
|
||||
"""
|
||||
assert dict_contains({"key1": "value1"}, r"key\d", is_regex=True) is True
|
||||
|
||||
|
||||
def test_dict_contains_7():
|
||||
"""Test dict_contains() function.
|
||||
|
||||
GIVEN calling dict_contains() with a dictionary
|
||||
WHEN a regex is used for the key and the key is not in the dictionary
|
||||
THEN the function should return False
|
||||
"""
|
||||
assert dict_contains({"key1": "value1"}, r"key\d\d", is_regex=True) is False
|
||||
|
||||
|
||||
def test_dict_contains_8():
|
||||
"""Test dict_contains() function.
|
||||
|
||||
GIVEN calling dict_contains() with a dictionary
|
||||
WHEN a regex is used for a value and the value is in the dictionary
|
||||
THEN the function should return True
|
||||
"""
|
||||
assert dict_contains({"key1": "value1"}, "key1", r"\w+", is_regex=True) is True
|
||||
|
||||
|
||||
def test_dict_contains_9():
|
||||
"""Test dict_contains() function.
|
||||
|
||||
GIVEN calling dict_contains() with a dictionary
|
||||
WHEN a regex is used for a value and the value is not in the dictionary
|
||||
THEN the function should return False
|
||||
"""
|
||||
assert dict_contains({"key1": "value1"}, "key1", r"\d{2}", is_regex=True) is False
|
||||
|
||||
|
||||
def test_dict_keys_to_lower() -> None:
|
||||
@@ -41,87 +342,467 @@ def test_dict_keys_to_lower() -> None:
|
||||
assert dict_keys_to_lower(test_dict) == {"key1": "Value1", "key2": "Value2", "key3": "Value3"}
|
||||
|
||||
|
||||
def test_dict_values_to_lists_strings():
|
||||
"""Test converting dictionary values to lists of strings."""
|
||||
dictionary = {
|
||||
"key1": "value1",
|
||||
"key2": ["value2", "value3", None],
|
||||
"key3": {"key4": "value4"},
|
||||
"key5": {"key6": {"key7": "value7"}},
|
||||
"key6": None,
|
||||
"key8": [1, 3, None, 4],
|
||||
"key9": [None, "", "None"],
|
||||
"key10": "None",
|
||||
"key11": "",
|
||||
}
|
||||
def test_dict_values_to_lists_strings_1():
|
||||
"""Test the dict_values_to_lists_strings() function.
|
||||
|
||||
result = dict_values_to_lists_strings(dictionary)
|
||||
assert result == {
|
||||
"key1": ["value1"],
|
||||
"key10": ["None"],
|
||||
"key11": [""],
|
||||
"key2": ["None", "value2", "value3"],
|
||||
"key3": {"key4": ["value4"]},
|
||||
"key5": {"key6": {"key7": ["value7"]}},
|
||||
"key6": ["None"],
|
||||
"key8": ["1", "3", "4", "None"],
|
||||
"key9": ["", "None", "None"],
|
||||
}
|
||||
GIVEN a dictionary passed to the dict_values_to_lists_strings() function
|
||||
WHEN the dictionary is empty
|
||||
THEN the function should return an empty dictionary
|
||||
"""
|
||||
assert dict_values_to_lists_strings({}) == {}
|
||||
assert dict_values_to_lists_strings({}, strip_null_values=True) == {}
|
||||
|
||||
result = dict_values_to_lists_strings(dictionary, strip_null_values=True)
|
||||
assert result == {
|
||||
|
||||
def test_dict_values_to_lists_strings_2():
|
||||
"""Test the dict_values_to_lists_strings() function.
|
||||
|
||||
GIVEN a dictionary passed to the dict_values_to_lists_strings() function
|
||||
WHEN the dictionary values are already lists of strings
|
||||
THEN the function should return the dictionary
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}
|
||||
assert dict_values_to_lists_strings(test_dict) == {
|
||||
"key1": ["value1"],
|
||||
"key2": ["value2", "value3"],
|
||||
}
|
||||
assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
|
||||
"key1": ["value1"],
|
||||
"key10": [],
|
||||
"key11": [],
|
||||
"key2": ["value2", "value3"],
|
||||
"key3": {"key4": ["value4"]},
|
||||
"key5": {"key6": {"key7": ["value7"]}},
|
||||
"key6": [],
|
||||
"key8": ["1", "3", "4"],
|
||||
"key9": ["", "None"],
|
||||
}
|
||||
|
||||
|
||||
def test_remove_markdown_sections():
|
||||
"""Test removing markdown sections."""
|
||||
def test_dict_values_to_lists_strings_3():
|
||||
"""Test the dict_values_to_lists_strings() function.
|
||||
|
||||
GIVEN a dictionary passed to the dict_values_to_lists_strings() function
|
||||
WHEN the a value is None and strip_null_values is False
|
||||
THEN then convert None to an empty string
|
||||
"""
|
||||
test_dict = {"key1": None, "key2": ["value", None]}
|
||||
assert dict_values_to_lists_strings(test_dict) == {"key1": [""], "key2": ["", "value"]}
|
||||
|
||||
|
||||
def test_dict_values_to_lists_strings_4():
|
||||
"""Test the dict_values_to_lists_strings() function.
|
||||
|
||||
GIVEN a dictionary passed to the dict_values_to_lists_strings() function
|
||||
WHEN the a value is None and strip_null_values is True
|
||||
THEN remove null values
|
||||
"""
|
||||
test_dict = {"key1": None, "key2": ["value", None]}
|
||||
assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
|
||||
"key1": [],
|
||||
"key2": ["value"],
|
||||
}
|
||||
|
||||
|
||||
def test_dict_values_to_lists_strings_5():
|
||||
"""Test the dict_values_to_lists_strings() function.
|
||||
|
||||
GIVEN a dictionary passed to the dict_values_to_lists_strings() function
|
||||
WHEN the a value is a string "None" and strip_null_values is True or False
|
||||
THEN ensure the value is not removed
|
||||
"""
|
||||
test_dict = {"key1": "None", "key2": [None, "None"]}
|
||||
assert dict_values_to_lists_strings(test_dict) == {"key1": ["None"], "key2": ["", "None"]}
|
||||
assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
|
||||
"key1": [],
|
||||
"key2": ["None"],
|
||||
}
|
||||
|
||||
|
||||
def test_dict_values_to_lists_strings_6():
|
||||
"""Test the dict_values_to_lists_strings() function.
|
||||
|
||||
GIVEN a dictionary passed to the dict_values_to_lists_strings() function
|
||||
WHEN the a value is another dictionary
|
||||
THEN ensure the values in the inner dictionary are converted to lists of strings
|
||||
"""
|
||||
test_dict = {"key1": {"key2": "value2", "key3": ["value3", None]}}
|
||||
assert dict_values_to_lists_strings(test_dict) == {
|
||||
"key1": {"key2": ["value2"], "key3": ["", "value3"]}
|
||||
}
|
||||
assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
|
||||
"key1": {"key2": ["value2"], "key3": ["value3"]}
|
||||
}
|
||||
|
||||
|
||||
def test_merge_dictionaries_1():
|
||||
"""Test merge_dictionaries() function.
|
||||
|
||||
GIVEN two dictionaries supplied to the merge_dictionaries() function
|
||||
WHEN a value in dict1 is not a list
|
||||
THEN raise a TypeError
|
||||
"""
|
||||
test_dict_1 = {"key1": "value1", "key2": "value2"}
|
||||
test_dict_2 = {"key3": ["value3"], "key4": ["value4"]}
|
||||
|
||||
with pytest.raises(TypeError, match=r"key.*is not a list"):
|
||||
merge_dictionaries(test_dict_1, test_dict_2)
|
||||
|
||||
|
||||
def test_merge_dictionaries_2():
|
||||
"""Test merge_dictionaries() function.
|
||||
|
||||
GIVEN two dictionaries supplied to the merge_dictionaries() function
|
||||
WHEN a value in dict2 is not a list
|
||||
THEN raise a TypeError
|
||||
"""
|
||||
test_dict_1 = {"key3": ["value3"], "key4": ["value4"]}
|
||||
test_dict_2 = {"key1": "value1", "key2": "value2"}
|
||||
|
||||
with pytest.raises(TypeError, match=r"key.*is not a list"):
|
||||
merge_dictionaries(test_dict_1, test_dict_2)
|
||||
|
||||
|
||||
def test_merge_dictionaries_3():
|
||||
"""Test merge_dictionaries() function.
|
||||
|
||||
GIVEN two dictionaries supplied to the merge_dictionaries() function
|
||||
WHEN keys and values in both dictionaries are unique
|
||||
THEN return a dictionary with the keys and values from both dictionaries
|
||||
"""
|
||||
test_dict_1 = {"key1": ["value1"], "key2": ["value2"]}
|
||||
test_dict_2 = {"key3": ["value3"], "key4": ["value4"]}
|
||||
|
||||
assert merge_dictionaries(test_dict_1, test_dict_2) == {
|
||||
"key1": ["value1"],
|
||||
"key2": ["value2"],
|
||||
"key3": ["value3"],
|
||||
"key4": ["value4"],
|
||||
}
|
||||
|
||||
|
||||
def test_merge_dictionaries_4():
|
||||
"""Test merge_dictionaries() function.
|
||||
|
||||
GIVEN two dictionaries supplied to the merge_dictionaries() function
|
||||
WHEN keys in both dictionaries are not unique
|
||||
THEN return a dictionary with the merged keys and values from both dictionaries
|
||||
"""
|
||||
test_dict_1 = {"key1": ["value1"], "key2": ["value2"]}
|
||||
test_dict_2 = {"key1": ["value3"], "key2": ["value4"]}
|
||||
|
||||
assert merge_dictionaries(test_dict_1, test_dict_2) == {
|
||||
"key1": ["value1", "value3"],
|
||||
"key2": ["value2", "value4"],
|
||||
}
|
||||
|
||||
|
||||
def test_merge_dictionaries_5():
|
||||
"""Test merge_dictionaries() function.
|
||||
|
||||
GIVEN two dictionaries supplied to the merge_dictionaries() function
|
||||
WHEN keys and values both dictionaries are not unique
|
||||
THEN return a dictionary with the merged keys and values from both dictionaries
|
||||
"""
|
||||
test_dict_1 = {"key1": ["a", "c"], "key2": ["a", "b"]}
|
||||
test_dict_2 = {"key1": ["a", "b"], "key2": ["a", "c"]}
|
||||
|
||||
assert merge_dictionaries(test_dict_1, test_dict_2) == {
|
||||
"key1": ["a", "b", "c"],
|
||||
"key2": ["a", "b", "c"],
|
||||
}
|
||||
|
||||
|
||||
def test_merge_dictionaries_6():
|
||||
"""Test merge_dictionaries() function.
|
||||
|
||||
GIVEN two dictionaries supplied to the merge_dictionaries() function
|
||||
WHEN one of the dictionaries is empty
|
||||
THEN return a dictionary the other dictionary
|
||||
"""
|
||||
test_dict_1 = {"key1": ["a", "c"], "key2": ["a", "b"]}
|
||||
test_dict_2 = {}
|
||||
|
||||
assert merge_dictionaries(test_dict_1, test_dict_2) == {"key1": ["a", "c"], "key2": ["a", "b"]}
|
||||
|
||||
test_dict_1 = {}
|
||||
test_dict_2 = {"key1": ["a", "c"], "key2": ["a", "b"]}
|
||||
assert merge_dictionaries(test_dict_1, test_dict_2) == {"key1": ["a", "c"], "key2": ["a", "b"]}
|
||||
|
||||
|
||||
def test_merge_dictionaries_7():
|
||||
"""Test merge_dictionaries() function.
|
||||
|
||||
GIVEN two dictionaries supplied to the merge_dictionaries() function
|
||||
WHEN keys and values both dictionaries are not unique
|
||||
THEN ensure the original dictionaries objects are not modified
|
||||
"""
|
||||
test_dict_1 = {"key1": ["a", "c"], "key2": ["a", "b"]}
|
||||
test_dict_2 = {"key1": ["a", "b"], "key2": ["a", "c"]}
|
||||
|
||||
assert merge_dictionaries(test_dict_1, test_dict_2) == {
|
||||
"key1": ["a", "b", "c"],
|
||||
"key2": ["a", "b", "c"],
|
||||
}
|
||||
assert test_dict_1 == {"key1": ["a", "c"], "key2": ["a", "b"]}
|
||||
assert test_dict_2 == {"key1": ["a", "b"], "key2": ["a", "c"]}
|
||||
|
||||
|
||||
def test_rename_in_dict_1():
|
||||
"""Test rename_in_dict() function.
|
||||
|
||||
GIVEN a dictionary with values as a list
|
||||
WHEN the rename_in_dict() function is called with a key that does not exist
|
||||
THEN no keys should be renamed in the dictionary
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}
|
||||
|
||||
assert rename_in_dict(dictionary=test_dict, key="key4", value_1="key5") == test_dict
|
||||
|
||||
|
||||
def test_rename_in_dict_2():
|
||||
"""Test rename_in_dict() function.
|
||||
|
||||
GIVEN a dictionary with values as a list
|
||||
WHEN the rename_in_dict() function is called with a key that exists and a new value for the key
|
||||
THEN the key should be renamed in the returned dictionary and the original dictionary should not be modified
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}
|
||||
|
||||
assert rename_in_dict(dictionary=test_dict, key="key2", value_1="new_key") == {
|
||||
"key1": ["value1"],
|
||||
"new_key": ["value2", "value3"],
|
||||
}
|
||||
assert test_dict == {"key1": ["value1"], "key2": ["value2", "value3"]}
|
||||
|
||||
|
||||
def test_rename_in_dict_3():
|
||||
"""Test rename_in_dict() function.
|
||||
|
||||
GIVEN a dictionary with values as a list
|
||||
WHEN the rename_in_dict() function is called with a key that exists value that does not exist
|
||||
THEN the dictionary should not be modified
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}
|
||||
|
||||
assert (
|
||||
rename_in_dict(dictionary=test_dict, key="key2", value_1="no_value", value_2="new_value")
|
||||
== test_dict
|
||||
)
|
||||
|
||||
|
||||
def test_rename_in_dict_4():
|
||||
"""Test rename_in_dict() function.
|
||||
|
||||
GIVEN a dictionary with values as a list
|
||||
WHEN the rename_in_dict() function is called with a key that exists and a new value for a value
|
||||
THEN update the specified value in the dictionary
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}
|
||||
|
||||
assert rename_in_dict(
|
||||
dictionary=test_dict, key="key2", value_1="value2", value_2="new_value"
|
||||
) == {"key1": ["value1"], "key2": ["new_value", "value3"]}
|
||||
|
||||
|
||||
def test_rename_in_dict_5():
|
||||
"""Test rename_in_dict() function.
|
||||
|
||||
GIVEN a dictionary with values as a list
|
||||
WHEN the rename_in_dict() function is called with a key that exists and a an existing value for a renamed value
|
||||
THEN only one instance of the new value should be in the key
|
||||
"""
|
||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}
|
||||
|
||||
assert rename_in_dict(dictionary=test_dict, key="key2", value_1="value2", value_2="value3") == {
|
||||
"key1": ["value1"],
|
||||
"key2": ["value3"],
|
||||
}
|
||||
|
||||
|
||||
def test_remove_markdown_sections_1():
|
||||
"""Test remove_markdown_sections() function.
|
||||
|
||||
GIVEN a string with markdown sections
|
||||
WHEN the remove_markdown_sections() function is called with the default arguments
|
||||
THEN return the string without removing any markdown sections
|
||||
"""
|
||||
text: str = """
|
||||
---
|
||||
key: value
|
||||
---
|
||||
|
||||
Lorem ipsum `dolor sit` amet.
|
||||
# heading
|
||||
|
||||
```bash
|
||||
echo "Hello World"
|
||||
echo "Hello world"
|
||||
```
|
||||
|
||||
Lorem ipsum `inline_code` lorem ipsum.
|
||||
```
|
||||
echo "foo bar"
|
||||
```
|
||||
|
||||
---
|
||||
dd
|
||||
---
|
||||
"""
|
||||
|
||||
assert remove_markdown_sections(text) == text
|
||||
|
||||
|
||||
def test_remove_markdown_sections_2():
|
||||
"""Test remove_markdown_sections() function.
|
||||
|
||||
GIVEN a string with markdown sections
|
||||
WHEN the remove_markdown_sections() function is called with strip_codeblocks set to True
|
||||
THEN return the string without the codeblocks
|
||||
"""
|
||||
text: str = """
|
||||
---
|
||||
key: value
|
||||
---
|
||||
|
||||
# heading
|
||||
|
||||
```bash
|
||||
echo "Hello world"
|
||||
```
|
||||
|
||||
Lorem ipsum `inline_code` lorem ipsum.
|
||||
```
|
||||
echo "foo bar"
|
||||
```
|
||||
|
||||
---
|
||||
dd
|
||||
---
|
||||
"""
|
||||
result = remove_markdown_sections(text, strip_codeblocks=True)
|
||||
assert "inline_code" in result
|
||||
assert "```bash" not in result
|
||||
assert "```" not in result
|
||||
assert "foo" not in result
|
||||
assert "world" not in result
|
||||
assert "key: value" in result
|
||||
assert "heading" in result
|
||||
assert "Lorem ipsum" in result
|
||||
assert "---\n" in result
|
||||
assert "dd" in result
|
||||
|
||||
|
||||
def test_remove_markdown_sections_3():
|
||||
"""Test remove_markdown_sections() function.
|
||||
|
||||
GIVEN a string with markdown sections
|
||||
WHEN the remove_markdown_sections() function is called with strip_inlinecode set to True
|
||||
THEN return the string without the inline code
|
||||
"""
|
||||
text: str = """
|
||||
---
|
||||
key: value
|
||||
---
|
||||
|
||||
# heading
|
||||
|
||||
```bash
|
||||
echo "Hello world"
|
||||
```
|
||||
|
||||
Lorem ipsum `inline_code` lorem ipsum.
|
||||
```
|
||||
echo "foo bar"
|
||||
```
|
||||
|
||||
---
|
||||
dd
|
||||
---
|
||||
"""
|
||||
result = remove_markdown_sections(text, strip_inlinecode=True)
|
||||
assert "`inline_code`" not in result
|
||||
assert "```bash" in result
|
||||
assert "```" in result
|
||||
assert "foo" in result
|
||||
assert "world" in result
|
||||
assert "key: value" in result
|
||||
assert "heading" in result
|
||||
assert "Lorem ipsum" in result
|
||||
assert "---\n" in result
|
||||
assert "dd" in result
|
||||
|
||||
|
||||
def test_remove_markdown_sections_4():
|
||||
"""Test remove_markdown_sections() function.
|
||||
|
||||
GIVEN a string with markdown sections
|
||||
WHEN the remove_markdown_sections() function is called with strip_frontmatter set to True
|
||||
THEN return the string without the frontmatter
|
||||
"""
|
||||
text: str = """
|
||||
---
|
||||
key: value
|
||||
---
|
||||
|
||||
# heading
|
||||
|
||||
```bash
|
||||
echo "Hello world"
|
||||
```
|
||||
|
||||
Lorem ipsum `inline_code` lorem ipsum.
|
||||
```
|
||||
echo "foo bar"
|
||||
```
|
||||
|
||||
---
|
||||
dd
|
||||
---
|
||||
"""
|
||||
result = remove_markdown_sections(text, strip_frontmatter=True)
|
||||
assert "`inline_code`" in result
|
||||
assert "```bash" in result
|
||||
assert "```" in result
|
||||
assert "foo" in result
|
||||
assert "world" in result
|
||||
assert "key: value" not in result
|
||||
assert "heading" in result
|
||||
assert "Lorem ipsum" in result
|
||||
assert "---\n" in result
|
||||
assert "dd" in result
|
||||
|
||||
|
||||
def test_remove_markdown_sections_5():
|
||||
"""Test remove_markdown_sections() function.
|
||||
|
||||
GIVEN a string with markdown sections
|
||||
WHEN the remove_markdown_sections() function is called with all arguments set to True
|
||||
THEN return the string without the frontmatter, inline code, and codeblocks
|
||||
"""
|
||||
text: str = """
|
||||
---
|
||||
key: value
|
||||
---
|
||||
|
||||
# heading
|
||||
|
||||
```bash
|
||||
echo "Hello world"
|
||||
```
|
||||
|
||||
Lorem ipsum `inline_code` lorem ipsum.
|
||||
```
|
||||
echo "foo bar"
|
||||
```
|
||||
|
||||
---
|
||||
dd
|
||||
---
|
||||
"""
|
||||
result = remove_markdown_sections(
|
||||
text,
|
||||
strip_codeblocks=True,
|
||||
strip_frontmatter=True,
|
||||
strip_inlinecode=True,
|
||||
text, strip_frontmatter=True, strip_inlinecode=True, strip_codeblocks=True
|
||||
)
|
||||
assert "```bash" not in result
|
||||
assert "`dolor sit`" not in result
|
||||
assert "---\nkey: value" not in result
|
||||
assert "`" not in result
|
||||
|
||||
result = remove_markdown_sections(text)
|
||||
assert "```bash" in result
|
||||
assert "`dolor sit`" in result
|
||||
assert "---\nkey: value" in result
|
||||
assert "`" in result
|
||||
|
||||
|
||||
def test_clean_dictionary():
|
||||
"""Test cleaning a dictionary."""
|
||||
dictionary = {" *key* ": ["**value**", "[[value2]]", "#value3"]}
|
||||
|
||||
new_dict = clean_dictionary(dictionary)
|
||||
assert new_dict == {"key": ["value", "value2", "value3"]}
|
||||
assert "`inline_code`" not in result
|
||||
assert "bash" not in result
|
||||
assert "```" not in result
|
||||
assert "foo" not in result
|
||||
assert "world" not in result
|
||||
assert "key: value" not in result
|
||||
assert "heading" in result
|
||||
assert "Lorem ipsum" in result
|
||||
assert "---\n" in result
|
||||
assert "dd" in result
|
||||
|
||||
|
||||
def test_validate_csv_bulk_imports_1(tmp_path):
|
||||
@@ -134,7 +815,7 @@ def test_validate_csv_bulk_imports_1(tmp_path):
|
||||
csv_path = tmp_path / "test.csv"
|
||||
csv_content = """\
|
||||
PATH,type,key,value
|
||||
note1.md,type,key,value"""
|
||||
note1.md,frontmatter,key,value"""
|
||||
csv_path.write_text(csv_content)
|
||||
|
||||
with pytest.raises(typer.BadParameter):
|
||||
@@ -151,7 +832,7 @@ def test_validate_csv_bulk_imports_2(tmp_path):
|
||||
csv_path = tmp_path / "test.csv"
|
||||
csv_content = """\
|
||||
path,Type,key,value
|
||||
note1.md,type,key,value"""
|
||||
note1.md,frontmatter,key,value"""
|
||||
csv_path.write_text(csv_content)
|
||||
|
||||
with pytest.raises(typer.BadParameter):
|
||||
@@ -168,7 +849,7 @@ def test_validate_csv_bulk_imports_3(tmp_path):
|
||||
csv_path = tmp_path / "test.csv"
|
||||
csv_content = """\
|
||||
path,type,value
|
||||
note1.md,type,key,value"""
|
||||
note1.md,frontmatter,key,value"""
|
||||
csv_path.write_text(csv_content)
|
||||
|
||||
with pytest.raises(typer.BadParameter):
|
||||
@@ -185,7 +866,7 @@ def test_validate_csv_bulk_imports_4(tmp_path):
|
||||
csv_path = tmp_path / "test.csv"
|
||||
csv_content = """\
|
||||
path,type,key,values
|
||||
note1.md,type,key,value"""
|
||||
note1.md,frontmatter,key,value"""
|
||||
csv_path.write_text(csv_content)
|
||||
|
||||
with pytest.raises(typer.BadParameter):
|
||||
@@ -207,7 +888,7 @@ def test_validate_csv_bulk_imports_5(tmp_path):
|
||||
validate_csv_bulk_imports(csv_path=csv_path, note_paths=[])
|
||||
|
||||
|
||||
def test_validate_csv_bulk_imports_6(tmp_path, capsys):
|
||||
def test_validate_csv_bulk_imports_6(tmp_path):
|
||||
"""Test the validate_csv_bulk_imports function.
|
||||
|
||||
GIVEN a valid csv file
|
||||
@@ -217,30 +898,77 @@ def test_validate_csv_bulk_imports_6(tmp_path, capsys):
|
||||
csv_path = tmp_path / "test.csv"
|
||||
csv_content = """\
|
||||
path,type,key,value
|
||||
note1.md,type,key,value
|
||||
note2.md,type,key,value
|
||||
note1.md,frontmatter,key,value
|
||||
note1.md,tag,key,value
|
||||
note1.md,inline_metadata,key,value
|
||||
note1.md,inline_metadata,key2,value
|
||||
note1.md,inline_metadata,key2,value2
|
||||
note2.md,frontmatter,key,value
|
||||
note2.md,tag,key,value
|
||||
note2.md,inline_metadata,key,value
|
||||
note2.md,inline_metadata,key2,value
|
||||
note2.md,inline_metadata,key2,value2
|
||||
"""
|
||||
csv_path.write_text(csv_content)
|
||||
|
||||
csv_dict = validate_csv_bulk_imports(csv_path=csv_path, note_paths=["note1.md"])
|
||||
captured = remove_ansi(capsys.readouterr().out)
|
||||
assert "WARNING | 'note2.md' does not exist in vault." in captured
|
||||
assert csv_dict == {"note1.md": [{"key": "key", "type": "type", "value": "value"}]}
|
||||
with pytest.raises(typer.BadParameter):
|
||||
validate_csv_bulk_imports(csv_path=csv_path, note_paths=["note1.md"])
|
||||
|
||||
|
||||
def test_validate_csv_bulk_imports_7(tmp_path):
|
||||
"""Test the validate_csv_bulk_imports function.
|
||||
|
||||
GIVEN a valid csv file
|
||||
WHEN no paths match paths in the vault
|
||||
WHEN if a type is not 'frontmatter' or 'inline_metadata', 'tag'
|
||||
THEN exit the program
|
||||
"""
|
||||
csv_path = tmp_path / "test.csv"
|
||||
csv_content = """\
|
||||
path,type,key,value
|
||||
note1.md,type,key,value
|
||||
note2.md,type,key,value
|
||||
note1.md,frontmatter,key,value
|
||||
note2.md,notvalid,key,value
|
||||
"""
|
||||
csv_path.write_text(csv_content)
|
||||
with pytest.raises(typer.Exit):
|
||||
validate_csv_bulk_imports(csv_path=csv_path, note_paths=[])
|
||||
with pytest.raises(typer.BadParameter):
|
||||
validate_csv_bulk_imports(csv_path=csv_path, note_paths=["note1.md", "note2.md"])
|
||||
|
||||
|
||||
def test_validate_csv_bulk_imports_8(tmp_path):
|
||||
"""Test the validate_csv_bulk_imports function.
|
||||
|
||||
GIVEN a valid csv file
|
||||
WHEN more than one row has the same path
|
||||
THEN add the row to the list of rows for that path
|
||||
"""
|
||||
csv_path = tmp_path / "test.csv"
|
||||
csv_content = """\
|
||||
path,type,key,value
|
||||
note1.md,frontmatter,key,value
|
||||
note1.md,tag,key,value
|
||||
note1.md,inline_metadata,key,value
|
||||
note1.md,inline_metadata,key2,value
|
||||
note1.md,inline_metadata,key2,value2
|
||||
note2.md,frontmatter,key,value
|
||||
note2.md,tag,key,value
|
||||
note2.md,inline_metadata,key,value
|
||||
note2.md,inline_metadata,key2,value
|
||||
note2.md,inline_metadata,key2,value2
|
||||
"""
|
||||
csv_path.write_text(csv_content)
|
||||
csv_dict = validate_csv_bulk_imports(csv_path=csv_path, note_paths=["note1.md", "note2.md"])
|
||||
assert csv_dict == {
|
||||
"note1.md": [
|
||||
{"key": "key", "type": "frontmatter", "value": "value"},
|
||||
{"key": "key", "type": "tag", "value": "value"},
|
||||
{"key": "key", "type": "inline_metadata", "value": "value"},
|
||||
{"key": "key2", "type": "inline_metadata", "value": "value"},
|
||||
{"key": "key2", "type": "inline_metadata", "value": "value2"},
|
||||
],
|
||||
"note2.md": [
|
||||
{"key": "key", "type": "frontmatter", "value": "value"},
|
||||
{"key": "key", "type": "tag", "value": "value"},
|
||||
{"key": "key", "type": "inline_metadata", "value": "value"},
|
||||
{"key": "key2", "type": "inline_metadata", "value": "value"},
|
||||
{"key": "key2", "type": "inline_metadata", "value": "value2"},
|
||||
],
|
||||
}
|
||||
|
||||
@@ -315,16 +315,16 @@ def test_delete_backup_2(test_vault, capsys):
|
||||
assert vault.backup_path.exists() is True
|
||||
|
||||
|
||||
def test_delete_inline_tag_1(test_vault) -> None:
|
||||
"""Test delete_inline_tag() method.
|
||||
def test_delete_tag_1(test_vault) -> None:
|
||||
"""Test delete_tag() method.
|
||||
|
||||
GIVEN a vault object
|
||||
WHEN the delete_inline_tag method is called
|
||||
WHEN the delete_tag method is called
|
||||
THEN the inline tag is deleted
|
||||
"""
|
||||
vault = Vault(config=test_vault)
|
||||
|
||||
assert vault.delete_inline_tag("intext_tag2") == 1
|
||||
assert vault.delete_tag("intext_tag2") == 1
|
||||
assert vault.metadata.tags == [
|
||||
"inline_tag_bottom1",
|
||||
"inline_tag_bottom2",
|
||||
@@ -335,16 +335,16 @@ def test_delete_inline_tag_1(test_vault) -> None:
|
||||
]
|
||||
|
||||
|
||||
def test_delete_inline_tag_2(test_vault) -> None:
|
||||
"""Test delete_inline_tag() method.
|
||||
def test_delete_tag_2(test_vault) -> None:
|
||||
"""Test delete_tag() method.
|
||||
|
||||
GIVEN a vault object
|
||||
WHEN the delete_inline_tag method is called with a tag that does not exist
|
||||
WHEN the delete_tag method is called with a tag that does not exist
|
||||
THEN no changes are made
|
||||
"""
|
||||
vault = Vault(config=test_vault)
|
||||
|
||||
assert vault.delete_inline_tag("no tag") == 0
|
||||
assert vault.delete_tag("no tag") == 0
|
||||
|
||||
|
||||
def test_delete_metadata_1(test_vault) -> None:
|
||||
@@ -594,16 +594,16 @@ def test_move_inline_metadata_1(test_vault) -> None:
|
||||
assert vault.move_inline_metadata(location=InsertLocation.TOP) == 1
|
||||
|
||||
|
||||
def test_rename_inline_tag_1(test_vault) -> None:
|
||||
"""Test rename_inline_tag() method.
|
||||
def test_rename_tag_1(test_vault) -> None:
|
||||
"""Test rename_tag() method.
|
||||
|
||||
GIVEN a vault object
|
||||
WHEN the rename_inline_tag() method is called with a tag that is found
|
||||
WHEN the rename_tag() method is called with a tag that is found
|
||||
THEN the inline tag is renamed
|
||||
"""
|
||||
vault = Vault(config=test_vault)
|
||||
|
||||
assert vault.rename_inline_tag("intext_tag2", "new_tag") == 1
|
||||
assert vault.rename_tag("intext_tag2", "new_tag") == 1
|
||||
assert vault.metadata.tags == [
|
||||
"inline_tag_bottom1",
|
||||
"inline_tag_bottom2",
|
||||
@@ -615,16 +615,16 @@ def test_rename_inline_tag_1(test_vault) -> None:
|
||||
]
|
||||
|
||||
|
||||
def test_rename_inline_tag_2(test_vault) -> None:
|
||||
"""Test rename_inline_tag() method.
|
||||
def test_rename_tag_2(test_vault) -> None:
|
||||
"""Test rename_tag() method.
|
||||
|
||||
GIVEN a vault object
|
||||
WHEN the rename_inline_tag() method is called with a tag that is not found
|
||||
WHEN the rename_tag() method is called with a tag that is not found
|
||||
THEN the inline tag is not renamed
|
||||
"""
|
||||
vault = Vault(config=test_vault)
|
||||
|
||||
assert vault.rename_inline_tag("no tag", "new_tag") == 0
|
||||
assert vault.rename_tag("no tag", "new_tag") == 0
|
||||
|
||||
|
||||
def test_rename_metadata_1(test_vault) -> None:
|
||||
@@ -766,14 +766,14 @@ def test_update_from_dict_3(test_vault):
|
||||
"test1.md": [
|
||||
{"type": "frontmatter", "key": "new_key", "value": "new_value"},
|
||||
{"type": "inline_metadata", "key": "new_key2", "value": "new_value"},
|
||||
{"type": "tags", "key": "", "value": "new_tag"},
|
||||
{"type": "tag", "key": "", "value": "new_tag"},
|
||||
]
|
||||
}
|
||||
assert vault.update_from_dict(update_dict) == 1
|
||||
assert vault.get_changed_notes()[0].note_path.name == "test1.md"
|
||||
assert vault.get_changed_notes()[0].frontmatter.dict == {"new_key": ["new_value"]}
|
||||
assert vault.get_changed_notes()[0].inline_metadata.dict == {"new_key2": ["new_value"]}
|
||||
assert vault.get_changed_notes()[0].inline_tags.list == ["new_tag"]
|
||||
assert vault.get_changed_notes()[0].tags.list == ["new_tag"]
|
||||
assert vault.metadata.frontmatter == {"new_key": ["new_value"]}
|
||||
assert vault.metadata.inline_metadata == {"new_key2": ["new_value"]}
|
||||
assert vault.metadata.tags == ["new_tag"]
|
||||
|
||||
Reference in New Issue
Block a user