Mirror of https://github.com/natelandau/obsidian-metadata.git (synced 2025-11-16 08:53:48 -05:00)

Compare commits

7 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 5a4643ea8f | |
| | c5766af678 | |
| | 375dceb8c6 | |
| | c75d18200e | |
| | ffdac91537 | |
| | e8f408ee33 | |
| | 1dd3ddfb22 | |
@@ -54,14 +54,14 @@ repos:
         types: [python]

   - repo: "https://github.com/adrienverge/yamllint.git"
-    rev: v1.29.0
+    rev: v1.30.0
     hooks:
       - id: yamllint
         files: ^.*\.(yaml|yml)$
         entry: yamllint --strict --config-file .yamllint.yml

   - repo: "https://github.com/charliermarsh/ruff-pre-commit"
-    rev: "v0.0.257"
+    rev: "v0.0.259"
     hooks:
       - id: ruff
         args: ["--extend-ignore", "I001,D301,D401"]
CHANGELOG.md (12 changed lines)

@@ -1,3 +1,15 @@
+## v0.11.1 (2023-03-29)
+
+### Fix
+
+- add custom exceptions (#29)
+
+## v0.11.0 (2023-03-24)
+
+### Feat
+
+- add `--import-csv` option to cli
+
 ## v0.10.0 (2023-03-21)

 ### Feat
@@ -25,6 +25,7 @@ pip install obsidian-metadata

 - `--config-file`: Specify a custom configuration file location
 - `--dry-run`: Make no destructive changes
+- `--import-csv` Import a CSV file with bulk updates
 - `--export-csv`: Specify a path and create a CSV export of all metadata
 - `--export-json`: Specify a path and create a JSON export of all metadata
 - `--export-template`: Specify a path and export all notes with their associated metadata to a CSV file for use as a bulk import template
@@ -173,6 +174,8 @@ Create a CSV template for making bulk updates containing all your notes and thei
 1. Using the `--export-template` cli command; or
 2. Selecting the `Metadata by note` option within the `Export Metadata` section of the app

+Once you have a template created you can import it using the `--import-csv` flag or by navigating to the `Import bulk changes from CSV` option.
+
 # Contributing

 ## Setup: Once per project
poetry.lock (generated; 310 changed lines)

@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand.

 [[package]]
 name = "argcomplete"
@@ -37,37 +37,37 @@ tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy

 [[package]]
 name = "black"
-version = "23.1.0"
+version = "23.3.0"
 description = "The uncompromising code formatter."
 category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
{file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"},
|
||||
{file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"},
|
||||
{file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"},
|
||||
{file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"},
|
||||
{file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"},
|
||||
{file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"},
|
||||
{file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"},
|
||||
{file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"},
|
||||
{file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"},
|
||||
{file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"},
|
||||
{file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"},
|
||||
{file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"},
|
||||
{file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"},
|
||||
{file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"},
|
||||
{file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"},
|
||||
{file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"},
|
||||
{file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"},
|
||||
{file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"},
|
||||
{file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"},
|
||||
{file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"},
|
||||
{file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"},
|
||||
{file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"},
|
||||
{file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"},
|
||||
{file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"},
|
||||
{file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"},
|
||||
{file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"},
|
||||
{file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"},
|
||||
{file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"},
|
||||
{file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"},
|
||||
{file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"},
|
||||
{file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"},
|
||||
{file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"},
|
||||
{file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"},
|
||||
{file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"},
|
||||
{file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"},
|
||||
{file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"},
|
||||
{file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"},
|
||||
{file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"},
|
||||
{file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"},
|
||||
{file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"},
|
||||
{file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"},
|
||||
{file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"},
|
||||
{file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"},
|
||||
{file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"},
|
||||
{file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"},
|
||||
{file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"},
|
||||
{file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"},
|
||||
{file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"},
|
||||
{file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"},
|
||||
{file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"},
|
||||
 ]

 [package.dependencies]
@@ -283,30 +283,30 @@ testing = ["pre-commit"]

 [[package]]
 name = "filelock"
-version = "3.10.0"
+version = "3.10.7"
 description = "A platform independent file lock."
 category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "filelock-3.10.0-py3-none-any.whl", hash = "sha256:e90b34656470756edf8b19656785c5fea73afa1953f3e1b0d645cef11cab3182"},
-    {file = "filelock-3.10.0.tar.gz", hash = "sha256:3199fd0d3faea8b911be52b663dfccceb84c95949dd13179aa21436d1a79c4ce"},
+    {file = "filelock-3.10.7-py3-none-any.whl", hash = "sha256:bde48477b15fde2c7e5a0713cbe72721cb5a5ad32ee0b8f419907960b9d75536"},
+    {file = "filelock-3.10.7.tar.gz", hash = "sha256:892be14aa8efc01673b5ed6589dbccb95f9a8596f0507e232626155495c18105"},
 ]

 [package.extras]
 docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.2.1)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.2.2)", "diff-cover (>=7.5)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"]

 [[package]]
 name = "identify"
-version = "2.5.21"
+version = "2.5.22"
 description = "File identification library for Python"
 category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "identify-2.5.21-py2.py3-none-any.whl", hash = "sha256:69edcaffa8e91ae0f77d397af60f148b6b45a8044b2cc6d99cafa5b04793ff00"},
-    {file = "identify-2.5.21.tar.gz", hash = "sha256:7671a05ef9cfaf8ff63b15d45a91a1147a03aaccb2976d4e9bd047cbbc508471"},
+    {file = "identify-2.5.22-py2.py3-none-any.whl", hash = "sha256:f0faad595a4687053669c112004178149f6c326db71ee999ae4636685753ad2f"},
+    {file = "identify-2.5.22.tar.gz", hash = "sha256:f7a93d6cf98e29bd07663c60728e7a4057615068d7a639d132dc883b2d54d31e"},
 ]

 [package.extras]
@@ -616,19 +616,19 @@ dev = ["black", "hypothesis", "mypy", "pygments (>=2.14.0)", "pytest", "pytest-c

 [[package]]
 name = "platformdirs"
-version = "3.1.1"
+version = "3.2.0"
 description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
 category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "platformdirs-3.1.1-py3-none-any.whl", hash = "sha256:e5986afb596e4bb5bde29a79ac9061aa955b94fca2399b7aaac4090860920dd8"},
-    {file = "platformdirs-3.1.1.tar.gz", hash = "sha256:024996549ee88ec1a9aa99ff7f8fc819bb59e2c3477b410d90a16d32d6e707aa"},
+    {file = "platformdirs-3.2.0-py3-none-any.whl", hash = "sha256:ebe11c0d7a805086e99506aa331612429a72ca7cd52a1f0d277dc4adc20cb10e"},
+    {file = "platformdirs-3.2.0.tar.gz", hash = "sha256:d5b638ca397f25f979350ff789db335903d7ea010ab28903f57b27e1b16c2b08"},
 ]

 [package.extras]
 docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
-test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]
+test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"]

 [[package]]
 name = "pluggy"
@@ -648,14 +648,14 @@ testing = ["pytest", "pytest-benchmark"]

 [[package]]
 name = "poethepoet"
-version = "0.18.1"
+version = "0.19.0"
 description = "A task runner that works well with poetry."
 category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "poethepoet-0.18.1-py3-none-any.whl", hash = "sha256:e85727bf6f4a10bf6c1a43026bdeb40df689bea3c4682d03cbe531cabc8f2ba6"},
-    {file = "poethepoet-0.18.1.tar.gz", hash = "sha256:5f3566b14c2f5dccdfbc3bb26f0096006b38dc0b9c74bd4f8dd1eba7b0e29f6a"},
+    {file = "poethepoet-0.19.0-py3-none-any.whl", hash = "sha256:87038be589077e4b407050a9da644d9cd9e4076ccfc8abc7f855cf6870d5c6c2"},
+    {file = "poethepoet-0.19.0.tar.gz", hash = "sha256:897eb85ec15876d79befc7d19d4c80ce7c8b214d1bb0dcfec640abd81616bfed"},
 ]

 [package.dependencies]
@@ -679,14 +679,14 @@ files = [

 [[package]]
 name = "pre-commit"
-version = "3.2.0"
+version = "3.2.1"
 description = "A framework for managing and maintaining multi-language pre-commit hooks."
 category = "dev"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "pre_commit-3.2.0-py2.py3-none-any.whl", hash = "sha256:f712d3688102e13c8e66b7d7dbd8934a6dda157e58635d89f7d6fecdca39ce8a"},
-    {file = "pre_commit-3.2.0.tar.gz", hash = "sha256:818f0d998059934d0f81bb3667e3ccdc32da6ed7ccaac33e43dc231561ddaaa9"},
+    {file = "pre_commit-3.2.1-py2.py3-none-any.whl", hash = "sha256:a06a7fcce7f420047a71213c175714216498b49ebc81fe106f7716ca265f5bb6"},
+    {file = "pre_commit-3.2.1.tar.gz", hash = "sha256:b5aee7d75dbba21ee161ba641b01e7ae10c5b91967ebf7b2ab0dfae12d07e1f1"},
 ]

 [package.dependencies]
@@ -905,112 +905,84 @@ docs = ["Sphinx (>=3.3,<4.0)", "sphinx-autobuild (>=2020.9.1,<2021.0.0)", "sphin

 [[package]]
 name = "regex"
-version = "2022.10.31"
+version = "2023.3.23"
 description = "Alternative regular expression module, to replace re."
 category = "main"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
 files = [
{file = "regex-2022.10.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8ff454ef0bb061e37df03557afda9d785c905dab15584860f982e88be73015f"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eba476b1b242620c266edf6325b443a2e22b633217a9835a52d8da2b5c051f9"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0e5af9a9effb88535a472e19169e09ce750c3d442fb222254a276d77808620b"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d03fe67b2325cb3f09be029fd5da8df9e6974f0cde2c2ac6a79d2634e791dd57"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9d0b68ac1743964755ae2d89772c7e6fb0118acd4d0b7464eaf3921c6b49dd4"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a45b6514861916c429e6059a55cf7db74670eaed2052a648e3e4d04f070e001"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b0886885f7323beea6f552c28bff62cbe0983b9fbb94126531693ea6c5ebb90"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5aefb84a301327ad115e9d346c8e2760009131d9d4b4c6b213648d02e2abe144"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:702d8fc6f25bbf412ee706bd73019da5e44a8400861dfff7ff31eb5b4a1276dc"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a3c1ebd4ed8e76e886507c9eddb1a891673686c813adf889b864a17fafcf6d66"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:50921c140561d3db2ab9f5b11c5184846cde686bb5a9dc64cae442926e86f3af"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7db345956ecce0c99b97b042b4ca7326feeec6b75facd8390af73b18e2650ffc"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:763b64853b0a8f4f9cfb41a76a4a85a9bcda7fdda5cb057016e7706fde928e66"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-win32.whl", hash = "sha256:44136355e2f5e06bf6b23d337a75386371ba742ffa771440b85bed367c1318d1"},
|
||||
{file = "regex-2022.10.31-cp310-cp310-win_amd64.whl", hash = "sha256:bfff48c7bd23c6e2aec6454aaf6edc44444b229e94743b34bdcdda2e35126cf5"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b4b1fe58cd102d75ef0552cf17242705ce0759f9695334a56644ad2d83903fe"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:542e3e306d1669b25936b64917285cdffcd4f5c6f0247636fec037187bd93542"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c27cc1e4b197092e50ddbf0118c788d9977f3f8f35bfbbd3e76c1846a3443df7"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8e38472739028e5f2c3a4aded0ab7eadc447f0d84f310c7a8bb697ec417229e"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76c598ca73ec73a2f568e2a72ba46c3b6c8690ad9a07092b18e48ceb936e9f0c"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c28d3309ebd6d6b2cf82969b5179bed5fefe6142c70f354ece94324fa11bf6a1"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9af69f6746120998cd9c355e9c3c6aec7dff70d47247188feb4f829502be8ab4"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a5f9505efd574d1e5b4a76ac9dd92a12acb2b309551e9aa874c13c11caefbe4f"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5ff525698de226c0ca743bfa71fc6b378cda2ddcf0d22d7c37b1cc925c9650a5"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4fe7fda2fe7c8890d454f2cbc91d6c01baf206fbc96d89a80241a02985118c0c"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2cdc55ca07b4e70dda898d2ab7150ecf17c990076d3acd7a5f3b25cb23a69f1c"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:44a6c2f6374e0033873e9ed577a54a3602b4f609867794c1a3ebba65e4c93ee7"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-win32.whl", hash = "sha256:d8716f82502997b3d0895d1c64c3b834181b1eaca28f3f6336a71777e437c2af"},
|
||||
{file = "regex-2022.10.31-cp311-cp311-win_amd64.whl", hash = "sha256:61edbca89aa3f5ef7ecac8c23d975fe7261c12665f1d90a6b1af527bba86ce61"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a069c8483466806ab94ea9068c34b200b8bfc66b6762f45a831c4baaa9e8cdd"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d26166acf62f731f50bdd885b04b38828436d74e8e362bfcb8df221d868b5d9b"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac741bf78b9bb432e2d314439275235f41656e189856b11fb4e774d9f7246d81"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75f591b2055523fc02a4bbe598aa867df9e953255f0b7f7715d2a36a9c30065c"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bddd61d2a3261f025ad0f9ee2586988c6a00c780a2fb0a92cea2aa702c54"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef4163770525257876f10e8ece1cf25b71468316f61451ded1a6f44273eedeb5"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7b280948d00bd3973c1998f92e22aa3ecb76682e3a4255f33e1020bd32adf443"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d0213671691e341f6849bf33cd9fad21f7b1cb88b89e024f33370733fec58742"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:22e7ebc231d28393dfdc19b185d97e14a0f178bedd78e85aad660e93b646604e"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8ad241da7fac963d7573cc67a064c57c58766b62a9a20c452ca1f21050868dfa"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:586b36ebda81e6c1a9c5a5d0bfdc236399ba6595e1397842fd4a45648c30f35e"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0653d012b3bf45f194e5e6a41df9258811ac8fc395579fa82958a8b76286bea4"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-win32.whl", hash = "sha256:144486e029793a733e43b2e37df16a16df4ceb62102636ff3db6033994711066"},
|
||||
{file = "regex-2022.10.31-cp36-cp36m-win_amd64.whl", hash = "sha256:c14b63c9d7bab795d17392c7c1f9aaabbffd4cf4387725a0ac69109fb3b550c6"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4cac3405d8dda8bc6ed499557625585544dd5cbf32072dcc72b5a176cb1271c8"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23cbb932cc53a86ebde0fb72e7e645f9a5eec1a5af7aa9ce333e46286caef783"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74bcab50a13960f2a610cdcd066e25f1fd59e23b69637c92ad470784a51b1347"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d680ef3e4d405f36f0d6d1ea54e740366f061645930072d39bca16a10d8c93"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6910b56b700bea7be82c54ddf2e0ed792a577dfaa4a76b9af07d550af435c6"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:659175b2144d199560d99a8d13b2228b85e6019b6e09e556209dfb8c37b78a11"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1ddf14031a3882f684b8642cb74eea3af93a2be68893901b2b387c5fd92a03ec"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b683e5fd7f74fb66e89a1ed16076dbab3f8e9f34c18b1979ded614fe10cdc4d9"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2bde29cc44fa81c0a0c8686992c3080b37c488df167a371500b2a43ce9f026d1"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4919899577ba37f505aaebdf6e7dc812d55e8f097331312db7f1aab18767cce8"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:9c94f7cc91ab16b36ba5ce476f1904c91d6c92441f01cd61a8e2729442d6fcf5"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae1e96785696b543394a4e3f15f3f225d44f3c55dafe3f206493031419fedf95"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-win32.whl", hash = "sha256:c670f4773f2f6f1957ff8a3962c7dd12e4be54d05839b216cb7fd70b5a1df394"},
|
||||
{file = "regex-2022.10.31-cp37-cp37m-win_amd64.whl", hash = "sha256:8e0caeff18b96ea90fc0eb6e3bdb2b10ab5b01a95128dfeccb64a7238decf5f0"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:131d4be09bea7ce2577f9623e415cab287a3c8e0624f778c1d955ec7c281bd4d"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e613a98ead2005c4ce037c7b061f2409a1a4e45099edb0ef3200ee26ed2a69a8"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052b670fafbe30966bbe5d025e90b2a491f85dfe5b2583a163b5e60a85a321ad"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa62a07ac93b7cb6b7d0389d8ef57ffc321d78f60c037b19dfa78d6b17c928ee"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5352bea8a8f84b89d45ccc503f390a6be77917932b1c98c4cdc3565137acc714"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f61c9944f0be2dc2b75689ba409938c14876c19d02f7585af4460b6a21403e"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29c04741b9ae13d1e94cf93fca257730b97ce6ea64cfe1eba11cf9ac4e85afb6"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:543883e3496c8b6d58bd036c99486c3c8387c2fc01f7a342b760c1ea3158a318"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7a8b43ee64ca8f4befa2bea4083f7c52c92864d8518244bfa6e88c751fa8fff"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6a9a19bea8495bb419dc5d38c4519567781cd8d571c72efc6aa959473d10221a"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6ffd55b5aedc6f25fd8d9f905c9376ca44fcf768673ffb9d160dd6f409bfda73"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4bdd56ee719a8f751cf5a593476a441c4e56c9b64dc1f0f30902858c4ef8771d"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ca88da1bd78990b536c4a7765f719803eb4f8f9971cc22d6ca965c10a7f2c4c"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-win32.whl", hash = "sha256:5a260758454580f11dd8743fa98319bb046037dfab4f7828008909d0aa5292bc"},
|
||||
{file = "regex-2022.10.31-cp38-cp38-win_amd64.whl", hash = "sha256:5e6a5567078b3eaed93558842346c9d678e116ab0135e22eb72db8325e90b453"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5217c25229b6a85049416a5c1e6451e9060a1edcf988641e309dbe3ab26d3e49"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4bf41b8b0a80708f7e0384519795e80dcb44d7199a35d52c15cc674d10b3081b"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf0da36a212978be2c2e2e2d04bdff46f850108fccc1851332bcae51c8907cc"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d403d781b0e06d2922435ce3b8d2376579f0c217ae491e273bab8d092727d244"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a37d51fa9a00d265cf73f3de3930fa9c41548177ba4f0faf76e61d512c774690"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4f781ffedd17b0b834c8731b75cce2639d5a8afe961c1e58ee7f1f20b3af185"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d243b36fbf3d73c25e48014961e83c19c9cc92530516ce3c43050ea6276a2ab7"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:370f6e97d02bf2dd20d7468ce4f38e173a124e769762d00beadec3bc2f4b3bc4"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:597f899f4ed42a38df7b0e46714880fb4e19a25c2f66e5c908805466721760f5"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7dbdce0c534bbf52274b94768b3498abdf675a691fec5f751b6057b3030f34c1"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:22960019a842777a9fa5134c2364efaed5fbf9610ddc5c904bd3a400973b0eb8"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7f5a3ffc731494f1a57bd91c47dc483a1e10048131ffb52d901bfe2beb6102e8"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7ef6b5942e6bfc5706301a18a62300c60db9af7f6368042227ccb7eeb22d0892"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-win32.whl", hash = "sha256:395161bbdbd04a8333b9ff9763a05e9ceb4fe210e3c7690f5e68cedd3d65d8e1"},
|
||||
{file = "regex-2022.10.31-cp39-cp39-win_amd64.whl", hash = "sha256:957403a978e10fb3ca42572a23e6f7badff39aa1ce2f4ade68ee452dc6807692"},
|
||||
{file = "regex-2022.10.31.tar.gz", hash = "sha256:a3a98921da9a1bf8457aeee6a551948a83601689e5ecdd736894ea9bbec77e83"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:845a5e2d84389c4ddada1a9b95c055320070f18bb76512608374aca00d22eca8"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87d9951f5a538dd1d016bdc0dcae59241d15fa94860964833a54d18197fcd134"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37ae17d3be44c0b3f782c28ae9edd8b47c1f1776d4cabe87edc0b98e1f12b021"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0b8eb1e3bca6b48dc721818a60ae83b8264d4089a4a41d62be6d05316ec38e15"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df45fac182ebc3c494460c644e853515cc24f5ad9da05f8ffb91da891bfee879"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7006105b10b59971d3b248ad75acc3651c7e4cf54d81694df5a5130a3c3f7ea"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93f3f1aa608380fe294aa4cb82e2afda07a7598e828d0341e124b8fd9327c715"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:787954f541ab95d8195d97b0b8cf1dc304424adb1e07365967e656b92b38a699"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:20abe0bdf03630fe92ccafc45a599bca8b3501f48d1de4f7d121153350a2f77d"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11d00c31aeab9a6e0503bc77e73ed9f4527b3984279d997eb145d7c7be6268fd"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d5bbe0e1511b844794a3be43d6c145001626ba9a6c1db8f84bdc724e91131d9d"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ea3c0cb56eadbf4ab2277e7a095676370b3e46dbfc74d5c383bd87b0d6317910"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d895b4c863059a4934d3e874b90998df774644a41b349ebb330f85f11b4ef2c0"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-win32.whl", hash = "sha256:9d764514d19b4edcc75fd8cb1423448ef393e8b6cbd94f38cab983ab1b75855d"},
|
||||
{file = "regex-2023.3.23-cp310-cp310-win_amd64.whl", hash = "sha256:11d1f2b7a0696dc0310de0efb51b1f4d813ad4401fe368e83c0c62f344429f98"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8a9c63cde0eaa345795c0fdeb19dc62d22e378c50b0bc67bf4667cd5b482d98b"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dd7200b4c27b68cf9c9646da01647141c6db09f48cc5b51bc588deaf8e98a797"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22720024b90a6ba673a725dcc62e10fb1111b889305d7c6b887ac7466b74bedb"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b190a339090e6af25f4a5fd9e77591f6d911cc7b96ecbb2114890b061be0ac1"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e76b6fc0d8e9efa39100369a9b3379ce35e20f6c75365653cf58d282ad290f6f"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7868b8f218bf69a2a15402fde08b08712213a1f4b85a156d90473a6fb6b12b09"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2472428efc4127374f494e570e36b30bb5e6b37d9a754f7667f7073e43b0abdd"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c37df2a060cb476d94c047b18572ee2b37c31f831df126c0da3cd9227b39253d"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4479f9e2abc03362df4045b1332d4a2b7885b245a30d4f4b051c4083b97d95d8"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2396e0678167f2d0c197da942b0b3fb48fee2f0b5915a0feb84d11b6686afe6"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75f288c60232a5339e0ff2fa05779a5e9c74e9fc085c81e931d4a264501e745b"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c869260aa62cee21c5eb171a466c0572b5e809213612ef8d495268cd2e34f20d"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-win32.whl", hash = "sha256:25f0532fd0c53e96bad84664171969de9673b4131f2297f1db850d3918d58858"},
|
||||
{file = "regex-2023.3.23-cp311-cp311-win_amd64.whl", hash = "sha256:5ccfafd98473e007cebf7da10c1411035b7844f0f204015efd050601906dbb53"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6572ff287176c0fb96568adb292674b421fa762153ed074d94b1d939ed92c253"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a610e0adfcb0fc84ea25f6ea685e39e74cbcd9245a72a9a7aab85ff755a5ed27"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086afe222d58b88b62847bdbd92079b4699350b4acab892f88a935db5707c790"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79e29fd62fa2f597a6754b247356bda14b866131a22444d67f907d6d341e10f3"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c07ce8e9eee878a48ebeb32ee661b49504b85e164b05bebf25420705709fdd31"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b036f401895e854de9fefe061518e78d506d8a919cc250dc3416bca03f6f9a"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78ac8dd8e18800bb1f97aad0d73f68916592dddf233b99d2b5cabc562088503a"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:539dd010dc35af935b32f248099e38447bbffc10b59c2b542bceead2bed5c325"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9bf4a5626f2a0ea006bf81e8963f498a57a47d58907eaa58f4b3e13be68759d8"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf86b4328c204c3f315074a61bc1c06f8a75a8e102359f18ce99fbcbbf1951f0"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2848bf76673c83314068241c8d5b7fa9ad9bed866c979875a0e84039349e8fa7"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c125a02d22c555e68f7433bac8449992fa1cead525399f14e47c2d98f2f0e467"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cd1671e9d5ac05ce6aa86874dd8dfa048824d1dbe73060851b310c6c1a201a96"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-win32.whl", hash = "sha256:fffe57312a358be6ec6baeb43d253c36e5790e436b7bf5b7a38df360363e88e9"},
|
||||
{file = "regex-2023.3.23-cp38-cp38-win_amd64.whl", hash = "sha256:dbb3f87e15d3dd76996d604af8678316ad2d7d20faa394e92d9394dfd621fd0c"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c88e8c226473b5549fe9616980ea7ca09289246cfbdf469241edf4741a620004"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6560776ec19c83f3645bbc5db64a7a5816c9d8fb7ed7201c5bcd269323d88072"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b1fc2632c01f42e06173d8dd9bb2e74ab9b0afa1d698058c867288d2c7a31f3"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdf7ad455f1916b8ea5cdbc482d379f6daf93f3867b4232d14699867a5a13af7"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5fc33b27b1d800fc5b78d7f7d0f287e35079ecabe68e83d46930cf45690e1c8c"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c49552dc938e3588f63f8a78c86f3c9c75301e813bca0bef13bdb4b87ccf364"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e152461e9a0aedec7d37fc66ec0fa635eca984777d3d3c3e36f53bf3d3ceb16e"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:db034255e72d2995cf581b14bb3fc9c00bdbe6822b49fcd4eef79e1d5f232618"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:55ae114da21b7a790b90255ea52d2aa3a0d121a646deb2d3c6a3194e722fc762"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ef3f528fe1cc3d139508fe1b22523745aa77b9d6cb5b0bf277f48788ee0b993f"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a81c9ec59ca2303acd1ccd7b9ac409f1e478e40e96f8f79b943be476c5fdb8bb"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cde09c4fdd070772aa2596d97e942eb775a478b32459e042e1be71b739d08b77"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3cd9f5dd7b821f141d3a6ca0d5d9359b9221e4f051ca3139320adea9f1679691"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-win32.whl", hash = "sha256:7304863f3a652dab5e68e6fb1725d05ebab36ec0390676d1736e0571ebb713ef"},
|
||||
{file = "regex-2023.3.23-cp39-cp39-win_amd64.whl", hash = "sha256:54c3fa855a3f7438149de3211738dd9b5f0c733f48b54ae05aa7fce83d48d858"},
|
||||
{file = "regex-2023.3.23.tar.gz", hash = "sha256:dc80df325b43ffea5cdea2e3eaa97a44f3dd298262b1c7fe9dbb2a9522b956a7"},
|
||||
 ]

 [[package]]
 name = "rich"
-version = "13.3.2"
+version = "13.3.3"
 description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
 category = "main"
 optional = false
 python-versions = ">=3.7.0"
 files = [
-    {file = "rich-13.3.2-py3-none-any.whl", hash = "sha256:a104f37270bf677148d8acb07d33be1569eeee87e2d1beb286a4e9113caf6f2f"},
-    {file = "rich-13.3.2.tar.gz", hash = "sha256:91954fe80cfb7985727a467ca98a7618e5dd15178cc2da10f553b36a93859001"},
+    {file = "rich-13.3.3-py3-none-any.whl", hash = "sha256:540c7d6d26a1178e8e8b37e9ba44573a3cd1464ff6348b99ee7061b95d1c6333"},
+    {file = "rich-13.3.3.tar.gz", hash = "sha256:dc84400a9d842b3a9c5ff74addd8eb798d155f36c1c91303888e0a66850d2a15"},
 ]

 [package.dependencies]
@@ -1087,41 +1059,41 @@ files = [

 [[package]]
 name = "ruff"
-version = "0.0.257"
+version = "0.0.259"
 description = "An extremely fast Python linter, written in Rust."
 category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
{file = "ruff-0.0.257-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:7280640690c1d0046b20e0eb924319a89d8e22925d7d232180ce31196e7478f8"},
|
||||
{file = "ruff-0.0.257-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:4582b73da61ab410ffda35b2987a6eacb33f18263e1c91810f0b9779ec4f41a9"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5acae9878f1136893e266348acdb9d30dfae23c296d3012043816432a5abdd51"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d9f0912d045eee15e8e02e335c16d7a7f9fb6821aa5eb1628eeb5bbfa3d88908"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a9542c34ee5298b31be6c6ba304f14b672dcf104846ee65adb2466d3e325870"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3464f1ad4cea6c4b9325da13ae306bd22bf15d226e18d19c52db191b1f4355ac"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a54bfd559e558ee0df2a2f3756423fe6a9de7307bc290d807c3cdf351cb4c24"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3438fd38446e1a0915316f4085405c9feca20fe00a4b614995ab7034dbfaa7ff"},
|
||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:358cc2b547bd6451dcf2427b22a9c29a2d9c34e66576c693a6381c5f2ed3011d"},
|
||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:783390f1e94a168c79d7004426dae3e4ae2999cc85f7d00fdd86c62262b71854"},
|
||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:aaa3b5b6929c63a854b6bcea7a229453b455ab26337100b2905fae4523ca5667"},
|
||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4ecd7a84db4816df2dcd0f11c5365a9a2cf4fa70a19b3ac161b7b0bfa592959d"},
|
||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3db8d77d5651a2c0d307102d717627a025d4488d406f54c2764b21cfbe11d822"},
|
||||
{file = "ruff-0.0.257-py3-none-win32.whl", hash = "sha256:d2c8755fa4f6c5e5ec032ad341ca3beeecd16786e12c3f26e6b0cc40418ae998"},
|
||||
{file = "ruff-0.0.257-py3-none-win_amd64.whl", hash = "sha256:3cec07d6fecb1ebbc45ea8eeb1047b929caa2f7dfb8dd4b0e1869ff789326da5"},
|
||||
{file = "ruff-0.0.257-py3-none-win_arm64.whl", hash = "sha256:352f1bdb9b433b3b389aee512ffb0b82226ae1e25b3d92e4eaf0e7be6b1b6f6a"},
|
||||
{file = "ruff-0.0.257.tar.gz", hash = "sha256:fedfd06a37ddc17449203c3e38fc83fb68de7f20b5daa0ee4e60d3599b38bab0"},
|
||||
{file = "ruff-0.0.259-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:f3938dc45e2a3f818e9cbd53007265c22246fbfded8837b2c563bf0ebde1a226"},
|
||||
{file = "ruff-0.0.259-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:22e1e35bf5f12072cd644d22afd9203641ccf258bc14ff91aa1c43dc14f6047d"},
|
||||
{file = "ruff-0.0.259-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2fb20e89e85d147c85caa807707a1488bccc1f3854dc3d53533e89b52a0c5ff"},
|
||||
{file = "ruff-0.0.259-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:49e903bcda19f6bb0725a962c058eb5d61f40d84ef52ed53b61939b69402ab4e"},
|
||||
{file = "ruff-0.0.259-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71f0ef1985e9a6696fa97da8459917fa34bdaa2c16bd33bd5edead585b7d44f7"},
|
||||
{file = "ruff-0.0.259-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7cfef26619cba184d59aa7fa17b48af5891d51fc0b755a9bc533478a10d4d066"},
|
||||
{file = "ruff-0.0.259-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79b02fa17ec1fd8d306ae302cb47fb614b71e1f539997858243769bcbe78c6d9"},
|
||||
{file = "ruff-0.0.259-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:428507fb321b386dda70d66cd1a8aa0abf51d7c197983d83bb9e4fa5ee60300b"},
|
||||
{file = "ruff-0.0.259-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5fbaea9167f1852757f02133e5daacdb8c75b3431343205395da5b10499927a"},
|
||||
{file = "ruff-0.0.259-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:40ae87f2638484b7e8a7567b04a7af719f1c484c5bf132038b702bb32e1f6577"},
|
||||
{file = "ruff-0.0.259-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:29e2b77b7d5da6a7dd5cf9b738b511355c5734ece56f78e500d4b5bffd58c1a0"},
|
||||
{file = "ruff-0.0.259-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b3c1beacf6037e7f0781d4699d9a2dd4ba2462f475be5b1f45cf84c4ba3c69d"},
|
||||
{file = "ruff-0.0.259-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:daaea322e7e85f4c13d82be9536309e1c4b8b9851bb0cbc7eeb15d490fd46bf9"},
|
||||
{file = "ruff-0.0.259-py3-none-win32.whl", hash = "sha256:38704f151323aa5858370a2f792e122cc25e5d1aabe7d42ceeab83da18f0b456"},
|
||||
{file = "ruff-0.0.259-py3-none-win_amd64.whl", hash = "sha256:aa9449b898287e621942cc71b9327eceb8f0c357e4065fecefb707ef2d978df8"},
|
||||
{file = "ruff-0.0.259-py3-none-win_arm64.whl", hash = "sha256:e4f39e18702de69faaaee3969934b92d7467285627f99a5b6ecd55a7d9f5d086"},
|
||||
{file = "ruff-0.0.259.tar.gz", hash = "sha256:8b56496063ab3bfdf72339a5fbebb8bd46e5c5fee25ef11a9f03b208fa0562ec"},
|
||||
 ]

 [[package]]
 name = "setuptools"
-version = "67.6.0"
+version = "67.6.1"
 description = "Easily download, build, install, upgrade, and uninstall Python packages"
 category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "setuptools-67.6.0-py3-none-any.whl", hash = "sha256:b78aaa36f6b90a074c1fa651168723acbf45d14cb1196b6f02c0fd07f17623b2"},
-    {file = "setuptools-67.6.0.tar.gz", hash = "sha256:2ee892cd5f29f3373097f5a814697e397cf3ce313616df0af11231e2ad118077"},
+    {file = "setuptools-67.6.1-py3-none-any.whl", hash = "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"},
+    {file = "setuptools-67.6.1.tar.gz", hash = "sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"},
 ]

 [package.extras]
@@ -1209,26 +1181,26 @@ files = [

 [[package]]
 name = "tomlkit"
-version = "0.11.6"
+version = "0.11.7"
 description = "Style preserving TOML library"
 category = "main"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.7"
 files = [
-    {file = "tomlkit-0.11.6-py3-none-any.whl", hash = "sha256:07de26b0d8cfc18f871aec595fda24d95b08fef89d147caa861939f37230bf4b"},
-    {file = "tomlkit-0.11.6.tar.gz", hash = "sha256:71b952e5721688937fb02cf9d354dbcf0785066149d2855e44531ebdd2b65d73"},
+    {file = "tomlkit-0.11.7-py3-none-any.whl", hash = "sha256:5325463a7da2ef0c6bbfefb62a3dc883aebe679984709aee32a317907d0a8d3c"},
+    {file = "tomlkit-0.11.7.tar.gz", hash = "sha256:f392ef70ad87a672f02519f99967d28a4d3047133e2d1df936511465fbb3791d"},
 ]

 [[package]]
 name = "typeguard"
-version = "3.0.1"
+version = "3.0.2"
 description = "Run-time type checker for Python"
 category = "dev"
 optional = false
 python-versions = ">=3.7.4"
 files = [
-    {file = "typeguard-3.0.1-py3-none-any.whl", hash = "sha256:15628045c830abf68533247afd2cb04683b5ce6f4e30d5401a5ef6f5182280de"},
-    {file = "typeguard-3.0.1.tar.gz", hash = "sha256:beb0e67c5dc76eea4a6d00a6606d444d899589908362960769d0c4a1d32bca70"},
+    {file = "typeguard-3.0.2-py3-none-any.whl", hash = "sha256:bbe993854385284ab42fd5bd3bee6f6556577ce8b50696d6cb956d704f286c8e"},
+    {file = "typeguard-3.0.2.tar.gz", hash = "sha256:fee5297fdb28f8e9efcb8142b5ee219e02375509cd77ea9d270b5af826358d5a"},
 ]

 [package.dependencies]
@@ -1261,14 +1233,14 @@ test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.

 [[package]]
 name = "types-python-dateutil"
-version = "2.8.19.10"
+version = "2.8.19.11"
 description = "Typing stubs for python-dateutil"
 category = "dev"
 optional = false
 python-versions = "*"
 files = [
-    {file = "types-python-dateutil-2.8.19.10.tar.gz", hash = "sha256:c640f2eb71b4b94a9d3bfda4c04250d29a24e51b8bad6e12fddec0cf6e96f7a3"},
-    {file = "types_python_dateutil-2.8.19.10-py3-none-any.whl", hash = "sha256:fbecd02c19cac383bf4a16248d45ffcff17c93a04c0794be5f95d42c6aa5de39"},
+    {file = "types-python-dateutil-2.8.19.11.tar.gz", hash = "sha256:de66222c54318c2e05ceb4956976d16696240a45fc2c98e54bfe9a56ce5e1eff"},
+    {file = "types_python_dateutil-2.8.19.11-py3-none-any.whl", hash = "sha256:357553f8056cfbb8ce8ea0ca4a6a3480268596748360df73a94c2b8c113a5b06"},
 ]

 [[package]]
@@ -1349,4 +1321,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "8fa62f96cc77eac773497573dcbdd5666173cbec56374fea73a814f3fb7f5338"
+content-hash = "4eb179bbb559eb12dd73f673a8e7f80706386c88f5d777d5de4ca9b7612ace60"
@@ -11,7 +11,7 @@
 name = "obsidian-metadata"
 readme = "README.md"
 repository = "https://github.com/natelandau/obsidian-metadata"
-version = "0.10.0"
+version = "0.11.1"

 [tool.poetry.scripts] # https://python-poetry.org/docs/pyproject/#scripts
 obsidian-metadata = "obsidian_metadata.cli:app"
@@ -20,11 +20,11 @@
 loguru = "^0.6.0"
 python = "^3.10"
 questionary = "^1.10.0"
-regex = "^2022.10.31"
-rich = "^13.3.2"
+regex = "^2023.3.23"
+rich = "^13.3.3"
 ruamel-yaml = "^0.17.21"
 shellingham = "^1.5.0.post1"
-tomlkit = "^0.11.6"
+tomlkit = "^0.11.7"
 typer = "^0.7.0"

 [tool.poetry.group.test.dependencies]
@@ -35,18 +35,18 @@
 pytest-xdist = "^3.2.1"

 [tool.poetry.group.dev.dependencies]
-black = "^23.1.0"
+black = "^23.3.0"
 commitizen = "^2.42.1"
 coverage = "^7.2.2"
 interrogate = "^1.5.0"
 mypy = "^1.1.1"
 pdoc = "^13.0.1"
-poethepoet = "^0.18.1"
-pre-commit = "^3.2.0"
-ruff = "^0.0.257"
+poethepoet = "^0.19.0"
+pre-commit = "^3.2.1"
+ruff = "^0.0.259"
 sh = "2.0.3"
-typeguard = "^3.0.1"
-types-python-dateutil = "^2.8.19.10"
+typeguard = "^3.0.2"
+types-python-dateutil = "^2.8.19.11"
 vulture = "^2.7"

 [tool.black]
@@ -57,7 +57,7 @@
 changelog_incremental = true
 tag_format = "v$version"
 update_changelog_on_bump = true
-version = "0.10.0"
+version = "0.11.1"
 version_files = ["pyproject.toml:version", "src/obsidian_metadata/__version__.py:__version__"]

 [tool.coverage.report] # https://coverage.readthedocs.io/en/latest/config.html#report
@@ -1,2 +1,2 @@
 """obsidian-metadata version."""
-__version__ = "0.10.0"
+__version__ = "0.11.1"
@@ -10,6 +10,7 @@ from obsidian_metadata._utils.utilities import (
     dict_keys_to_lower,
     dict_values_to_lists_strings,
     docstring_parameter,
+    inline_metadata_from_string,
     merge_dictionaries,
     remove_markdown_sections,
     rename_in_dict,
@@ -27,6 +28,7 @@ __all__ = [
     "dict_values_to_lists_strings",
     "docstring_parameter",
     "LoggerManager",
+    "inline_metadata_from_string",
     "merge_dictionaries",
     "rename_in_dict",
     "remove_markdown_sections",
@@ -178,8 +178,7 @@ class LoggerManager:
         self.log_level = log_level

         if self.log_file == Path("/logs") and self.log_to_file: # pragma: no cover
-            console.print("No log file specified")
-            raise typer.Exit(1)
+            raise typer.BadParameter("No log file specified")

         if self.verbosity >= VerboseLevel.TRACE.value:
             logger.remove()
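The hunk above swaps a printed message plus `typer.Exit(1)` for a single `typer.BadParameter`, which lets Typer/Click render the message as a usage error. A minimal standalone sketch of that behavior; the command and option names here are hypothetical, not the project's CLI:

```python
import typer

app = typer.Typer()


@app.command()
def main(log_to_file: bool = False, log_file: str = "/logs") -> None:
    # Hypothetical stand-in for the LoggerManager guard above: BadParameter is
    # caught by Click, printed as "Error: No log file specified", and exits with code 2.
    if log_to_file and log_file == "/logs":
        raise typer.BadParameter("No log file specified")
    typer.echo("logging configured")


if __name__ == "__main__":
    app()
```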
@@ -21,24 +21,26 @@ def clean_dictionary(dictionary: dict[str, Any]) -> dict[str, Any]:
     Returns:
         dict: Cleaned dictionary
     """
-    new_dict = {key.strip(): value for key, value in dictionary.items()}
-    new_dict = {key.strip("*[]#"): value for key, value in new_dict.items()}
+    new_dict = copy.deepcopy(dictionary)
+    new_dict = {key.strip("*[]# "): value for key, value in new_dict.items()}
     for key, value in new_dict.items():
-        new_dict[key] = [s.strip("*[]#") for s in value if isinstance(value, list)]
+        if isinstance(value, list):
+            new_dict[key] = [s.strip("*[]# ") for s in value if isinstance(value, list)]
+        elif isinstance(value, str):
+            new_dict[key] = value.strip("*[]# ")

     return new_dict


 def clear_screen() -> None: # pragma: no cover
     """Clear the screen."""
     # for windows
     _ = system("cls") if name == "nt" else system("clear")


 def dict_contains(
     dictionary: dict[str, list[str]], key: str, value: str = None, is_regex: bool = False
 ) -> bool:
-    """Check if a dictionary contains a key or if a specified key contains a value.
+    """Check if a dictionary contains a key or if a key contains a value.

     Args:
         dictionary (dict): Dictionary to check
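For reference, a self-contained sketch of the revised `clean_dictionary` above and what it produces; the sample input is invented for illustration, and the redundant per-element `isinstance` guard from the diff is dropped:

```python
import copy
from typing import Any


def clean_dictionary(dictionary: dict[str, Any]) -> dict[str, Any]:
    """Strip leading/trailing '*', '[', ']', '#', and spaces from keys and values."""
    new_dict = copy.deepcopy(dictionary)
    new_dict = {key.strip("*[]# "): value for key, value in new_dict.items()}
    for key, value in new_dict.items():
        if isinstance(value, list):
            new_dict[key] = [s.strip("*[]# ") for s in value]
        elif isinstance(value, str):
            new_dict[key] = value.strip("*[]# ")
    return new_dict


# String values are now cleaned too, not only list values.
print(clean_dictionary({"**tags**": ["#daily ", "[[projects]]"], "#status": "[[active]]"}))
# {'tags': ['daily', 'projects'], 'status': 'active'}
```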
@@ -47,7 +49,7 @@ def dict_contains(
         is_regex (bool, optional): Whether the key is a regex. Defaults to False.

     Returns:
-        bool: Whether the dictionary contains the key
+        bool: Whether the dictionary contains the key or value
     """
     if value is None:
         if is_regex:
@@ -55,13 +57,11 @@
         return key in dictionary

     if is_regex:
-        found_keys = []
         for _key in dictionary:
-            if re.search(key, str(_key)):
-                found_keys.append(
-                    any(re.search(value, _v) for _v in dictionary[_key]),
-                )
-        return any(found_keys)
+            if re.search(key, str(_key)) and any(re.search(value, _v) for _v in dictionary[_key]):
+                return True
+
+        return False

     return key in dictionary and value in dictionary[key]
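A runnable sketch of `dict_contains` with the simplified regex branch above; the non-regex paths follow the context lines in the hunk, while the value-less regex branch is only partially visible and is filled in here as an assumption:

```python
import re


def dict_contains(
    dictionary: dict[str, list[str]], key: str, value: str = None, is_regex: bool = False
) -> bool:
    if value is None:
        if is_regex:
            # Assumed body for this branch; only the plain-key fallback is shown in the diff.
            return any(re.search(key, str(_key)) for _key in dictionary)
        return key in dictionary

    if is_regex:
        for _key in dictionary:
            if re.search(key, str(_key)) and any(re.search(value, _v) for _v in dictionary[_key]):
                return True

        return False

    return key in dictionary and value in dictionary[key]


frontmatter = {"tags": ["daily", "work"], "status": ["active"]}
print(dict_contains(frontmatter, "tags", "work"))                     # True
print(dict_contains(frontmatter, r"stat.*", r"^act", is_regex=True))  # True
print(dict_contains(frontmatter, "area"))                             # False
```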
@@ -93,6 +93,7 @@ def dict_values_to_lists_strings(

     {key: sorted(new_dict[key]) for key in sorted(new_dict)}
     """
+    dictionary = copy.deepcopy(dictionary)
    new_dict = {}

     if strip_null_values:
@@ -100,7 +101,7 @@
             if isinstance(value, list):
                 new_dict[key] = sorted([str(item) for item in value if item is not None])
             elif isinstance(value, dict):
-                new_dict[key] = dict_values_to_lists_strings(value)  # type: ignore[assignment]
+                new_dict[key] = dict_values_to_lists_strings(value, strip_null_values=True)  # type: ignore[assignment]
             elif value is None or value == "None" or not value:
                 new_dict[key] = []
             else:
@@ -110,11 +111,11 @@ def dict_values_to_lists_strings(
|
||||
|
||||
for key, value in dictionary.items():
|
||||
if isinstance(value, list):
|
||||
new_dict[key] = sorted([str(item) for item in value])
|
||||
new_dict[key] = sorted([str(item) if item is not None else "" for item in value])
|
||||
elif isinstance(value, dict):
|
||||
new_dict[key] = dict_values_to_lists_strings(value) # type: ignore[assignment]
|
||||
else:
|
||||
new_dict[key] = [str(value)]
|
||||
new_dict[key] = [str(value) if value is not None else ""]
|
||||
|
||||
return new_dict
|
||||
|
||||
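
A quick sketch of the new `None` handling (illustrative only; it mirrors the tests added later in this diff):

```python
from obsidian_metadata._utils import dict_values_to_lists_strings

data = {"key1": None, "key2": ["value", None]}

# By default, None values are converted to empty strings...
assert dict_values_to_lists_strings(data) == {"key1": [""], "key2": ["", "value"]}

# ...and dropped entirely when strip_null_values=True.
assert dict_values_to_lists_strings(data, strip_null_values=True) == {
    "key1": [],
    "key2": ["value"],
}
```
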
@@ -182,6 +183,21 @@ def docstring_parameter(*sub: Any) -> Any:
return dec


def inline_metadata_from_string(string: str) -> list[tuple[Any, ...]]:
"""Search for inline metadata in a string and return a list tuples containing (key, value).

Args:
string (str): String to get metadata from

Returns:
tuple[str]: (key, value)
"""
from obsidian_metadata.models import Patterns

results = Patterns().find_inline_metadata.findall(string)
return [tuple(filter(None, x)) for x in results]


def merge_dictionaries(dict1: dict, dict2: dict) -> dict:
"""Merge two dictionaries. When the values are lists, they are merged and sorted.
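
A minimal usage sketch of the new helper, taken from the test cases added later in this diff (illustrative only, not part of the commit):

```python
from obsidian_metadata._utils import inline_metadata_from_string

assert inline_metadata_from_string("") == []
assert inline_metadata_from_string("this is content that has no inline metadata") == []
assert inline_metadata_from_string("test::test") == [("test", "test")]
assert inline_metadata_from_string("paragraph [key::value] paragraph") == [("key", "value")]
```
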
@@ -192,22 +208,24 @@ def merge_dictionaries(dict1: dict, dict2: dict) -> dict:
Returns:
dict: Merged dictionary.
"""
for k, v in dict2.items():
if k in dict1:
if isinstance(v, list):
dict1[k].extend(v)
d1 = copy.deepcopy(dict1)
d2 = copy.deepcopy(dict2)

for _key in d1:
if not isinstance(d1[_key], list):
raise TypeError(f"Key {_key} is not a list.")
for _key in d2:
if not isinstance(d2[_key], list):
raise TypeError(f"Key {_key} is not a list.")

for k, v in d2.items():
if k in d1:
d1[k].extend(v)
d1[k] = sorted(set(d1[k]))
else:
dict1[k] = v
d1[k] = sorted(set(v))

for k, v in dict1.items():
if isinstance(v, list):
dict1[k] = sorted(set(v))
elif isinstance(v, dict): # pragma: no cover
for kk, vv in v.items():
if isinstance(vv, list):
v[kk] = sorted(set(vv))

return dict(sorted(dict1.items()))
return dict(sorted(d1.items()))


def rename_in_dict(
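
A short sketch of the reworked merge behavior (illustrative, mirroring the new tests below): inputs are deep-copied and validated as dictionaries of lists, then merged, de-duplicated, and sorted.

```python
from obsidian_metadata._utils import merge_dictionaries

d1 = {"key1": ["a", "c"], "key2": ["a", "b"]}
d2 = {"key1": ["a", "b"], "key2": ["a", "c"]}

# Values are merged, de-duplicated, and sorted; the originals are untouched.
assert merge_dictionaries(d1, d2) == {"key1": ["a", "b", "c"], "key2": ["a", "b", "c"]}
assert d1 == {"key1": ["a", "c"], "key2": ["a", "b"]}

# Non-list values are rejected up front:
# merge_dictionaries({"key1": "value1"}, {})  ->  TypeError("Key key1 is not a list.")
```
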
@@ -241,7 +259,7 @@ def remove_markdown_sections(
strip_inlinecode: bool = False,
strip_frontmatter: bool = False,
) -> str:
"""Strip markdown sections from text.
"""Strip unwanted markdown sections from text. This is used to remove code blocks and frontmatter from the body of notes before tags and inline metadata are processed.

Args:
text (str): Text to remove code blocks from
@@ -256,7 +274,7 @@ def remove_markdown_sections(
text = re.sub(r"`{3}.*?`{3}", "", text, flags=re.DOTALL)

if strip_inlinecode:
text = re.sub(r"`.*?`", "", text)
text = re.sub(r"(?<!`{2})`[^`]+?`", "", text)

if strip_frontmatter:
text = re.sub(r"^\s*---.*?---", "", text, flags=re.DOTALL)
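
A standalone sketch (not part of the commit) of what the tightened inline-code pattern matches; the negative lookbehind stops it from consuming the backticks of a fence, which are left to the code-block branch above:

```python
import re

text = 'Lorem ipsum `inline_code` lorem ipsum.\n```bash\necho "Hello world"\n```'

# Single-backtick spans are removed; the triple-backtick fence survives.
print(re.sub(r"(?<!`{2})`[^`]+?`", "", text))
# Lorem ipsum  lorem ipsum.
# ```bash
# echo "Hello world"
# ```
```
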
@@ -319,4 +337,4 @@ def version_callback(value: bool) -> None:
"""Print version and exit."""
if value:
console.print(f"{__package__.split('.')[0]}: v{__version__}")
raise typer.Exit()
raise typer.Exit(0)

@@ -53,6 +53,13 @@ def main(
dir_okay=False,
file_okay=True,
),
import_csv: Path = typer.Option(
None,
help="Import a CSV file with bulk updates to metadata.",
show_default=False,
dir_okay=False,
file_okay=True,
),
vault_path: Path = typer.Option(
None,
help="Path to Obsidian vault",
@@ -125,7 +132,7 @@ def main(
config: Config = Config(config_path=config_file, vault_path=vault_path)
if len(config.vaults) == 0:
typer.echo("No vaults configured. Exiting.")
raise typer.Exit(1)
raise typer.BadParameter("No vaults configured. Exiting.")

if len(config.vaults) == 1:
application = Application(dry_run=dry_run, config=config.vaults[0])
@@ -153,6 +160,10 @@ def main(
path = Path(export_template).expanduser().resolve()
application.noninteractive_export_template(path)
raise typer.Exit(code=0)
if import_csv is not None:
path = Path(import_csv).expanduser().resolve()
application.noninteractive_bulk_import(path)
raise typer.Exit(code=0)

application.application_main()

@@ -548,6 +548,41 @@ class Application:

alerts.success(f"Moved inline metadata to {location.value} in {num_changed} notes")

def noninteractive_bulk_import(self, path: Path) -> None:
"""Bulk update metadata from a CSV from the command line.

Args:
path: Path to the CSV file containing the metadata to update.
"""
self._load_vault()
note_paths = [
str(n.note_path.relative_to(self.vault.vault_path)) for n in self.vault.all_notes
]
dict_from_csv = validate_csv_bulk_imports(path, note_paths)
num_changed = self.vault.update_from_dict(dict_from_csv)
if num_changed == 0:
alerts.warning("No notes were changed")
return

alerts.success(f"{num_changed} notes specified in '{path}'")
alerts.info("Review changes and commit.")
while True:
self.vault.info()

match self.questions.ask_application_main():
case "vault_actions":
self.application_vault()
case "inspect_metadata":
self.application_inspect_metadata()
case "review_changes":
self.review_changes()
case "commit_changes":
self.commit_changes()
case _:
break

console.print("Done!")

def noninteractive_export_csv(self, path: Path) -> None:
"""Export the vault metadata to CSV."""
self._load_vault()

17
src/obsidian_metadata/models/exceptions.py
Normal file
@@ -0,0 +1,17 @@
"""Custom exceptions for the obsidian_metadata package."""


class ObsidianMetadataError(Exception):
"""Base exception for the obsidian_metadata package."""


class FrontmatterError(ObsidianMetadataError):
"""Exception for errors in the frontmatter."""


class InlineMetadataError(ObsidianMetadataError):
"""Exception for errors in the inlined metadata."""


class InlineTagError(ObsidianMetadataError):
"""Exception for errors in the inline tags."""
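
A minimal sketch of how the new hierarchy can be used (illustrative only; `parse_note` below is a hypothetical stand-in): because the three specific errors subclass `ObsidianMetadataError`, a single except clause can catch any of them.

```python
from obsidian_metadata.models.exceptions import FrontmatterError, ObsidianMetadataError


def parse_note(text: str) -> None:
    """Hypothetical parser used only for illustration."""
    raise FrontmatterError("invalid YAML in frontmatter block")


try:
    parse_note("---\ninvalid = =\n---")
except ObsidianMetadataError as e:
    # Catches FrontmatterError, InlineMetadataError, and InlineTagError alike.
    print(f"metadata error: {e}")
```
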
@@ -13,13 +13,20 @@ from obsidian_metadata._utils import (
delete_from_dict,
dict_contains,
dict_values_to_lists_strings,
inline_metadata_from_string,
merge_dictionaries,
remove_markdown_sections,
rename_in_dict,
)
from obsidian_metadata._utils.alerts import logger as log
from obsidian_metadata._utils.console import console
from obsidian_metadata.models import Patterns # isort: ignore
from obsidian_metadata.models.enums import MetadataType
from obsidian_metadata.models.exceptions import (
FrontmatterError,
InlineMetadataError,
InlineTagError,
)

PATTERNS = Patterns()
INLINE_TAG_KEY: str = "inline_tag"
@@ -230,7 +237,7 @@ class Frontmatter:
try:
frontmatter: dict = yaml.load(frontmatter_block)
except Exception as e: # noqa: BLE001
raise AttributeError(e) from e
raise FrontmatterError(e) from e

if frontmatter is None or frontmatter == [None]:
return {}
@@ -400,15 +407,26 @@ class InlineMetadata:
strip_inlinecode=True,
strip_frontmatter=True,
)
all_results = PATTERNS.find_inline_metadata.findall(content)
stripped_null_values = [tuple(filter(None, x)) for x in all_results]

found_inline_metadata = inline_metadata_from_string(content)
inline_metadata: dict[str, list[str]] = {}
for k, v in stripped_null_values:
if k in inline_metadata:
inline_metadata[k].append(str(v))
else:
inline_metadata[k] = [str(v)]

try:
for k, v in found_inline_metadata:
if not k:
log.trace(f"Skipping empty key associated with value: {v}")
continue
if k in inline_metadata:
inline_metadata[k].append(str(v))
else:
inline_metadata[k] = [str(v)]
except ValueError as e:
raise InlineMetadataError(
f"Error parsing inline metadata: {found_inline_metadata}"
) from e
except AttributeError as e:
raise InlineMetadataError(
f"Error parsing inline metadata: {found_inline_metadata}"
) from e

return clean_dictionary(inline_metadata)

@@ -537,15 +555,22 @@ class InlineTags:
Returns:
list[str]: Inline tags from the note.
"""
return sorted(
PATTERNS.find_inline_tags.findall(
remove_markdown_sections(
file_content,
strip_codeblocks=True,
strip_inlinecode=True,
try:
return sorted(
PATTERNS.find_inline_tags.findall(
remove_markdown_sections(
file_content,
strip_codeblocks=True,
strip_inlinecode=True,
)
)
)
)
except AttributeError as e:
raise InlineTagError("Error parsing inline tags.") from e
except TypeError as e:
raise InlineTagError("Error parsing inline tags.") from e
except ValueError as e:
raise InlineTagError("Error parsing inline tags.") from e

def add(self, new_tag: str | list[str]) -> bool:
"""Add a new inline tag.

@@ -10,7 +10,7 @@ import rich.repr
import typer
from rich.table import Table

from obsidian_metadata._utils import alerts
from obsidian_metadata._utils import alerts, inline_metadata_from_string
from obsidian_metadata._utils.alerts import logger as log
from obsidian_metadata._utils.console import console
from obsidian_metadata.models import (
@@ -21,6 +21,11 @@ from obsidian_metadata.models import (
MetadataType,
Patterns,
)
from obsidian_metadata.models.exceptions import (
FrontmatterError,
InlineMetadataError,
InlineTagError,
)

PATTERNS = Patterns()

@@ -50,19 +55,24 @@ class Note:
try:
with self.note_path.open():
self.file_content: str = self.note_path.read_text()
self.original_file_content: str = self.file_content
except FileNotFoundError as e:
alerts.error(f"Note {self.note_path} not found. Exiting")
raise typer.Exit(code=1) from e

try:
self.frontmatter: Frontmatter = Frontmatter(self.file_content)
except AttributeError as e:
alerts.error(f"Note {self.note_path} has invalid frontmatter.\n{e}")
self.inline_metadata: InlineMetadata = InlineMetadata(self.file_content)
self.tags: InlineTags = InlineTags(self.file_content)
except FrontmatterError as e:
alerts.error(f"Invalid frontmatter: {self.note_path}\n{e}")
raise typer.Exit(code=1) from e
except InlineMetadataError as e:
alerts.error(f"Error parsing inline metadata: {self.note_path}.\n{e}")
raise typer.Exit(code=1) from e
except InlineTagError as e:
alerts.error(f"Error parsing inline tags: {self.note_path}\n{e}")
raise typer.Exit(code=1) from e

self.tags: InlineTags = InlineTags(self.file_content)
self.inline_metadata: InlineMetadata = InlineMetadata(self.file_content)
self.original_file_content: str = self.file_content

def __rich_repr__(self) -> rich.repr.Result: # pragma: no cover
"""Define rich representation of Vault."""
@@ -552,10 +562,9 @@ class Note:
value_2 (str, optional): New value.

"""
all_results = PATTERNS.find_inline_metadata.findall(self.file_content)
stripped_null_values = [tuple(filter(None, x)) for x in all_results]
found_inline_metadata = inline_metadata_from_string(self.file_content)

for _k, _v in stripped_null_values:
for _k, _v in found_inline_metadata:
if re.search(key, _k):
if value_2 is None:
if re.search(rf"{key}[^\\w\\d_-]+", _k):

@@ -297,7 +297,7 @@ class Questions:
{"name": "Inspect Metadata", "value": "inspect_metadata"},
{"name": "Filter Notes in Scope", "value": "filter_notes"},
questionary.Separator("-------------------------------"),
{"name": "Bulk changes from imported CSV", "value": "import_from_csv"},
{"name": "Import bulk changes from CSV", "value": "import_from_csv"},
{"name": "Add Metadata", "value": "add_metadata"},
{"name": "Delete Metadata", "value": "delete_metadata"},
{"name": "Rename Metadata", "value": "rename_metadata"},

@@ -3,6 +3,7 @@

import pytest

from obsidian_metadata.models.exceptions import FrontmatterError
from obsidian_metadata.models.metadata import Frontmatter

FRONTMATTER_CONTENT: str = """
@@ -84,7 +85,7 @@ tags: tag
invalid = = "content"
---
"""
with pytest.raises(AttributeError):
with pytest.raises(FrontmatterError):
Frontmatter(fn)

@@ -1,6 +1,8 @@
# type: ignore
"""Test inline metadata from metadata.py."""
import pytest

from obsidian_metadata.models.exceptions import InlineMetadataError
from obsidian_metadata.models.metadata import InlineMetadata

FRONTMATTER_CONTENT: str = """
@@ -77,6 +79,21 @@ def test__grab_inline_metadata_2():
}


def test__grab_inline_metadata_3(mocker):
"""Test grab inline metadata.

GIVEN content that has inline metadata
WHEN an error occurs parsing the inline metadata
THEN raise an InlineMetadataError and pass the error message
"""
mocker.patch(
"obsidian_metadata.models.metadata.inline_metadata_from_string",
return_value=[("key")],
)
with pytest.raises(InlineMetadataError, match=r"Error parsing inline metadata: \['key'\]"):
InlineMetadata("")


def test_add_1():
"""Test InlineMetadata add() method.

@@ -8,6 +8,7 @@ import pytest
import typer

from obsidian_metadata.models.enums import InsertLocation, MetadataType
from obsidian_metadata.models.exceptions import InlineMetadataError, InlineTagError
from obsidian_metadata.models.notes import Note
from tests.helpers import Regex

@@ -88,6 +89,38 @@ def test_create_note_2() -> None:
Note(note_path=broken_fm)


def test_create_note_3(sample_note, mocker) -> None:
"""Test creating a note object.

GIVEN a text file with invalid inline metadata
WHEN the note is initialized
THEN a typer exit is raised
"""
mocker.patch(
"obsidian_metadata.models.notes.InlineMetadata",
side_effect=InlineMetadataError("error message"),
)

with pytest.raises(typer.Exit):
Note(note_path=sample_note)


def test_create_note_4(sample_note, mocker) -> None:
"""Test creating a note object.

GIVEN a text file
WHEN there is an error parsing the inline tags
THEN a typer exit is raised
"""
mocker.patch(
"obsidian_metadata.models.notes.InlineTags",
side_effect=InlineTagError("error message"),
)

with pytest.raises(typer.Exit):
Note(note_path=sample_note)


def test_add_metadata_method_1(short_notes):
"""Test adding metadata.

@@ -101,7 +101,7 @@ shared_key1: 'shared_key1_value'
"""
no_fm_result = '### Header\'s number 3 [📅] "+$2.00" 🤷'

assert pattern.top_with_header.search(no_fm_or_header).group("top") == ""
assert not pattern.top_with_header.search(no_fm_or_header).group("top")
assert pattern.top_with_header.search(fm_and_header).group("top") == fm_and_header_result
assert pattern.top_with_header.search(no_fm).group("top") == no_fm_result

@@ -10,11 +10,78 @@ from obsidian_metadata._utils import (
dict_contains,
dict_keys_to_lower,
dict_values_to_lists_strings,
inline_metadata_from_string,
merge_dictionaries,
remove_markdown_sections,
rename_in_dict,
validate_csv_bulk_imports,
)
from tests.helpers import Regex, remove_ansi


def test_clean_dictionary_1():
"""Test clean_dictionary() function.

GIVEN a dictionary passed to clean_dictionary()
WHEN the dictionary is empty
THEN return an empty dictionary
"""
assert clean_dictionary({}) == {}


def test_clean_dictionary_2():
"""Test clean_dictionary() function.

GIVEN a dictionary passed to clean_dictionary()
WHEN keys contain leading/trailing spaces
THEN remove the spaces from the keys
"""
assert clean_dictionary({" key 1 ": "value 1"}) == {"key 1": "value 1"}


def test_clean_dictionary_3():
"""Test clean_dictionary() function.

GIVEN a dictionary passed to clean_dictionary()
WHEN values contain leading/trailing spaces
THEN remove the spaces from the values
"""
assert clean_dictionary({"key 1": " value 1 "}) == {"key 1": "value 1"}


def test_clean_dictionary_4():
"""Test clean_dictionary() function.

GIVEN a dictionary passed to clean_dictionary()
WHEN keys or values contain leading/trailing asterisks
THEN remove the asterisks from the keys or values
"""
assert clean_dictionary({"**key_1**": ["**value 1**", "value 2"]}) == {
"key_1": ["value 1", "value 2"]
}


def test_clean_dictionary_5():
"""Test clean_dictionary() function.

GIVEN a dictionary passed to clean_dictionary()
WHEN keys or values contain leading/trailing brackets
THEN remove the brackets from the keys and values
"""
assert clean_dictionary({"[[key_1]]": ["[[value 1]]", "[value 2]"]}) == {
"key_1": ["value 1", "value 2"]
}


def test_clean_dictionary_6():
"""Test clean_dictionary() function.

GIVEN a dictionary passed to clean_dictionary()
WHEN keys or values contain leading/trailing hashtags
THEN remove the hashtags from the keys and values
"""
assert clean_dictionary({"#key_1": ["#value 1", "value 2#"]}) == {
"key_1": ["value 1", "value 2"]
}


def test_delete_from_dict_1():
@@ -174,19 +241,94 @@ def test_delete_from_dict_11():
) == {"key1": ["value1"], "key2": ["value2"]}


def test_dict_contains() -> None:
"""Test dict_contains."""
d = {"key1": ["value1", "value2"], "key2": ["value3", "value4"], "key3": ["value5", "value6"]}
def test_dict_contains_1():
"""Test dict_contains() function.

assert dict_contains(d, "key1") is True
assert dict_contains(d, "key5") is False
assert dict_contains(d, "key1", "value1") is True
assert dict_contains(d, "key1", "value5") is False
assert dict_contains(d, "key[1-2]", is_regex=True) is True
assert dict_contains(d, "^1", is_regex=True) is False
assert dict_contains(d, r"key\d", r"value\d", is_regex=True) is True
assert dict_contains(d, "key1$", "^alue", is_regex=True) is False
assert dict_contains(d, r"key\d", "value5", is_regex=True) is True
GIVEN calling dict_contains() with a dictionary
WHEN the dictionary is empty
THEN the function should return False
"""
assert dict_contains({}, "key1") is False


def test_dict_contains_2():
"""Test dict_contains() function.

GIVEN calling dict_contains() with a dictionary
WHEN the key is not in the dictionary
THEN the function should return False
"""
assert dict_contains({"key1": "value1"}, "key2") is False


def test_dict_contains_3():
"""Test dict_contains() function.

GIVEN calling dict_contains() with a dictionary
WHEN the key is in the dictionary
THEN the function should return True
"""
assert dict_contains({"key1": "value1"}, "key1") is True


def test_dict_contains_4():
"""Test dict_contains() function.

GIVEN calling dict_contains() with a dictionary
WHEN the key and value are in the dictionary
THEN the function should return True
"""
assert dict_contains({"key1": "value1"}, "key1", "value1") is True


def test_dict_contains_5():
"""Test dict_contains() function.

GIVEN calling dict_contains() with a dictionary
WHEN the key and value are not in the dictionary
THEN the function should return False
"""
assert dict_contains({"key1": "value1"}, "key1", "value2") is False


def test_dict_contains_6():
"""Test dict_contains() function.

GIVEN calling dict_contains() with a dictionary
WHEN a regex is used for the key and the key is in the dictionary
THEN the function should return True
"""
assert dict_contains({"key1": "value1"}, r"key\d", is_regex=True) is True


def test_dict_contains_7():
"""Test dict_contains() function.

GIVEN calling dict_contains() with a dictionary
WHEN a regex is used for the key and the key is not in the dictionary
THEN the function should return False
"""
assert dict_contains({"key1": "value1"}, r"key\d\d", is_regex=True) is False


def test_dict_contains_8():
"""Test dict_contains() function.

GIVEN calling dict_contains() with a dictionary
WHEN a regex is used for a value and the value is in the dictionary
THEN the function should return True
"""
assert dict_contains({"key1": "value1"}, "key1", r"\w+", is_regex=True) is True


def test_dict_contains_9():
"""Test dict_contains() function.

GIVEN calling dict_contains() with a dictionary
WHEN a regex is used for a value and the value is not in the dictionary
THEN the function should return False
"""
assert dict_contains({"key1": "value1"}, "key1", r"\d{2}", is_regex=True) is False


def test_dict_keys_to_lower() -> None:
@@ -200,45 +342,251 @@ def test_dict_keys_to_lower() -> None:
assert dict_keys_to_lower(test_dict) == {"key1": "Value1", "key2": "Value2", "key3": "Value3"}


def test_dict_values_to_lists_strings():
"""Test converting dictionary values to lists of strings."""
dictionary = {
"key1": "value1",
"key2": ["value2", "value3", None],
"key3": {"key4": "value4"},
"key5": {"key6": {"key7": "value7"}},
"key6": None,
"key8": [1, 3, None, 4],
"key9": [None, "", "None"],
"key10": "None",
"key11": "",
}
def test_dict_values_to_lists_strings_1():
"""Test the dict_values_to_lists_strings() function.

result = dict_values_to_lists_strings(dictionary)
assert result == {
"key1": ["value1"],
"key10": ["None"],
"key11": [""],
"key2": ["None", "value2", "value3"],
"key3": {"key4": ["value4"]},
"key5": {"key6": {"key7": ["value7"]}},
"key6": ["None"],
"key8": ["1", "3", "4", "None"],
"key9": ["", "None", "None"],
}
GIVEN a dictionary passed to the dict_values_to_lists_strings() function
WHEN the dictionary is empty
THEN the function should return an empty dictionary
"""
assert dict_values_to_lists_strings({}) == {}
assert dict_values_to_lists_strings({}, strip_null_values=True) == {}

result = dict_values_to_lists_strings(dictionary, strip_null_values=True)
assert result == {

def test_dict_values_to_lists_strings_2():
"""Test the dict_values_to_lists_strings() function.

GIVEN a dictionary passed to the dict_values_to_lists_strings() function
WHEN the dictionary values are already lists of strings
THEN the function should return the dictionary
"""
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}
assert dict_values_to_lists_strings(test_dict) == {
"key1": ["value1"],
"key10": [],
"key11": [],
"key2": ["value2", "value3"],
"key3": {"key4": ["value4"]},
"key5": {"key6": {"key7": ["value7"]}},
"key6": [],
"key8": ["1", "3", "4"],
"key9": ["", "None"],
}
assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
"key1": ["value1"],
"key2": ["value2", "value3"],
}


def test_dict_values_to_lists_strings_3():
"""Test the dict_values_to_lists_strings() function.

GIVEN a dictionary passed to the dict_values_to_lists_strings() function
WHEN a value is None and strip_null_values is False
THEN convert None to an empty string
"""
test_dict = {"key1": None, "key2": ["value", None]}
assert dict_values_to_lists_strings(test_dict) == {"key1": [""], "key2": ["", "value"]}


def test_dict_values_to_lists_strings_4():
"""Test the dict_values_to_lists_strings() function.

GIVEN a dictionary passed to the dict_values_to_lists_strings() function
WHEN a value is None and strip_null_values is True
THEN remove null values
"""
test_dict = {"key1": None, "key2": ["value", None]}
assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
"key1": [],
"key2": ["value"],
}


def test_dict_values_to_lists_strings_5():
"""Test the dict_values_to_lists_strings() function.

GIVEN a dictionary passed to the dict_values_to_lists_strings() function
WHEN a value is the string "None" and strip_null_values is True or False
THEN ensure the value is not removed
"""
test_dict = {"key1": "None", "key2": [None, "None"]}
assert dict_values_to_lists_strings(test_dict) == {"key1": ["None"], "key2": ["", "None"]}
assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
"key1": [],
"key2": ["None"],
}


def test_dict_values_to_lists_strings_6():
"""Test the dict_values_to_lists_strings() function.

GIVEN a dictionary passed to the dict_values_to_lists_strings() function
WHEN a value is another dictionary
THEN ensure the values in the inner dictionary are converted to lists of strings
"""
test_dict = {"key1": {"key2": "value2", "key3": ["value3", None]}}
assert dict_values_to_lists_strings(test_dict) == {
"key1": {"key2": ["value2"], "key3": ["", "value3"]}
}
assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
"key1": {"key2": ["value2"], "key3": ["value3"]}
}


def test_inline_metadata_from_string_1():
"""Test inline_metadata_from_string() function.

GIVEN a string
WHEN the string is empty
THEN the function should return an empty list.
"""
assert inline_metadata_from_string("") == []


def test_inline_metadata_from_string_2():
"""Test inline_metadata_from_string() function.

GIVEN a string
WHEN the string contains nothing matching the inline metadata regex
THEN the function should return an empty list.
"""
assert inline_metadata_from_string("this is content that has no inline metadata") == []


def test_inline_metadata_from_string_3():
"""Test inline_metadata_from_string() function.

GIVEN a string
WHEN the string contains inline metadata
THEN the function should return the key value pair as a tuple within a list.
"""
assert inline_metadata_from_string("test::test") == [("test", "test")]


def test_inline_metadata_from_string_4():
"""Test inline_metadata_from_string() function.

GIVEN a string
WHEN the string contains multiple matches of inline metadata
THEN the function should return the key value pairs as a tuple within a list.
"""
content = """
test::test
paragraph [key::value] paragraph
> test2::test2
"""
assert inline_metadata_from_string(content) == [
("test", "test"),
("key", "value"),
("test2", "test2"),
]


def test_merge_dictionaries_1():
"""Test merge_dictionaries() function.

GIVEN two dictionaries supplied to the merge_dictionaries() function
WHEN a value in dict1 is not a list
THEN raise a TypeError
"""
test_dict_1 = {"key1": "value1", "key2": "value2"}
test_dict_2 = {"key3": ["value3"], "key4": ["value4"]}

with pytest.raises(TypeError, match=r"key.*is not a list"):
merge_dictionaries(test_dict_1, test_dict_2)


def test_merge_dictionaries_2():
"""Test merge_dictionaries() function.

GIVEN two dictionaries supplied to the merge_dictionaries() function
WHEN a value in dict2 is not a list
THEN raise a TypeError
"""
test_dict_1 = {"key3": ["value3"], "key4": ["value4"]}
test_dict_2 = {"key1": "value1", "key2": "value2"}

with pytest.raises(TypeError, match=r"key.*is not a list"):
merge_dictionaries(test_dict_1, test_dict_2)


def test_merge_dictionaries_3():
"""Test merge_dictionaries() function.

GIVEN two dictionaries supplied to the merge_dictionaries() function
WHEN keys and values in both dictionaries are unique
THEN return a dictionary with the keys and values from both dictionaries
"""
test_dict_1 = {"key1": ["value1"], "key2": ["value2"]}
test_dict_2 = {"key3": ["value3"], "key4": ["value4"]}

assert merge_dictionaries(test_dict_1, test_dict_2) == {
"key1": ["value1"],
"key2": ["value2"],
"key3": ["value3"],
"key4": ["value4"],
}


def test_merge_dictionaries_4():
"""Test merge_dictionaries() function.

GIVEN two dictionaries supplied to the merge_dictionaries() function
WHEN keys in both dictionaries are not unique
THEN return a dictionary with the merged keys and values from both dictionaries
"""
test_dict_1 = {"key1": ["value1"], "key2": ["value2"]}
test_dict_2 = {"key1": ["value3"], "key2": ["value4"]}

assert merge_dictionaries(test_dict_1, test_dict_2) == {
"key1": ["value1", "value3"],
"key2": ["value2", "value4"],
}


def test_merge_dictionaries_5():
"""Test merge_dictionaries() function.

GIVEN two dictionaries supplied to the merge_dictionaries() function
WHEN keys and values in both dictionaries are not unique
THEN return a dictionary with the merged keys and values from both dictionaries
"""
test_dict_1 = {"key1": ["a", "c"], "key2": ["a", "b"]}
test_dict_2 = {"key1": ["a", "b"], "key2": ["a", "c"]}

assert merge_dictionaries(test_dict_1, test_dict_2) == {
"key1": ["a", "b", "c"],
"key2": ["a", "b", "c"],
}


def test_merge_dictionaries_6():
"""Test merge_dictionaries() function.

GIVEN two dictionaries supplied to the merge_dictionaries() function
WHEN one of the dictionaries is empty
THEN return the other dictionary
"""
test_dict_1 = {"key1": ["a", "c"], "key2": ["a", "b"]}
test_dict_2 = {}

assert merge_dictionaries(test_dict_1, test_dict_2) == {"key1": ["a", "c"], "key2": ["a", "b"]}

test_dict_1 = {}
test_dict_2 = {"key1": ["a", "c"], "key2": ["a", "b"]}
assert merge_dictionaries(test_dict_1, test_dict_2) == {"key1": ["a", "c"], "key2": ["a", "b"]}


def test_merge_dictionaries_7():
"""Test merge_dictionaries() function.

GIVEN two dictionaries supplied to the merge_dictionaries() function
WHEN keys and values in both dictionaries are not unique
THEN ensure the original dictionaries objects are not modified
"""
test_dict_1 = {"key1": ["a", "c"], "key2": ["a", "b"]}
test_dict_2 = {"key1": ["a", "b"], "key2": ["a", "c"]}

assert merge_dictionaries(test_dict_1, test_dict_2) == {
"key1": ["a", "b", "c"],
"key2": ["a", "b", "c"],
}
assert test_dict_1 == {"key1": ["a", "c"], "key2": ["a", "b"]}
assert test_dict_2 == {"key1": ["a", "b"], "key2": ["a", "c"]}


def test_rename_in_dict_1():
@@ -313,46 +661,197 @@ def test_rename_in_dict_5():
}


def test_remove_markdown_sections():
"""Test removing markdown sections."""
def test_remove_markdown_sections_1():
"""Test remove_markdown_sections() function.

GIVEN a string with markdown sections
WHEN the remove_markdown_sections() function is called with the default arguments
THEN return the string without removing any markdown sections
"""
text: str = """
---
key: value
---

Lorem ipsum `dolor sit` amet.
# heading

```bash
echo "Hello World"
echo "Hello world"
```

Lorem ipsum `inline_code` lorem ipsum.
```
echo "foo bar"
```

---
dd
---
"""

assert remove_markdown_sections(text) == text


def test_remove_markdown_sections_2():
"""Test remove_markdown_sections() function.

GIVEN a string with markdown sections
WHEN the remove_markdown_sections() function is called with strip_codeblocks set to True
THEN return the string without the codeblocks
"""
text: str = """
---
key: value
---

# heading

```bash
echo "Hello world"
```

Lorem ipsum `inline_code` lorem ipsum.
```
echo "foo bar"
```

---
dd
---
"""
result = remove_markdown_sections(text, strip_codeblocks=True)
assert "inline_code" in result
assert "```bash" not in result
assert "```" not in result
assert "foo" not in result
assert "world" not in result
assert "key: value" in result
assert "heading" in result
assert "Lorem ipsum" in result
assert "---\n" in result
assert "dd" in result


def test_remove_markdown_sections_3():
"""Test remove_markdown_sections() function.

GIVEN a string with markdown sections
WHEN the remove_markdown_sections() function is called with strip_inlinecode set to True
THEN return the string without the inline code
"""
text: str = """
---
key: value
---

# heading

```bash
echo "Hello world"
```

Lorem ipsum `inline_code` lorem ipsum.
```
echo "foo bar"
```

---
dd
---
"""
result = remove_markdown_sections(text, strip_inlinecode=True)
assert "`inline_code`" not in result
assert "```bash" in result
assert "```" in result
assert "foo" in result
assert "world" in result
assert "key: value" in result
assert "heading" in result
assert "Lorem ipsum" in result
assert "---\n" in result
assert "dd" in result


def test_remove_markdown_sections_4():
"""Test remove_markdown_sections() function.

GIVEN a string with markdown sections
WHEN the remove_markdown_sections() function is called with strip_frontmatter set to True
THEN return the string without the frontmatter
"""
text: str = """
---
key: value
---

# heading

```bash
echo "Hello world"
```

Lorem ipsum `inline_code` lorem ipsum.
```
echo "foo bar"
```

---
dd
---
"""
result = remove_markdown_sections(text, strip_frontmatter=True)
assert "`inline_code`" in result
assert "```bash" in result
assert "```" in result
assert "foo" in result
assert "world" in result
assert "key: value" not in result
assert "heading" in result
assert "Lorem ipsum" in result
assert "---\n" in result
assert "dd" in result


def test_remove_markdown_sections_5():
"""Test remove_markdown_sections() function.

GIVEN a string with markdown sections
WHEN the remove_markdown_sections() function is called with all arguments set to True
THEN return the string without the frontmatter, inline code, and codeblocks
"""
text: str = """
---
key: value
---

# heading

```bash
echo "Hello world"
```

Lorem ipsum `inline_code` lorem ipsum.
```
echo "foo bar"
```

---
dd
---
"""
result = remove_markdown_sections(
text,
strip_codeblocks=True,
strip_frontmatter=True,
strip_inlinecode=True,
text, strip_frontmatter=True, strip_inlinecode=True, strip_codeblocks=True
)
assert "```bash" not in result
assert "`dolor sit`" not in result
assert "---\nkey: value" not in result
assert "`" not in result

result = remove_markdown_sections(text)
assert "```bash" in result
assert "`dolor sit`" in result
assert "---\nkey: value" in result
assert "`" in result


def test_clean_dictionary():
"""Test cleaning a dictionary."""
dictionary = {" *key* ": ["**value**", "[[value2]]", "#value3"]}

new_dict = clean_dictionary(dictionary)
assert new_dict == {"key": ["value", "value2", "value3"]}
assert "`inline_code`" not in result
assert "bash" not in result
assert "```" not in result
assert "foo" not in result
assert "world" not in result
assert "key: value" not in result
assert "heading" in result
assert "Lorem ipsum" in result
assert "---\n" in result
assert "dd" in result


def test_validate_csv_bulk_imports_1(tmp_path):