Mirror of https://github.com/natelandau/obsidian-metadata.git (synced 2025-11-16 08:53:48 -05:00)

Compare commits (4 commits):

- c75d18200e
- ffdac91537
- e8f408ee33
- 1dd3ddfb22
@@ -1,3 +1,9 @@
+## v0.11.0 (2023-03-24)
+
+### Feat
+
+- add `--import-csv` option to cli
+
 ## v0.10.0 (2023-03-21)
 
 ### Feat
@@ -25,6 +25,7 @@ pip install obsidian-metadata
 - `--config-file`: Specify a custom configuration file location
 - `--dry-run`: Make no destructive changes
+- `--import-csv`: Import a CSV file with bulk updates
 - `--export-csv`: Specify a path and create a CSV export of all metadata
 - `--export-json`: Specify a path and create a JSON export of all metadata
 - `--export-template`: Specify a path and export all notes with their associated metadata to a CSV file for use as a bulk import template
@@ -173,6 +174,8 @@ Create a CSV template for making bulk updates containing all your notes and thei
 1. Using the `--export-template` cli command; or
 2. Selecting the `Metadata by note` option within the `Export Metadata` section of the app
 
+Once you have a template created, you can import it using the `--import-csv` flag or by navigating to the `Import bulk changes from CSV` option.
+
 # Contributing
 
 ## Setup: Once per project
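To make the template workflow above concrete, here is a small, illustrative sketch that writes a bulk-import CSV with Python's standard `csv` module. The column names used (`path`, `type`, `key`, `value`) are assumptions for illustration only; in practice, generate the real header with `--export-template` and keep it unchanged.

```python
# Illustrative only: the authoritative column layout comes from the file that
# `--export-template` generates. The column names below are assumptions, not
# the documented schema.
import csv
from pathlib import Path

rows = [
    # One row per note/metadata pair to set (hypothetical columns and values).
    {"path": "inbox/example-note.md", "type": "frontmatter", "key": "tags", "value": "project"},
]

with Path("bulk_updates.csv").open("w", newline="") as fp:
    writer = csv.DictWriter(fp, fieldnames=["path", "type", "key", "value"])
    writer.writeheader()
    writer.writerows(rows)
```

The resulting file can then be applied with `obsidian-metadata --import-csv bulk_updates.csv` or through the `Import bulk changes from CSV` menu option.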
poetry.lock (generated): 212 changed lines

@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.4.0 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand.
 
 [[package]]
 name = "argcomplete"
@@ -283,19 +283,19 @@ testing = ["pre-commit"]
 
 [[package]]
 name = "filelock"
-version = "3.10.0"
+version = "3.10.4"
 description = "A platform independent file lock."
 category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "filelock-3.10.0-py3-none-any.whl", hash = "sha256:e90b34656470756edf8b19656785c5fea73afa1953f3e1b0d645cef11cab3182"},
-    {file = "filelock-3.10.0.tar.gz", hash = "sha256:3199fd0d3faea8b911be52b663dfccceb84c95949dd13179aa21436d1a79c4ce"},
+    {file = "filelock-3.10.4-py3-none-any.whl", hash = "sha256:6d332dc5c896f18ba93a21d987155e97c434a96d3fe4042ca70d0b3b46e3b470"},
+    {file = "filelock-3.10.4.tar.gz", hash = "sha256:9fc1734dbddcdcd4aaa02c160dd94db5272b92dfa859b44ec8df28e160b751f0"},
 ]
 
 [package.extras]
 docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"]
-testing = ["covdefaults (>=2.3)", "coverage (>=7.2.1)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
+testing = ["covdefaults (>=2.3)", "coverage (>=7.2.2)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"]
 
 [[package]]
 name = "identify"
@@ -648,14 +648,14 @@ testing = ["pytest", "pytest-benchmark"]
 
 [[package]]
 name = "poethepoet"
-version = "0.18.1"
+version = "0.19.0"
 description = "A task runner that works well with poetry."
 category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "poethepoet-0.18.1-py3-none-any.whl", hash = "sha256:e85727bf6f4a10bf6c1a43026bdeb40df689bea3c4682d03cbe531cabc8f2ba6"},
-    {file = "poethepoet-0.18.1.tar.gz", hash = "sha256:5f3566b14c2f5dccdfbc3bb26f0096006b38dc0b9c74bd4f8dd1eba7b0e29f6a"},
+    {file = "poethepoet-0.19.0-py3-none-any.whl", hash = "sha256:87038be589077e4b407050a9da644d9cd9e4076ccfc8abc7f855cf6870d5c6c2"},
+    {file = "poethepoet-0.19.0.tar.gz", hash = "sha256:897eb85ec15876d79befc7d19d4c80ce7c8b214d1bb0dcfec640abd81616bfed"},
 ]
 
 [package.dependencies]
@@ -905,100 +905,72 @@ docs = ["Sphinx (>=3.3,<4.0)", "sphinx-autobuild (>=2020.9.1,<2021.0.0)", "sphin
 
 [[package]]
 name = "regex"
-version = "2022.10.31"
+version = "2023.3.23"
 description = "Alternative regular expression module, to replace re."
 category = "main"
 optional = false
-python-versions = ">=3.6"
+python-versions = ">=3.8"
 files = [
{file = "regex-2022.10.31-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a8ff454ef0bb061e37df03557afda9d785c905dab15584860f982e88be73015f"},
|
{file = "regex-2023.3.23-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:845a5e2d84389c4ddada1a9b95c055320070f18bb76512608374aca00d22eca8"},
|
||||||
{file = "regex-2022.10.31-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eba476b1b242620c266edf6325b443a2e22b633217a9835a52d8da2b5c051f9"},
|
{file = "regex-2023.3.23-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87d9951f5a538dd1d016bdc0dcae59241d15fa94860964833a54d18197fcd134"},
|
||||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0e5af9a9effb88535a472e19169e09ce750c3d442fb222254a276d77808620b"},
|
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37ae17d3be44c0b3f782c28ae9edd8b47c1f1776d4cabe87edc0b98e1f12b021"},
|
||||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d03fe67b2325cb3f09be029fd5da8df9e6974f0cde2c2ac6a79d2634e791dd57"},
|
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0b8eb1e3bca6b48dc721818a60ae83b8264d4089a4a41d62be6d05316ec38e15"},
|
||||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a9d0b68ac1743964755ae2d89772c7e6fb0118acd4d0b7464eaf3921c6b49dd4"},
|
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df45fac182ebc3c494460c644e853515cc24f5ad9da05f8ffb91da891bfee879"},
|
||||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a45b6514861916c429e6059a55cf7db74670eaed2052a648e3e4d04f070e001"},
|
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7006105b10b59971d3b248ad75acc3651c7e4cf54d81694df5a5130a3c3f7ea"},
|
||||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8b0886885f7323beea6f552c28bff62cbe0983b9fbb94126531693ea6c5ebb90"},
|
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93f3f1aa608380fe294aa4cb82e2afda07a7598e828d0341e124b8fd9327c715"},
|
||||||
{file = "regex-2022.10.31-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5aefb84a301327ad115e9d346c8e2760009131d9d4b4c6b213648d02e2abe144"},
|
{file = "regex-2023.3.23-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:787954f541ab95d8195d97b0b8cf1dc304424adb1e07365967e656b92b38a699"},
|
||||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:702d8fc6f25bbf412ee706bd73019da5e44a8400861dfff7ff31eb5b4a1276dc"},
|
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:20abe0bdf03630fe92ccafc45a599bca8b3501f48d1de4f7d121153350a2f77d"},
|
||||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a3c1ebd4ed8e76e886507c9eddb1a891673686c813adf889b864a17fafcf6d66"},
|
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:11d00c31aeab9a6e0503bc77e73ed9f4527b3984279d997eb145d7c7be6268fd"},
|
||||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:50921c140561d3db2ab9f5b11c5184846cde686bb5a9dc64cae442926e86f3af"},
|
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d5bbe0e1511b844794a3be43d6c145001626ba9a6c1db8f84bdc724e91131d9d"},
|
||||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:7db345956ecce0c99b97b042b4ca7326feeec6b75facd8390af73b18e2650ffc"},
|
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ea3c0cb56eadbf4ab2277e7a095676370b3e46dbfc74d5c383bd87b0d6317910"},
|
||||||
{file = "regex-2022.10.31-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:763b64853b0a8f4f9cfb41a76a4a85a9bcda7fdda5cb057016e7706fde928e66"},
|
{file = "regex-2023.3.23-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d895b4c863059a4934d3e874b90998df774644a41b349ebb330f85f11b4ef2c0"},
|
||||||
{file = "regex-2022.10.31-cp310-cp310-win32.whl", hash = "sha256:44136355e2f5e06bf6b23d337a75386371ba742ffa771440b85bed367c1318d1"},
|
{file = "regex-2023.3.23-cp310-cp310-win32.whl", hash = "sha256:9d764514d19b4edcc75fd8cb1423448ef393e8b6cbd94f38cab983ab1b75855d"},
|
||||||
{file = "regex-2022.10.31-cp310-cp310-win_amd64.whl", hash = "sha256:bfff48c7bd23c6e2aec6454aaf6edc44444b229e94743b34bdcdda2e35126cf5"},
|
{file = "regex-2023.3.23-cp310-cp310-win_amd64.whl", hash = "sha256:11d1f2b7a0696dc0310de0efb51b1f4d813ad4401fe368e83c0c62f344429f98"},
|
||||||
{file = "regex-2022.10.31-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b4b1fe58cd102d75ef0552cf17242705ce0759f9695334a56644ad2d83903fe"},
|
{file = "regex-2023.3.23-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8a9c63cde0eaa345795c0fdeb19dc62d22e378c50b0bc67bf4667cd5b482d98b"},
|
||||||
{file = "regex-2022.10.31-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:542e3e306d1669b25936b64917285cdffcd4f5c6f0247636fec037187bd93542"},
|
{file = "regex-2023.3.23-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dd7200b4c27b68cf9c9646da01647141c6db09f48cc5b51bc588deaf8e98a797"},
|
||||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c27cc1e4b197092e50ddbf0118c788d9977f3f8f35bfbbd3e76c1846a3443df7"},
|
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22720024b90a6ba673a725dcc62e10fb1111b889305d7c6b887ac7466b74bedb"},
|
||||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8e38472739028e5f2c3a4aded0ab7eadc447f0d84f310c7a8bb697ec417229e"},
|
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b190a339090e6af25f4a5fd9e77591f6d911cc7b96ecbb2114890b061be0ac1"},
|
||||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76c598ca73ec73a2f568e2a72ba46c3b6c8690ad9a07092b18e48ceb936e9f0c"},
|
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e76b6fc0d8e9efa39100369a9b3379ce35e20f6c75365653cf58d282ad290f6f"},
|
||||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c28d3309ebd6d6b2cf82969b5179bed5fefe6142c70f354ece94324fa11bf6a1"},
|
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7868b8f218bf69a2a15402fde08b08712213a1f4b85a156d90473a6fb6b12b09"},
|
||||||
{file = "regex-2022.10.31-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9af69f6746120998cd9c355e9c3c6aec7dff70d47247188feb4f829502be8ab4"},
|
{file = "regex-2023.3.23-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2472428efc4127374f494e570e36b30bb5e6b37d9a754f7667f7073e43b0abdd"},
|
||||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a5f9505efd574d1e5b4a76ac9dd92a12acb2b309551e9aa874c13c11caefbe4f"},
|
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c37df2a060cb476d94c047b18572ee2b37c31f831df126c0da3cd9227b39253d"},
|
||||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5ff525698de226c0ca743bfa71fc6b378cda2ddcf0d22d7c37b1cc925c9650a5"},
|
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4479f9e2abc03362df4045b1332d4a2b7885b245a30d4f4b051c4083b97d95d8"},
|
||||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4fe7fda2fe7c8890d454f2cbc91d6c01baf206fbc96d89a80241a02985118c0c"},
|
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e2396e0678167f2d0c197da942b0b3fb48fee2f0b5915a0feb84d11b6686afe6"},
|
||||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:2cdc55ca07b4e70dda898d2ab7150ecf17c990076d3acd7a5f3b25cb23a69f1c"},
|
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75f288c60232a5339e0ff2fa05779a5e9c74e9fc085c81e931d4a264501e745b"},
|
||||||
{file = "regex-2022.10.31-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:44a6c2f6374e0033873e9ed577a54a3602b4f609867794c1a3ebba65e4c93ee7"},
|
{file = "regex-2023.3.23-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c869260aa62cee21c5eb171a466c0572b5e809213612ef8d495268cd2e34f20d"},
|
||||||
{file = "regex-2022.10.31-cp311-cp311-win32.whl", hash = "sha256:d8716f82502997b3d0895d1c64c3b834181b1eaca28f3f6336a71777e437c2af"},
|
{file = "regex-2023.3.23-cp311-cp311-win32.whl", hash = "sha256:25f0532fd0c53e96bad84664171969de9673b4131f2297f1db850d3918d58858"},
|
||||||
{file = "regex-2022.10.31-cp311-cp311-win_amd64.whl", hash = "sha256:61edbca89aa3f5ef7ecac8c23d975fe7261c12665f1d90a6b1af527bba86ce61"},
|
{file = "regex-2023.3.23-cp311-cp311-win_amd64.whl", hash = "sha256:5ccfafd98473e007cebf7da10c1411035b7844f0f204015efd050601906dbb53"},
|
||||||
{file = "regex-2022.10.31-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a069c8483466806ab94ea9068c34b200b8bfc66b6762f45a831c4baaa9e8cdd"},
|
{file = "regex-2023.3.23-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6572ff287176c0fb96568adb292674b421fa762153ed074d94b1d939ed92c253"},
|
||||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d26166acf62f731f50bdd885b04b38828436d74e8e362bfcb8df221d868b5d9b"},
|
{file = "regex-2023.3.23-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a610e0adfcb0fc84ea25f6ea685e39e74cbcd9245a72a9a7aab85ff755a5ed27"},
|
||||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac741bf78b9bb432e2d314439275235f41656e189856b11fb4e774d9f7246d81"},
|
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:086afe222d58b88b62847bdbd92079b4699350b4acab892f88a935db5707c790"},
|
||||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75f591b2055523fc02a4bbe598aa867df9e953255f0b7f7715d2a36a9c30065c"},
|
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79e29fd62fa2f597a6754b247356bda14b866131a22444d67f907d6d341e10f3"},
|
||||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bddd61d2a3261f025ad0f9ee2586988c6a00c780a2fb0a92cea2aa702c54"},
|
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c07ce8e9eee878a48ebeb32ee661b49504b85e164b05bebf25420705709fdd31"},
|
||||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef4163770525257876f10e8ece1cf25b71468316f61451ded1a6f44273eedeb5"},
|
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86b036f401895e854de9fefe061518e78d506d8a919cc250dc3416bca03f6f9a"},
|
||||||
{file = "regex-2022.10.31-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7b280948d00bd3973c1998f92e22aa3ecb76682e3a4255f33e1020bd32adf443"},
|
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78ac8dd8e18800bb1f97aad0d73f68916592dddf233b99d2b5cabc562088503a"},
|
||||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:d0213671691e341f6849bf33cd9fad21f7b1cb88b89e024f33370733fec58742"},
|
{file = "regex-2023.3.23-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:539dd010dc35af935b32f248099e38447bbffc10b59c2b542bceead2bed5c325"},
|
||||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:22e7ebc231d28393dfdc19b185d97e14a0f178bedd78e85aad660e93b646604e"},
|
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9bf4a5626f2a0ea006bf81e8963f498a57a47d58907eaa58f4b3e13be68759d8"},
|
||||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:8ad241da7fac963d7573cc67a064c57c58766b62a9a20c452ca1f21050868dfa"},
|
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf86b4328c204c3f315074a61bc1c06f8a75a8e102359f18ce99fbcbbf1951f0"},
|
||||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:586b36ebda81e6c1a9c5a5d0bfdc236399ba6595e1397842fd4a45648c30f35e"},
|
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:2848bf76673c83314068241c8d5b7fa9ad9bed866c979875a0e84039349e8fa7"},
|
||||||
{file = "regex-2022.10.31-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0653d012b3bf45f194e5e6a41df9258811ac8fc395579fa82958a8b76286bea4"},
|
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c125a02d22c555e68f7433bac8449992fa1cead525399f14e47c2d98f2f0e467"},
|
||||||
{file = "regex-2022.10.31-cp36-cp36m-win32.whl", hash = "sha256:144486e029793a733e43b2e37df16a16df4ceb62102636ff3db6033994711066"},
|
{file = "regex-2023.3.23-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cd1671e9d5ac05ce6aa86874dd8dfa048824d1dbe73060851b310c6c1a201a96"},
|
||||||
{file = "regex-2022.10.31-cp36-cp36m-win_amd64.whl", hash = "sha256:c14b63c9d7bab795d17392c7c1f9aaabbffd4cf4387725a0ac69109fb3b550c6"},
|
{file = "regex-2023.3.23-cp38-cp38-win32.whl", hash = "sha256:fffe57312a358be6ec6baeb43d253c36e5790e436b7bf5b7a38df360363e88e9"},
|
||||||
{file = "regex-2022.10.31-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4cac3405d8dda8bc6ed499557625585544dd5cbf32072dcc72b5a176cb1271c8"},
|
{file = "regex-2023.3.23-cp38-cp38-win_amd64.whl", hash = "sha256:dbb3f87e15d3dd76996d604af8678316ad2d7d20faa394e92d9394dfd621fd0c"},
|
||||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23cbb932cc53a86ebde0fb72e7e645f9a5eec1a5af7aa9ce333e46286caef783"},
|
{file = "regex-2023.3.23-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c88e8c226473b5549fe9616980ea7ca09289246cfbdf469241edf4741a620004"},
|
||||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74bcab50a13960f2a610cdcd066e25f1fd59e23b69637c92ad470784a51b1347"},
|
{file = "regex-2023.3.23-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6560776ec19c83f3645bbc5db64a7a5816c9d8fb7ed7201c5bcd269323d88072"},
|
||||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:78d680ef3e4d405f36f0d6d1ea54e740366f061645930072d39bca16a10d8c93"},
|
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b1fc2632c01f42e06173d8dd9bb2e74ab9b0afa1d698058c867288d2c7a31f3"},
|
||||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce6910b56b700bea7be82c54ddf2e0ed792a577dfaa4a76b9af07d550af435c6"},
|
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdf7ad455f1916b8ea5cdbc482d379f6daf93f3867b4232d14699867a5a13af7"},
|
||||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:659175b2144d199560d99a8d13b2228b85e6019b6e09e556209dfb8c37b78a11"},
|
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5fc33b27b1d800fc5b78d7f7d0f287e35079ecabe68e83d46930cf45690e1c8c"},
|
||||||
{file = "regex-2022.10.31-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1ddf14031a3882f684b8642cb74eea3af93a2be68893901b2b387c5fd92a03ec"},
|
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c49552dc938e3588f63f8a78c86f3c9c75301e813bca0bef13bdb4b87ccf364"},
|
||||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b683e5fd7f74fb66e89a1ed16076dbab3f8e9f34c18b1979ded614fe10cdc4d9"},
|
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e152461e9a0aedec7d37fc66ec0fa635eca984777d3d3c3e36f53bf3d3ceb16e"},
|
||||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2bde29cc44fa81c0a0c8686992c3080b37c488df167a371500b2a43ce9f026d1"},
|
{file = "regex-2023.3.23-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:db034255e72d2995cf581b14bb3fc9c00bdbe6822b49fcd4eef79e1d5f232618"},
|
||||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4919899577ba37f505aaebdf6e7dc812d55e8f097331312db7f1aab18767cce8"},
|
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:55ae114da21b7a790b90255ea52d2aa3a0d121a646deb2d3c6a3194e722fc762"},
|
||||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:9c94f7cc91ab16b36ba5ce476f1904c91d6c92441f01cd61a8e2729442d6fcf5"},
|
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ef3f528fe1cc3d139508fe1b22523745aa77b9d6cb5b0bf277f48788ee0b993f"},
|
||||||
{file = "regex-2022.10.31-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ae1e96785696b543394a4e3f15f3f225d44f3c55dafe3f206493031419fedf95"},
|
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:a81c9ec59ca2303acd1ccd7b9ac409f1e478e40e96f8f79b943be476c5fdb8bb"},
|
||||||
{file = "regex-2022.10.31-cp37-cp37m-win32.whl", hash = "sha256:c670f4773f2f6f1957ff8a3962c7dd12e4be54d05839b216cb7fd70b5a1df394"},
|
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:cde09c4fdd070772aa2596d97e942eb775a478b32459e042e1be71b739d08b77"},
|
||||||
{file = "regex-2022.10.31-cp37-cp37m-win_amd64.whl", hash = "sha256:8e0caeff18b96ea90fc0eb6e3bdb2b10ab5b01a95128dfeccb64a7238decf5f0"},
|
{file = "regex-2023.3.23-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3cd9f5dd7b821f141d3a6ca0d5d9359b9221e4f051ca3139320adea9f1679691"},
|
||||||
{file = "regex-2022.10.31-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:131d4be09bea7ce2577f9623e415cab287a3c8e0624f778c1d955ec7c281bd4d"},
|
{file = "regex-2023.3.23-cp39-cp39-win32.whl", hash = "sha256:7304863f3a652dab5e68e6fb1725d05ebab36ec0390676d1736e0571ebb713ef"},
|
||||||
{file = "regex-2022.10.31-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e613a98ead2005c4ce037c7b061f2409a1a4e45099edb0ef3200ee26ed2a69a8"},
|
{file = "regex-2023.3.23-cp39-cp39-win_amd64.whl", hash = "sha256:54c3fa855a3f7438149de3211738dd9b5f0c733f48b54ae05aa7fce83d48d858"},
|
||||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052b670fafbe30966bbe5d025e90b2a491f85dfe5b2583a163b5e60a85a321ad"},
|
{file = "regex-2023.3.23.tar.gz", hash = "sha256:dc80df325b43ffea5cdea2e3eaa97a44f3dd298262b1c7fe9dbb2a9522b956a7"},
|
||||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa62a07ac93b7cb6b7d0389d8ef57ffc321d78f60c037b19dfa78d6b17c928ee"},
|
|
||||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5352bea8a8f84b89d45ccc503f390a6be77917932b1c98c4cdc3565137acc714"},
|
|
||||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:20f61c9944f0be2dc2b75689ba409938c14876c19d02f7585af4460b6a21403e"},
|
|
||||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29c04741b9ae13d1e94cf93fca257730b97ce6ea64cfe1eba11cf9ac4e85afb6"},
|
|
||||||
{file = "regex-2022.10.31-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:543883e3496c8b6d58bd036c99486c3c8387c2fc01f7a342b760c1ea3158a318"},
|
|
||||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7a8b43ee64ca8f4befa2bea4083f7c52c92864d8518244bfa6e88c751fa8fff"},
|
|
||||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:6a9a19bea8495bb419dc5d38c4519567781cd8d571c72efc6aa959473d10221a"},
|
|
||||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:6ffd55b5aedc6f25fd8d9f905c9376ca44fcf768673ffb9d160dd6f409bfda73"},
|
|
||||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4bdd56ee719a8f751cf5a593476a441c4e56c9b64dc1f0f30902858c4ef8771d"},
|
|
||||||
{file = "regex-2022.10.31-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ca88da1bd78990b536c4a7765f719803eb4f8f9971cc22d6ca965c10a7f2c4c"},
|
|
||||||
{file = "regex-2022.10.31-cp38-cp38-win32.whl", hash = "sha256:5a260758454580f11dd8743fa98319bb046037dfab4f7828008909d0aa5292bc"},
|
|
||||||
{file = "regex-2022.10.31-cp38-cp38-win_amd64.whl", hash = "sha256:5e6a5567078b3eaed93558842346c9d678e116ab0135e22eb72db8325e90b453"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5217c25229b6a85049416a5c1e6451e9060a1edcf988641e309dbe3ab26d3e49"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4bf41b8b0a80708f7e0384519795e80dcb44d7199a35d52c15cc674d10b3081b"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf0da36a212978be2c2e2e2d04bdff46f850108fccc1851332bcae51c8907cc"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d403d781b0e06d2922435ce3b8d2376579f0c217ae491e273bab8d092727d244"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a37d51fa9a00d265cf73f3de3930fa9c41548177ba4f0faf76e61d512c774690"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4f781ffedd17b0b834c8731b75cce2639d5a8afe961c1e58ee7f1f20b3af185"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d243b36fbf3d73c25e48014961e83c19c9cc92530516ce3c43050ea6276a2ab7"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:370f6e97d02bf2dd20d7468ce4f38e173a124e769762d00beadec3bc2f4b3bc4"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:597f899f4ed42a38df7b0e46714880fb4e19a25c2f66e5c908805466721760f5"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7dbdce0c534bbf52274b94768b3498abdf675a691fec5f751b6057b3030f34c1"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:22960019a842777a9fa5134c2364efaed5fbf9610ddc5c904bd3a400973b0eb8"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7f5a3ffc731494f1a57bd91c47dc483a1e10048131ffb52d901bfe2beb6102e8"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7ef6b5942e6bfc5706301a18a62300c60db9af7f6368042227ccb7eeb22d0892"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-win32.whl", hash = "sha256:395161bbdbd04a8333b9ff9763a05e9ceb4fe210e3c7690f5e68cedd3d65d8e1"},
|
|
||||||
{file = "regex-2022.10.31-cp39-cp39-win_amd64.whl", hash = "sha256:957403a978e10fb3ca42572a23e6f7badff39aa1ce2f4ade68ee452dc6807692"},
|
|
||||||
{file = "regex-2022.10.31.tar.gz", hash = "sha256:a3a98921da9a1bf8457aeee6a551948a83601689e5ecdd736894ea9bbec77e83"},
|
|
||||||
 ]
 
 [[package]]
@@ -1087,29 +1059,29 @@ files = [
 
 [[package]]
 name = "ruff"
-version = "0.0.257"
+version = "0.0.259"
 description = "An extremely fast Python linter, written in Rust."
 category = "dev"
 optional = false
 python-versions = ">=3.7"
 files = [
{file = "ruff-0.0.257-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:7280640690c1d0046b20e0eb924319a89d8e22925d7d232180ce31196e7478f8"},
|
{file = "ruff-0.0.259-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:f3938dc45e2a3f818e9cbd53007265c22246fbfded8837b2c563bf0ebde1a226"},
|
||||||
{file = "ruff-0.0.257-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:4582b73da61ab410ffda35b2987a6eacb33f18263e1c91810f0b9779ec4f41a9"},
|
{file = "ruff-0.0.259-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:22e1e35bf5f12072cd644d22afd9203641ccf258bc14ff91aa1c43dc14f6047d"},
|
||||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5acae9878f1136893e266348acdb9d30dfae23c296d3012043816432a5abdd51"},
|
{file = "ruff-0.0.259-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2fb20e89e85d147c85caa807707a1488bccc1f3854dc3d53533e89b52a0c5ff"},
|
||||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d9f0912d045eee15e8e02e335c16d7a7f9fb6821aa5eb1628eeb5bbfa3d88908"},
|
{file = "ruff-0.0.259-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:49e903bcda19f6bb0725a962c058eb5d61f40d84ef52ed53b61939b69402ab4e"},
|
||||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a9542c34ee5298b31be6c6ba304f14b672dcf104846ee65adb2466d3e325870"},
|
{file = "ruff-0.0.259-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71f0ef1985e9a6696fa97da8459917fa34bdaa2c16bd33bd5edead585b7d44f7"},
|
||||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:3464f1ad4cea6c4b9325da13ae306bd22bf15d226e18d19c52db191b1f4355ac"},
|
{file = "ruff-0.0.259-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:7cfef26619cba184d59aa7fa17b48af5891d51fc0b755a9bc533478a10d4d066"},
|
||||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5a54bfd559e558ee0df2a2f3756423fe6a9de7307bc290d807c3cdf351cb4c24"},
|
{file = "ruff-0.0.259-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:79b02fa17ec1fd8d306ae302cb47fb614b71e1f539997858243769bcbe78c6d9"},
|
||||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3438fd38446e1a0915316f4085405c9feca20fe00a4b614995ab7034dbfaa7ff"},
|
{file = "ruff-0.0.259-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:428507fb321b386dda70d66cd1a8aa0abf51d7c197983d83bb9e4fa5ee60300b"},
|
||||||
{file = "ruff-0.0.257-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:358cc2b547bd6451dcf2427b22a9c29a2d9c34e66576c693a6381c5f2ed3011d"},
|
{file = "ruff-0.0.259-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5fbaea9167f1852757f02133e5daacdb8c75b3431343205395da5b10499927a"},
|
||||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:783390f1e94a168c79d7004426dae3e4ae2999cc85f7d00fdd86c62262b71854"},
|
{file = "ruff-0.0.259-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:40ae87f2638484b7e8a7567b04a7af719f1c484c5bf132038b702bb32e1f6577"},
|
||||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:aaa3b5b6929c63a854b6bcea7a229453b455ab26337100b2905fae4523ca5667"},
|
{file = "ruff-0.0.259-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:29e2b77b7d5da6a7dd5cf9b738b511355c5734ece56f78e500d4b5bffd58c1a0"},
|
||||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_i686.whl", hash = "sha256:4ecd7a84db4816df2dcd0f11c5365a9a2cf4fa70a19b3ac161b7b0bfa592959d"},
|
{file = "ruff-0.0.259-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5b3c1beacf6037e7f0781d4699d9a2dd4ba2462f475be5b1f45cf84c4ba3c69d"},
|
||||||
{file = "ruff-0.0.257-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:3db8d77d5651a2c0d307102d717627a025d4488d406f54c2764b21cfbe11d822"},
|
{file = "ruff-0.0.259-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:daaea322e7e85f4c13d82be9536309e1c4b8b9851bb0cbc7eeb15d490fd46bf9"},
|
||||||
{file = "ruff-0.0.257-py3-none-win32.whl", hash = "sha256:d2c8755fa4f6c5e5ec032ad341ca3beeecd16786e12c3f26e6b0cc40418ae998"},
|
{file = "ruff-0.0.259-py3-none-win32.whl", hash = "sha256:38704f151323aa5858370a2f792e122cc25e5d1aabe7d42ceeab83da18f0b456"},
|
||||||
{file = "ruff-0.0.257-py3-none-win_amd64.whl", hash = "sha256:3cec07d6fecb1ebbc45ea8eeb1047b929caa2f7dfb8dd4b0e1869ff789326da5"},
|
{file = "ruff-0.0.259-py3-none-win_amd64.whl", hash = "sha256:aa9449b898287e621942cc71b9327eceb8f0c357e4065fecefb707ef2d978df8"},
|
||||||
{file = "ruff-0.0.257-py3-none-win_arm64.whl", hash = "sha256:352f1bdb9b433b3b389aee512ffb0b82226ae1e25b3d92e4eaf0e7be6b1b6f6a"},
|
{file = "ruff-0.0.259-py3-none-win_arm64.whl", hash = "sha256:e4f39e18702de69faaaee3969934b92d7467285627f99a5b6ecd55a7d9f5d086"},
|
||||||
{file = "ruff-0.0.257.tar.gz", hash = "sha256:fedfd06a37ddc17449203c3e38fc83fb68de7f20b5daa0ee4e60d3599b38bab0"},
|
{file = "ruff-0.0.259.tar.gz", hash = "sha256:8b56496063ab3bfdf72339a5fbebb8bd46e5c5fee25ef11a9f03b208fa0562ec"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
@@ -1221,14 +1193,14 @@ files = [
 
 [[package]]
 name = "typeguard"
-version = "3.0.1"
+version = "3.0.2"
 description = "Run-time type checker for Python"
 category = "dev"
 optional = false
 python-versions = ">=3.7.4"
 files = [
-    {file = "typeguard-3.0.1-py3-none-any.whl", hash = "sha256:15628045c830abf68533247afd2cb04683b5ce6f4e30d5401a5ef6f5182280de"},
-    {file = "typeguard-3.0.1.tar.gz", hash = "sha256:beb0e67c5dc76eea4a6d00a6606d444d899589908362960769d0c4a1d32bca70"},
+    {file = "typeguard-3.0.2-py3-none-any.whl", hash = "sha256:bbe993854385284ab42fd5bd3bee6f6556577ce8b50696d6cb956d704f286c8e"},
+    {file = "typeguard-3.0.2.tar.gz", hash = "sha256:fee5297fdb28f8e9efcb8142b5ee219e02375509cd77ea9d270b5af826358d5a"},
 ]
 
 [package.dependencies]
@@ -1349,4 +1321,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "8fa62f96cc77eac773497573dcbdd5666173cbec56374fea73a814f3fb7f5338"
+content-hash = "45fc32e73a5670e7a8060985528c690a0739d76293e92e82ba1376f58f038638"
@@ -11,7 +11,7 @@
 name = "obsidian-metadata"
 readme = "README.md"
 repository = "https://github.com/natelandau/obsidian-metadata"
-version = "0.10.0"
+version = "0.11.0"
 
 [tool.poetry.scripts] # https://python-poetry.org/docs/pyproject/#scripts
 obsidian-metadata = "obsidian_metadata.cli:app"
@@ -20,7 +20,7 @@
 loguru = "^0.6.0"
 python = "^3.10"
 questionary = "^1.10.0"
-regex = "^2022.10.31"
+regex = "^2023.3.23"
 rich = "^13.3.2"
 ruamel-yaml = "^0.17.21"
 shellingham = "^1.5.0.post1"
@@ -41,11 +41,11 @@
 interrogate = "^1.5.0"
 mypy = "^1.1.1"
 pdoc = "^13.0.1"
-poethepoet = "^0.18.1"
+poethepoet = "^0.19.0"
 pre-commit = "^3.2.0"
-ruff = "^0.0.257"
+ruff = "^0.0.259"
 sh = "2.0.3"
-typeguard = "^3.0.1"
+typeguard = "^3.0.2"
 types-python-dateutil = "^2.8.19.10"
 vulture = "^2.7"
 
@@ -57,7 +57,7 @@
 changelog_incremental = true
 tag_format = "v$version"
 update_changelog_on_bump = true
-version = "0.10.0"
+version = "0.11.0"
 version_files = ["pyproject.toml:version", "src/obsidian_metadata/__version__.py:__version__"]
 
 [tool.coverage.report] # https://coverage.readthedocs.io/en/latest/config.html#report
@@ -1,2 +1,2 @@
 """obsidian-metadata version."""
-__version__ = "0.10.0"
+__version__ = "0.11.0"
@@ -21,24 +21,26 @@ def clean_dictionary(dictionary: dict[str, Any]) -> dict[str, Any]:
     Returns:
         dict: Cleaned dictionary
     """
-    new_dict = {key.strip(): value for key, value in dictionary.items()}
-    new_dict = {key.strip("*[]#"): value for key, value in new_dict.items()}
+    new_dict = copy.deepcopy(dictionary)
+    new_dict = {key.strip("*[]# "): value for key, value in new_dict.items()}
     for key, value in new_dict.items():
-        new_dict[key] = [s.strip("*[]#") for s in value if isinstance(value, list)]
+        if isinstance(value, list):
+            new_dict[key] = [s.strip("*[]# ") for s in value if isinstance(value, list)]
+        elif isinstance(value, str):
+            new_dict[key] = value.strip("*[]# ")
 
     return new_dict
 
 
 def clear_screen() -> None:  # pragma: no cover
     """Clear the screen."""
-    # for windows
     _ = system("cls") if name == "nt" else system("clear")
 
 
 def dict_contains(
     dictionary: dict[str, list[str]], key: str, value: str = None, is_regex: bool = False
 ) -> bool:
-    """Check if a dictionary contains a key or if a specified key contains a value.
+    """Check if a dictionary contains a key or if a key contains a value.
 
     Args:
         dictionary (dict): Dictionary to check
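For clarity, the effect of the `clean_dictionary()` change in the hunk above can be summarized with a simplified, standalone sketch; this illustrates the new stripping behavior and is not the project's exact code.

```python
# Simplified sketch of the updated clean_dictionary() behavior: keys, string
# values, and list items are stripped of surrounding "*", "[", "]", "#", and spaces.
def clean_dictionary_sketch(dictionary: dict) -> dict:
    cleaned = {key.strip("*[]# "): value for key, value in dictionary.items()}
    for key, value in cleaned.items():
        if isinstance(value, list):
            cleaned[key] = [item.strip("*[]# ") for item in value]
        elif isinstance(value, str):
            cleaned[key] = value.strip("*[]# ")
    return cleaned

print(clean_dictionary_sketch({"**tags**": ["#project", "[[area]]"]}))
# {'tags': ['project', 'area']}
```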
@@ -47,7 +49,7 @@ def dict_contains(
         is_regex (bool, optional): Whether the key is a regex. Defaults to False.
 
     Returns:
-        bool: Whether the dictionary contains the key
+        bool: Whether the dictionary contains the key or value
     """
     if value is None:
         if is_regex:
@@ -55,13 +57,11 @@
             return key in dictionary
 
     if is_regex:
-        found_keys = []
         for _key in dictionary:
-            if re.search(key, str(_key)):
-                found_keys.append(
-                    any(re.search(value, _v) for _v in dictionary[_key]),
-                )
-        return any(found_keys)
+            if re.search(key, str(_key)) and any(re.search(value, _v) for _v in dictionary[_key]):
+                return True
+        return False
 
     return key in dictionary and value in dictionary[key]
 
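The rewritten regex branch of `dict_contains()` now short-circuits: it returns `True` as soon as some key matching the key pattern also holds a value matching the value pattern, and `False` otherwise. A quick illustration of the expected behavior, assuming the function as shown in the hunks above:

```python
from obsidian_metadata._utils import dict_contains

# Expected behavior of the regex branch after this change (illustrative asserts).
d = {"key1": ["value1", "value2"], "key2": ["value3"]}

assert dict_contains(d, r"key\d", r"value\d", is_regex=True) is True   # key and value both match
assert dict_contains(d, r"key\d", "value9", is_regex=True) is False    # no matching value anywhere
```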
@@ -93,6 +93,7 @@ def dict_values_to_lists_strings(
 
         {key: sorted(new_dict[key]) for key in sorted(new_dict)}
     """
+    dictionary = copy.deepcopy(dictionary)
     new_dict = {}
 
     if strip_null_values:
@@ -100,7 +101,7 @@ def dict_values_to_lists_strings(
             if isinstance(value, list):
                 new_dict[key] = sorted([str(item) for item in value if item is not None])
             elif isinstance(value, dict):
-                new_dict[key] = dict_values_to_lists_strings(value)  # type: ignore[assignment]
+                new_dict[key] = dict_values_to_lists_strings(value, strip_null_values=True)  # type: ignore[assignment]
             elif value is None or value == "None" or not value:
                 new_dict[key] = []
             else:
@@ -110,11 +111,11 @@ def dict_values_to_lists_strings(
 
     for key, value in dictionary.items():
         if isinstance(value, list):
-            new_dict[key] = sorted([str(item) for item in value])
+            new_dict[key] = sorted([str(item) if item is not None else "" for item in value])
         elif isinstance(value, dict):
            new_dict[key] = dict_values_to_lists_strings(value)  # type: ignore[assignment]
         else:
-            new_dict[key] = [str(value)]
+            new_dict[key] = [str(value) if value is not None else ""]
 
     return new_dict
 
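The `dict_values_to_lists_strings()` changes above deep-copy the input and render `None` items as empty strings rather than the literal string `"None"`. A short sketch of the expected output, based only on the code shown in these hunks (not an exhaustive contract):

```python
from obsidian_metadata._utils import dict_values_to_lists_strings

data = {"key1": "value1", "key2": ["value2", None], "key3": None}
assert dict_values_to_lists_strings(data) == {
    "key1": ["value1"],
    "key2": ["", "value2"],   # None list items become empty strings
    "key3": [""],             # a bare None value becomes [""]
}
```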
@@ -192,22 +193,24 @@ def merge_dictionaries(dict1: dict, dict2: dict) -> dict:
     Returns:
         dict: Merged dictionary.
     """
-    for k, v in dict2.items():
-        if k in dict1:
-            if isinstance(v, list):
-                dict1[k].extend(v)
+    d1 = copy.deepcopy(dict1)
+    d2 = copy.deepcopy(dict2)
+
+    for _key in d1:
+        if not isinstance(d1[_key], list):
+            raise TypeError(f"Key {_key} is not a list.")
+    for _key in d2:
+        if not isinstance(d2[_key], list):
+            raise TypeError(f"Key {_key} is not a list.")
+
+    for k, v in d2.items():
+        if k in d1:
+            d1[k].extend(v)
+            d1[k] = sorted(set(d1[k]))
         else:
-            dict1[k] = v
-
-    for k, v in dict1.items():
-        if isinstance(v, list):
-            dict1[k] = sorted(set(v))
-        elif isinstance(v, dict):  # pragma: no cover
-            for kk, vv in v.items():
-                if isinstance(vv, list):
-                    v[kk] = sorted(set(vv))
-
-    return dict(sorted(dict1.items()))
+            d1[k] = sorted(set(v))
+
+    return dict(sorted(d1.items()))
 
 
 def rename_in_dict(
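The rewritten `merge_dictionaries()` above no longer mutates its arguments, insists that every value is a list (raising `TypeError` otherwise), and returns a sorted, de-duplicated merge. A usage sketch derived directly from the new code:

```python
from obsidian_metadata._utils import merge_dictionaries

d1 = {"tags": ["b", "a"]}
d2 = {"tags": ["a", "c"], "status": ["draft"]}

assert merge_dictionaries(d1, d2) == {"status": ["draft"], "tags": ["a", "b", "c"]}
assert d1 == {"tags": ["b", "a"]}  # inputs are deep-copied, not modified in place

try:
    merge_dictionaries({"key": "not-a-list"}, {})  # non-list values are now rejected
except TypeError:
    pass
```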
@@ -241,7 +244,7 @@ def remove_markdown_sections(
     strip_inlinecode: bool = False,
     strip_frontmatter: bool = False,
 ) -> str:
-    """Strip markdown sections from text.
+    """Strip unwanted markdown sections from text. This is used to remove code blocks and frontmatter from the body of notes before tags and inline metadata are processed.
 
     Args:
         text (str): Text to remove code blocks from
@@ -256,7 +259,7 @@ def remove_markdown_sections(
         text = re.sub(r"`{3}.*?`{3}", "", text, flags=re.DOTALL)
 
     if strip_inlinecode:
-        text = re.sub(r"`.*?`", "", text)
+        text = re.sub(r"(?<!`{2})`[^`]+?`", "", text)
 
     if strip_frontmatter:
         text = re.sub(r"^\s*---.*?---", "", text, flags=re.DOTALL)
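The new inline-code pattern only strips single-backtick spans that contain at least one non-backtick character and whose opening backtick is not immediately preceded by two backticks. A quick standalone check of the pattern's mechanics, using only the standard `re` module:

```python
import re

pattern = r"(?<!`{2})`[^`]+?`"  # inline-code pattern introduced in the hunk above

assert re.sub(pattern, "", "Keep text, drop `inline code` spans.") == "Keep text, drop  spans."
assert re.sub(pattern, "", "An empty pair `` is left alone.") == "An empty pair `` is left alone."
```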
@@ -53,6 +53,13 @@ def main(
         dir_okay=False,
         file_okay=True,
     ),
+    import_csv: Path = typer.Option(
+        None,
+        help="Import a CSV file with bulk updates to metadata.",
+        show_default=False,
+        dir_okay=False,
+        file_okay=True,
+    ),
     vault_path: Path = typer.Option(
         None,
         help="Path to Obsidian vault",
@@ -153,6 +160,10 @@ def main(
         path = Path(export_template).expanduser().resolve()
         application.noninteractive_export_template(path)
         raise typer.Exit(code=0)
+    if import_csv is not None:
+        path = Path(import_csv).expanduser().resolve()
+        application.noninteractive_bulk_import(path)
+        raise typer.Exit(code=0)
 
     application.application_main()
 
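Together with `--export-template`, the new flag enables a file-based round trip from the shell: export a template, edit it, then re-import it. The sketch below is a hedged illustration using Python's `subprocess` module; note that, per the Application hunk that follows, the import path still drops into the interactive review/commit menu before anything is written.

```python
# Illustrative round trip with the two documented flags; the CSV name is arbitrary.
import subprocess

subprocess.run(["obsidian-metadata", "--export-template", "metadata_template.csv"], check=True)
# ... edit metadata_template.csv with the desired metadata changes ...
subprocess.run(["obsidian-metadata", "--import-csv", "metadata_template.csv"], check=True)
# The second command still asks you to review and commit the changes interactively.
```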
@@ -548,6 +548,41 @@ class Application:
 
         alerts.success(f"Moved inline metadata to {location.value} in {num_changed} notes")
 
+    def noninteractive_bulk_import(self, path: Path) -> None:
+        """Bulk update metadata from a CSV from the command line.
+
+        Args:
+            path: Path to the CSV file containing the metadata to update.
+        """
+        self._load_vault()
+        note_paths = [
+            str(n.note_path.relative_to(self.vault.vault_path)) for n in self.vault.all_notes
+        ]
+        dict_from_csv = validate_csv_bulk_imports(path, note_paths)
+        num_changed = self.vault.update_from_dict(dict_from_csv)
+        if num_changed == 0:
+            alerts.warning("No notes were changed")
+            return
+
+        alerts.success(f"{num_changed} notes specified in '{path}'")
+        alerts.info("Review changes and commit.")
+        while True:
+            self.vault.info()
+
+            match self.questions.ask_application_main():
+                case "vault_actions":
+                    self.application_vault()
+                case "inspect_metadata":
+                    self.application_inspect_metadata()
+                case "review_changes":
+                    self.review_changes()
+                case "commit_changes":
+                    self.commit_changes()
+                case _:
+                    break
+
+        console.print("Done!")
+
     def noninteractive_export_csv(self, path: Path) -> None:
         """Export the vault metadata to CSV."""
         self._load_vault()
@@ -297,7 +297,7 @@ class Questions:
             {"name": "Inspect Metadata", "value": "inspect_metadata"},
             {"name": "Filter Notes in Scope", "value": "filter_notes"},
             questionary.Separator("-------------------------------"),
-            {"name": "Bulk changes from imported CSV", "value": "import_from_csv"},
+            {"name": "Import bulk changes from CSV", "value": "import_from_csv"},
             {"name": "Add Metadata", "value": "add_metadata"},
             {"name": "Delete Metadata", "value": "delete_metadata"},
             {"name": "Rename Metadata", "value": "rename_metadata"},
@@ -10,6 +10,7 @@ from obsidian_metadata._utils import (
     dict_contains,
     dict_keys_to_lower,
     dict_values_to_lists_strings,
+    merge_dictionaries,
     remove_markdown_sections,
     rename_in_dict,
     validate_csv_bulk_imports,
@@ -17,6 +18,72 @@ from obsidian_metadata._utils import (
 from tests.helpers import Regex, remove_ansi
 
 
+def test_clean_dictionary_1():
+    """Test clean_dictionary() function.
+
+    GIVEN a dictionary passed to clean_dictionary()
+    WHEN the dictionary is empty
+    THEN return an empty dictionary
+    """
+    assert clean_dictionary({}) == {}
+
+
+def test_clean_dictionary_2():
+    """Test clean_dictionary() function.
+
+    GIVEN a dictionary passed to clean_dictionary()
+    WHEN keys contain leading/trailing spaces
+    THEN remove the spaces from the keys
+    """
+    assert clean_dictionary({" key 1 ": "value 1"}) == {"key 1": "value 1"}
+
+
+def test_clean_dictionary_3():
+    """Test clean_dictionary() function.
+
+    GIVEN a dictionary passed to clean_dictionary()
+    WHEN values contain leading/trailing spaces
+    THEN remove the spaces from the values
+    """
+    assert clean_dictionary({"key 1": " value 1 "}) == {"key 1": "value 1"}
+
+
+def test_clean_dictionary_4():
+    """Test clean_dictionary() function.
+
+    GIVEN a dictionary passed to clean_dictionary()
+    WHEN keys or values contain leading/trailing asterisks
+    THEN remove the asterisks from the keys or values
+    """
+    assert clean_dictionary({"**key_1**": ["**value 1**", "value 2"]}) == {
+        "key_1": ["value 1", "value 2"]
+    }
+
+
+def test_clean_dictionary_5():
+    """Test clean_dictionary() function.
+
+    GIVEN a dictionary passed to clean_dictionary()
+    WHEN keys or values contain leading/trailing brackets
+    THEN remove the brackets from the keys and values
+    """
+    assert clean_dictionary({"[[key_1]]": ["[[value 1]]", "[value 2]"]}) == {
+        "key_1": ["value 1", "value 2"]
+    }
+
+
+def test_clean_dictionary_6():
+    """Test clean_dictionary() function.
+
+    GIVEN a dictionary passed to clean_dictionary()
+    WHEN keys or values contain leading/trailing hashtags
+    THEN remove the hashtags from the keys and values
+    """
+    assert clean_dictionary({"#key_1": ["#value 1", "value 2#"]}) == {
+        "key_1": ["value 1", "value 2"]
+    }
+
+
 def test_delete_from_dict_1():
     """Test delete_from_dict() function.
 
@@ -174,19 +241,94 @@ def test_delete_from_dict_11():
     ) == {"key1": ["value1"], "key2": ["value2"]}


-def test_dict_contains() -> None:
-    """Test dict_contains."""
-    d = {"key1": ["value1", "value2"], "key2": ["value3", "value4"], "key3": ["value5", "value6"]}
-
-    assert dict_contains(d, "key1") is True
-    assert dict_contains(d, "key5") is False
-    assert dict_contains(d, "key1", "value1") is True
-    assert dict_contains(d, "key1", "value5") is False
-    assert dict_contains(d, "key[1-2]", is_regex=True) is True
-    assert dict_contains(d, "^1", is_regex=True) is False
-    assert dict_contains(d, r"key\d", r"value\d", is_regex=True) is True
-    assert dict_contains(d, "key1$", "^alue", is_regex=True) is False
-    assert dict_contains(d, r"key\d", "value5", is_regex=True) is True
+def test_dict_contains_1():
+    """Test dict_contains() function.
+
+    GIVEN calling dict_contains() with a dictionary
+    WHEN the dictionary is empty
+    THEN the function should return False
+    """
+    assert dict_contains({}, "key1") is False
+
+
+def test_dict_contains_2():
+    """Test dict_contains() function.
+
+    GIVEN calling dict_contains() with a dictionary
+    WHEN the key is not in the dictionary
+    THEN the function should return False
+    """
+    assert dict_contains({"key1": "value1"}, "key2") is False
+
+
+def test_dict_contains_3():
+    """Test dict_contains() function.
+
+    GIVEN calling dict_contains() with a dictionary
+    WHEN the key is in the dictionary
+    THEN the function should return True
+    """
+    assert dict_contains({"key1": "value1"}, "key1") is True
+
+
+def test_dict_contains_4():
+    """Test dict_contains() function.
+
+    GIVEN calling dict_contains() with a dictionary
+    WHEN the key and value are in the dictionary
+    THEN the function should return True
+    """
+    assert dict_contains({"key1": "value1"}, "key1", "value1") is True
+
+
+def test_dict_contains_5():
+    """Test dict_contains() function.
+
+    GIVEN calling dict_contains() with a dictionary
+    WHEN the key and value are not in the dictionary
+    THEN the function should return False
+    """
+    assert dict_contains({"key1": "value1"}, "key1", "value2") is False
+
+
+def test_dict_contains_6():
+    """Test dict_contains() function.
+
+    GIVEN calling dict_contains() with a dictionary
+    WHEN a regex is used for the key and the key is in the dictionary
+    THEN the function should return True
+    """
+    assert dict_contains({"key1": "value1"}, r"key\d", is_regex=True) is True
+
+
+def test_dict_contains_7():
+    """Test dict_contains() function.
+
+    GIVEN calling dict_contains() with a dictionary
+    WHEN a regex is used for the key and the key is not in the dictionary
+    THEN the function should return False
+    """
+    assert dict_contains({"key1": "value1"}, r"key\d\d", is_regex=True) is False
+
+
+def test_dict_contains_8():
+    """Test dict_contains() function.
+
+    GIVEN calling dict_contains() with a dictionary
+    WHEN a regex is used for a value and the value is in the dictionary
+    THEN the function should return True
+    """
+    assert dict_contains({"key1": "value1"}, "key1", r"\w+", is_regex=True) is True
+
+
+def test_dict_contains_9():
+    """Test dict_contains() function.
+
+    GIVEN calling dict_contains() with a dictionary
+    WHEN a regex is used for a value and the value is not in the dictionary
+    THEN the function should return False
+    """
+    assert dict_contains({"key1": "value1"}, "key1", r"\d{2}", is_regex=True) is False
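Read together, these cases describe a key/value lookup with optional regex matching. A minimal sketch that passes the assertions above, with the signature inferred from the test calls rather than taken from the project's source, might be:

```python
import re
from typing import Any


def dict_contains(
    dictionary: dict[str, Any], key: str, value: str | None = None, is_regex: bool = False
) -> bool:
    """Return True when the key (and, if given, the value) is found in the dictionary (sketch)."""
    for k, v in dictionary.items():
        # Match the key exactly, or as a regex when is_regex is True.
        key_found = re.search(key, k) is not None if is_regex else k == key
        if not key_found:
            continue
        if value is None:
            return True
        # Values may be stored as a scalar or a list; normalize to a list.
        values = v if isinstance(v, list) else [v]
        if is_regex:
            if any(re.search(value, str(item)) for item in values):
                return True
        elif value in values:
            return True
    return False
```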
 def test_dict_keys_to_lower() -> None:
@@ -200,45 +342,202 @@ def test_dict_keys_to_lower() -> None:
     assert dict_keys_to_lower(test_dict) == {"key1": "Value1", "key2": "Value2", "key3": "Value3"}


-def test_dict_values_to_lists_strings():
-    """Test converting dictionary values to lists of strings."""
-    dictionary = {
-        "key1": "value1",
-        "key2": ["value2", "value3", None],
-        "key3": {"key4": "value4"},
-        "key5": {"key6": {"key7": "value7"}},
-        "key6": None,
-        "key8": [1, 3, None, 4],
-        "key9": [None, "", "None"],
-        "key10": "None",
-        "key11": "",
-    }
-
-    result = dict_values_to_lists_strings(dictionary)
-    assert result == {
-        "key1": ["value1"],
-        "key10": ["None"],
-        "key11": [""],
-        "key2": ["None", "value2", "value3"],
-        "key3": {"key4": ["value4"]},
-        "key5": {"key6": {"key7": ["value7"]}},
-        "key6": ["None"],
-        "key8": ["1", "3", "4", "None"],
-        "key9": ["", "None", "None"],
-    }
-
-    result = dict_values_to_lists_strings(dictionary, strip_null_values=True)
-    assert result == {
-        "key1": ["value1"],
-        "key10": [],
-        "key11": [],
-        "key2": ["value2", "value3"],
-        "key3": {"key4": ["value4"]},
-        "key5": {"key6": {"key7": ["value7"]}},
-        "key6": [],
-        "key8": ["1", "3", "4"],
-        "key9": ["", "None"],
-    }
+def test_dict_values_to_lists_strings_1():
+    """Test the dict_values_to_lists_strings() function.
+
+    GIVEN a dictionary passed to the dict_values_to_lists_strings() function
+    WHEN the dictionary is empty
+    THEN the function should return an empty dictionary
+    """
+    assert dict_values_to_lists_strings({}) == {}
+    assert dict_values_to_lists_strings({}, strip_null_values=True) == {}
+
+
+def test_dict_values_to_lists_strings_2():
+    """Test the dict_values_to_lists_strings() function.
+
+    GIVEN a dictionary passed to the dict_values_to_lists_strings() function
+    WHEN the dictionary values are already lists of strings
+    THEN the function should return the dictionary
+    """
+    test_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}
+    assert dict_values_to_lists_strings(test_dict) == {
+        "key1": ["value1"],
+        "key2": ["value2", "value3"],
+    }
+    assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
+        "key1": ["value1"],
+        "key2": ["value2", "value3"],
+    }
+
+
+def test_dict_values_to_lists_strings_3():
+    """Test the dict_values_to_lists_strings() function.
+
+    GIVEN a dictionary passed to the dict_values_to_lists_strings() function
+    WHEN a value is None and strip_null_values is False
+    THEN convert None to an empty string
+    """
+    test_dict = {"key1": None, "key2": ["value", None]}
+    assert dict_values_to_lists_strings(test_dict) == {"key1": [""], "key2": ["", "value"]}
+
+
+def test_dict_values_to_lists_strings_4():
+    """Test the dict_values_to_lists_strings() function.
+
+    GIVEN a dictionary passed to the dict_values_to_lists_strings() function
+    WHEN a value is None and strip_null_values is True
+    THEN remove null values
+    """
+    test_dict = {"key1": None, "key2": ["value", None]}
+    assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
+        "key1": [],
+        "key2": ["value"],
+    }
+
+
+def test_dict_values_to_lists_strings_5():
+    """Test the dict_values_to_lists_strings() function.
+
+    GIVEN a dictionary passed to the dict_values_to_lists_strings() function
+    WHEN a value is the string "None" and strip_null_values is True or False
+    THEN ensure the value is not removed
+    """
+    test_dict = {"key1": "None", "key2": [None, "None"]}
+    assert dict_values_to_lists_strings(test_dict) == {"key1": ["None"], "key2": ["", "None"]}
+    assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
+        "key1": [],
+        "key2": ["None"],
+    }
+
+
+def test_dict_values_to_lists_strings_6():
+    """Test the dict_values_to_lists_strings() function.
+
+    GIVEN a dictionary passed to the dict_values_to_lists_strings() function
+    WHEN a value is another dictionary
+    THEN ensure the values in the inner dictionary are converted to lists of strings
+    """
+    test_dict = {"key1": {"key2": "value2", "key3": ["value3", None]}}
+    assert dict_values_to_lists_strings(test_dict) == {
+        "key1": {"key2": ["value2"], "key3": ["", "value3"]}
+    }
+    assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
+        "key1": {"key2": ["value2"], "key3": ["value3"]}
+    }
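These tests encode the conversion rules: values become sorted lists of strings, nested dictionaries are walked recursively, and `None` is either blanked or dropped depending on `strip_null_values`. A sketch consistent with the new assertions above (illustrative only; the repository's own implementation may differ) is:

```python
def dict_values_to_lists_strings(dictionary: dict, strip_null_values: bool = False) -> dict:
    """Convert every value to a sorted list of strings, recursing into nested dicts (sketch)."""
    output: dict = {}
    for key, value in dictionary.items():
        if isinstance(value, dict):
            # Recurse into nested dictionaries, preserving the strip behavior.
            output[key] = dict_values_to_lists_strings(value, strip_null_values=strip_null_values)
            continue
        items = value if isinstance(value, list) else [value]
        if strip_null_values:
            # A scalar None/""/"None" value becomes an empty list; inside lists,
            # genuine None values are dropped but literal "None" strings are kept.
            if not isinstance(value, list) and value in (None, "", "None"):
                output[key] = []
                continue
            output[key] = sorted(str(item) for item in items if item is not None)
        else:
            output[key] = sorted("" if item is None else str(item) for item in items)
    return output
```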
+def test_merge_dictionaries_1():
+    """Test merge_dictionaries() function.
+
+    GIVEN two dictionaries supplied to the merge_dictionaries() function
+    WHEN a value in dict1 is not a list
+    THEN raise a TypeError
+    """
+    test_dict_1 = {"key1": "value1", "key2": "value2"}
+    test_dict_2 = {"key3": ["value3"], "key4": ["value4"]}
+
+    with pytest.raises(TypeError, match=r"key.*is not a list"):
+        merge_dictionaries(test_dict_1, test_dict_2)
+
+
+def test_merge_dictionaries_2():
+    """Test merge_dictionaries() function.
+
+    GIVEN two dictionaries supplied to the merge_dictionaries() function
+    WHEN a value in dict2 is not a list
+    THEN raise a TypeError
+    """
+    test_dict_1 = {"key3": ["value3"], "key4": ["value4"]}
+    test_dict_2 = {"key1": "value1", "key2": "value2"}
+
+    with pytest.raises(TypeError, match=r"key.*is not a list"):
+        merge_dictionaries(test_dict_1, test_dict_2)
+
+
+def test_merge_dictionaries_3():
+    """Test merge_dictionaries() function.
+
+    GIVEN two dictionaries supplied to the merge_dictionaries() function
+    WHEN keys and values in both dictionaries are unique
+    THEN return a dictionary with the keys and values from both dictionaries
+    """
+    test_dict_1 = {"key1": ["value1"], "key2": ["value2"]}
+    test_dict_2 = {"key3": ["value3"], "key4": ["value4"]}
+
+    assert merge_dictionaries(test_dict_1, test_dict_2) == {
+        "key1": ["value1"],
+        "key2": ["value2"],
+        "key3": ["value3"],
+        "key4": ["value4"],
+    }
+
+
+def test_merge_dictionaries_4():
+    """Test merge_dictionaries() function.
+
+    GIVEN two dictionaries supplied to the merge_dictionaries() function
+    WHEN keys in both dictionaries are not unique
+    THEN return a dictionary with the merged keys and values from both dictionaries
+    """
+    test_dict_1 = {"key1": ["value1"], "key2": ["value2"]}
+    test_dict_2 = {"key1": ["value3"], "key2": ["value4"]}
+
+    assert merge_dictionaries(test_dict_1, test_dict_2) == {
+        "key1": ["value1", "value3"],
+        "key2": ["value2", "value4"],
+    }
+
+
+def test_merge_dictionaries_5():
+    """Test merge_dictionaries() function.
+
+    GIVEN two dictionaries supplied to the merge_dictionaries() function
+    WHEN keys and values in both dictionaries are not unique
+    THEN return a dictionary with the merged keys and values from both dictionaries
+    """
+    test_dict_1 = {"key1": ["a", "c"], "key2": ["a", "b"]}
+    test_dict_2 = {"key1": ["a", "b"], "key2": ["a", "c"]}
+
+    assert merge_dictionaries(test_dict_1, test_dict_2) == {
+        "key1": ["a", "b", "c"],
+        "key2": ["a", "b", "c"],
+    }
+
+
+def test_merge_dictionaries_6():
+    """Test merge_dictionaries() function.
+
+    GIVEN two dictionaries supplied to the merge_dictionaries() function
+    WHEN one of the dictionaries is empty
+    THEN return the other dictionary
+    """
+    test_dict_1 = {"key1": ["a", "c"], "key2": ["a", "b"]}
+    test_dict_2 = {}
+
+    assert merge_dictionaries(test_dict_1, test_dict_2) == {"key1": ["a", "c"], "key2": ["a", "b"]}
+
+    test_dict_1 = {}
+    test_dict_2 = {"key1": ["a", "c"], "key2": ["a", "b"]}
+    assert merge_dictionaries(test_dict_1, test_dict_2) == {"key1": ["a", "c"], "key2": ["a", "b"]}
+
+
+def test_merge_dictionaries_7():
+    """Test merge_dictionaries() function.
+
+    GIVEN two dictionaries supplied to the merge_dictionaries() function
+    WHEN keys and values in both dictionaries are not unique
+    THEN ensure the original dictionary objects are not modified
+    """
+    test_dict_1 = {"key1": ["a", "c"], "key2": ["a", "b"]}
+    test_dict_2 = {"key1": ["a", "b"], "key2": ["a", "c"]}
+
+    assert merge_dictionaries(test_dict_1, test_dict_2) == {
+        "key1": ["a", "b", "c"],
+        "key2": ["a", "b", "c"],
+    }
+    assert test_dict_1 == {"key1": ["a", "c"], "key2": ["a", "b"]}
+    assert test_dict_2 == {"key1": ["a", "b"], "key2": ["a", "c"]}
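The `merge_dictionaries()` tests require list values, a merged and sorted set of values per key, and no mutation of the inputs. One way to satisfy them is sketched below; the error message and the sorting are inferred from the assertions, not copied from the project's source:

```python
import copy


def merge_dictionaries(dict1: dict, dict2: dict) -> dict:
    """Merge two dictionaries of lists without mutating either argument (sketch)."""
    # Reject non-list values in either input, as the TypeError tests expect.
    for d in (dict1, dict2):
        for key, value in d.items():
            if not isinstance(value, list):
                raise TypeError(f"Value for key '{key}' is not a list")

    merged = copy.deepcopy(dict1)
    for key, values in dict2.items():
        merged.setdefault(key, [])
        # Combine the two value lists, dropping duplicates and sorting the result.
        merged[key] = sorted(set(merged[key]) | set(values))
    return merged
```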
 def test_rename_in_dict_1():
@@ -313,46 +612,197 @@ def test_rename_in_dict_5():
     }


-def test_remove_markdown_sections():
-    """Test removing markdown sections."""
-    text: str = """
----
-key: value
----
-
-Lorem ipsum `dolor sit` amet.
-
-```bash
-echo "Hello World"
-```
-
----
-dd
----
-"""
-    result = remove_markdown_sections(
-        text,
-        strip_codeblocks=True,
-        strip_frontmatter=True,
-        strip_inlinecode=True,
-    )
-    assert "```bash" not in result
-    assert "`dolor sit`" not in result
-    assert "---\nkey: value" not in result
-    assert "`" not in result
-
-    result = remove_markdown_sections(text)
-    assert "```bash" in result
-    assert "`dolor sit`" in result
-    assert "---\nkey: value" in result
-    assert "`" in result
+def test_remove_markdown_sections_1():
+    """Test remove_markdown_sections() function.
+
+    GIVEN a string with markdown sections
+    WHEN the remove_markdown_sections() function is called with the default arguments
+    THEN return the string without removing any markdown sections
+    """
+    text: str = """
+---
+key: value
+---
+
+# heading
+
+```bash
+echo "Hello world"
+```
+
+Lorem ipsum `inline_code` lorem ipsum.
+```
+echo "foo bar"
+```
+
+---
+dd
+---
+"""
+    assert remove_markdown_sections(text) == text
+
+
+def test_remove_markdown_sections_2():
+    """Test remove_markdown_sections() function.
+
+    GIVEN a string with markdown sections
+    WHEN the remove_markdown_sections() function is called with strip_codeblocks set to True
+    THEN return the string without the codeblocks
+    """
+    text: str = """
+---
+key: value
+---
+
+# heading
+
+```bash
+echo "Hello world"
+```
+
+Lorem ipsum `inline_code` lorem ipsum.
+```
+echo "foo bar"
+```
+
+---
+dd
+---
+"""
+    result = remove_markdown_sections(text, strip_codeblocks=True)
+    assert "inline_code" in result
+    assert "```bash" not in result
+    assert "```" not in result
+    assert "foo" not in result
+    assert "world" not in result
+    assert "key: value" in result
+    assert "heading" in result
+    assert "Lorem ipsum" in result
+    assert "---\n" in result
+    assert "dd" in result
+
+
+def test_remove_markdown_sections_3():
+    """Test remove_markdown_sections() function.
+
+    GIVEN a string with markdown sections
+    WHEN the remove_markdown_sections() function is called with strip_inlinecode set to True
+    THEN return the string without the inline code
+    """
+    text: str = """
+---
+key: value
+---
+
+# heading
+
+```bash
+echo "Hello world"
+```
+
+Lorem ipsum `inline_code` lorem ipsum.
+```
+echo "foo bar"
+```
+
+---
+dd
+---
+"""
+    result = remove_markdown_sections(text, strip_inlinecode=True)
+    assert "`inline_code`" not in result
+    assert "```bash" in result
+    assert "```" in result
+    assert "foo" in result
+    assert "world" in result
+    assert "key: value" in result
+    assert "heading" in result
+    assert "Lorem ipsum" in result
+    assert "---\n" in result
+    assert "dd" in result
+
+
+def test_remove_markdown_sections_4():
+    """Test remove_markdown_sections() function.
+
+    GIVEN a string with markdown sections
+    WHEN the remove_markdown_sections() function is called with strip_frontmatter set to True
+    THEN return the string without the frontmatter
+    """
+    text: str = """
+---
+key: value
+---
+
+# heading
+
+```bash
+echo "Hello world"
+```
+
+Lorem ipsum `inline_code` lorem ipsum.
+```
+echo "foo bar"
+```
+
+---
+dd
+---
+"""
+    result = remove_markdown_sections(text, strip_frontmatter=True)
+    assert "`inline_code`" in result
+    assert "```bash" in result
+    assert "```" in result
+    assert "foo" in result
+    assert "world" in result
+    assert "key: value" not in result
+    assert "heading" in result
+    assert "Lorem ipsum" in result
+    assert "---\n" in result
+    assert "dd" in result
+
+
+def test_remove_markdown_sections_5():
+    """Test remove_markdown_sections() function.
+
+    GIVEN a string with markdown sections
+    WHEN the remove_markdown_sections() function is called with all arguments set to True
+    THEN return the string without the frontmatter, inline code, and codeblocks
+    """
+    text: str = """
+---
+key: value
+---
+
+# heading
+
+```bash
+echo "Hello world"
+```
+
+Lorem ipsum `inline_code` lorem ipsum.
+```
+echo "foo bar"
+```
+
+---
+dd
+---
+"""
+    result = remove_markdown_sections(
+        text, strip_frontmatter=True, strip_inlinecode=True, strip_codeblocks=True
+    )
+    assert "`inline_code`" not in result
+    assert "bash" not in result
+    assert "```" not in result
+    assert "foo" not in result
+    assert "world" not in result
+    assert "key: value" not in result
+    assert "heading" in result
+    assert "Lorem ipsum" in result
+    assert "---\n" in result
+    assert "dd" in result
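The five `remove_markdown_sections()` tests describe three independent switches: `strip_codeblocks`, `strip_inlinecode`, and `strip_frontmatter`. A regex-based sketch that matches these assertions is shown below; the regexes are assumptions for illustration, not the project's code:

```python
import re


def remove_markdown_sections(
    text: str,
    strip_codeblocks: bool = False,
    strip_inlinecode: bool = False,
    strip_frontmatter: bool = False,
) -> str:
    """Optionally strip fenced code blocks, inline code, and YAML frontmatter (sketch)."""
    if strip_codeblocks:
        # Remove fenced blocks, including their content.
        text = re.sub(r"```.*?```", "", text, flags=re.DOTALL)
    if strip_inlinecode:
        # Remove single-backtick spans without touching triple-backtick fences.
        text = re.sub(r"(?<!`)`[^`\n]+`(?!`)", "", text)
    if strip_frontmatter:
        # Remove a leading --- ... --- frontmatter block.
        text = re.sub(r"^\s*---.*?---", "", text, flags=re.DOTALL)
    return text
```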
-def test_clean_dictionary():
-    """Test cleaning a dictionary."""
-    dictionary = {" *key* ": ["**value**", "[[value2]]", "#value3"]}
-
-    new_dict = clean_dictionary(dictionary)
-    assert new_dict == {"key": ["value", "value2", "value3"]}


 def test_validate_csv_bulk_imports_1(tmp_path):