Mirror of https://github.com/natelandau/obsidian-metadata.git
Synced 2025-11-08 05:03:47 -05:00

Commit: feat: greatly improve capturing all formats of inline metadata (#41)
README.md (29 lines changed)

@@ -110,6 +110,35 @@ When transposing to inline metadata, the `insert location` value in the config f

Unchanged context:

- **Commit changes to the vault**

Added section:

### Known Limitations

Multi-level frontmatter is not supported.

```yaml
# This works perfectly well
---
key: "value"
key2:
  - one
  - two
  - three
key3: ["foo", "bar", "baz"]
key4: value

# This will not work
---
key1:
  key2:
    - one
    - two
    - three
  key3:
    - one
    - two
    - three
---
```

Unchanged context:

### Configuration

`obsidian-metadata` requires a configuration file at `~/.obsidian_metadata.toml`. On first run, this file will be created. You can specify a new location for the configuration file with the `--config-file` option.
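A small sketch of the limitation described above, assuming nothing beyond what this diff shows: the tool models metadata as a flat mapping of keys to lists of values (see `dict_to_yaml()` later in this commit), while multi-level frontmatter parses to nested dictionaries that do not fit that shape.

```python
# Minimal sketch (not from the repo): single-level frontmatter fits the flat
# {key: [values]} model; nested frontmatter parses to nested dictionaries.
from ruamel.yaml import YAML

yaml = YAML(typ="safe")

flat = yaml.load("key2:\n  - one\n  - two\n")      # {'key2': ['one', 'two']}
nested = yaml.load("key1:\n  key2:\n    - one\n")  # {'key1': {'key2': ['one']}}
print(flat)
print(nested)
```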
poetry.lock (16 lines changed, generated)
@@ -251,6 +251,20 @@ files = [
     {file = "distlib-0.3.6.tar.gz", hash = "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46"},
 ]
 
+[[package]]
+name = "emoji"
+version = "2.2.0"
+description = "Emoji for Python"
+category = "main"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+files = [
+    {file = "emoji-2.2.0.tar.gz", hash = "sha256:a2986c21e4aba6b9870df40ef487a17be863cb7778dcf1c01e25917b7cd210bb"},
+]
+
+[package.extras]
+dev = ["coverage", "coveralls", "pytest"]
+
 [[package]]
 name = "exceptiongroup"
 version = "1.1.1"

@@ -1321,4 +1335,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.10"
-content-hash = "791c4a20b082a0ae43b35023ff9db5c9cc212f44c4ec5180a10042970f796af5"
+content-hash = "26942a873c9b2cf86691e8cfee4ad0eaa673254b189010ec6600b448fdbad831"
pyproject.toml

@@ -18,6 +18,7 @@
 [tool.poetry.dependencies]
 charset-normalizer = "2.1.0"
+emoji = "^2.2.0"
 loguru = "^0.6.0"
 python = "^3.10"
 questionary = "^1.10.0"
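The new `emoji` dependency exists because the new parser (parsers.py, below) round-trips each line through `emoji.demojize()` and `emoji.emojize()` so that emoji inside inline-metadata keys and values survive the regex matching. A minimal sketch of that round trip, not taken from the repo:

```python
# Minimal sketch (sample line invented): demojize before matching, emojize after.
import emoji

line = "📅:: 2023-03-01"
demojized = emoji.demojize(line, delimiters=(";", ";"))     # e.g. ";calendar;:: 2023-03-01"
restored = emoji.emojize(demojized, delimiters=(";", ";"))  # back to "📅:: 2023-03-01"
print(demojized)
print(restored)
```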
src/obsidian_metadata/_utils/__init__.py

@@ -8,11 +8,8 @@ from obsidian_metadata._utils.utilities import (
     delete_from_dict,
     dict_contains,
     dict_keys_to_lower,
-    dict_values_to_lists_strings,
     docstring_parameter,
-    inline_metadata_from_string,
     merge_dictionaries,
-    remove_markdown_sections,
     rename_in_dict,
     validate_csv_bulk_imports,
     version_callback,

@@ -25,13 +22,10 @@ __all__ = [
     "delete_from_dict",
     "dict_contains",
     "dict_keys_to_lower",
-    "dict_values_to_lists_strings",
     "docstring_parameter",
     "LoggerManager",
-    "inline_metadata_from_string",
     "merge_dictionaries",
     "rename_in_dict",
-    "remove_markdown_sections",
     "validate_csv_bulk_imports",
     "version_callback",
 ]
src/obsidian_metadata/_utils/console.py

@@ -2,3 +2,4 @@
 from rich.console import Console
 
 console = Console()
+console_no_markup = Console(markup=False)
src/obsidian_metadata/_utils/utilities.py

Three helper functions are removed from this module.

@@ -78,48 +78,6 @@ def dict_keys_to_lower(dictionary: dict) -> dict:
     return {key.lower(): value for key, value in dictionary.items()}

Removed:

def dict_values_to_lists_strings(
    dictionary: dict,
    strip_null_values: bool = False,
) -> dict:
    """Convert all values in a dictionary to lists of strings.

    Args:
        dictionary (dict): Dictionary to convert
        strip_null_values (bool): Whether to strip null values

    Returns:
        dict: Dictionary with all values converted to lists of strings

        {key: sorted(new_dict[key]) for key in sorted(new_dict)}
    """
    dictionary = copy.deepcopy(dictionary)
    new_dict = {}

    if strip_null_values:
        for key, value in dictionary.items():
            if isinstance(value, list):
                new_dict[key] = sorted([str(item) for item in value if item is not None])
            elif isinstance(value, dict):
                new_dict[key] = dict_values_to_lists_strings(value, strip_null_values=True)  # type: ignore[assignment]
            elif value is None or value == "None" or not value:
                new_dict[key] = []
            else:
                new_dict[key] = [str(value)]

        return new_dict

    for key, value in dictionary.items():
        if isinstance(value, list):
            new_dict[key] = sorted([str(item) if item is not None else "" for item in value])
        elif isinstance(value, dict):
            new_dict[key] = dict_values_to_lists_strings(value)  # type: ignore[assignment]
        else:
            new_dict[key] = [str(value) if value is not None else ""]

    return new_dict

@@ -183,21 +141,6 @@ def docstring_parameter(*sub: Any) -> Any:
     return dec

Removed:

def inline_metadata_from_string(string: str) -> list[tuple[Any, ...]]:
    """Search for inline metadata in a string and return a list of tuples containing (key, value).

    Args:
        string (str): String to get metadata from

    Returns:
        tuple[str]: (key, value)
    """
    from obsidian_metadata.models import Patterns

    results = Patterns().find_inline_metadata.findall(string)
    return [tuple(filter(None, x)) for x in results]

Unchanged context:

 def merge_dictionaries(dict1: dict, dict2: dict) -> dict:
     """Merge two dictionaries. When the values are lists, they are merged and sorted.

@@ -253,35 +196,6 @@ def rename_in_dict(
     return dictionary

Removed:

def remove_markdown_sections(
    text: str,
    strip_codeblocks: bool = False,
    strip_inlinecode: bool = False,
    strip_frontmatter: bool = False,
) -> str:
    """Strip unwanted markdown sections from text. This is used to remove code blocks and frontmatter from the body of notes before tags and inline metadata are processed.

    Args:
        text (str): Text to remove code blocks from
        strip_codeblocks (bool, optional): Strip code blocks. Defaults to False.
        strip_inlinecode (bool, optional): Strip inline code. Defaults to False.
        strip_frontmatter (bool, optional): Strip frontmatter. Defaults to False.

    Returns:
        str: Text without code blocks
    """
    if strip_codeblocks:
        text = re.sub(r"`{3}.*?`{3}", "", text, flags=re.DOTALL)

    if strip_inlinecode:
        text = re.sub(r"(?<!`{2})`[^`]+?`", "", text)

    if strip_frontmatter:
        text = re.sub(r"^\s*---.*?---", "", text, flags=re.DOTALL)

    return text

Unchanged context:

 def validate_csv_bulk_imports(  # noqa: C901
     csv_path: Path, note_paths: list
 ) -> dict[str, list[dict[str, str]]]:
src/obsidian_metadata/models/__init__.py

@@ -2,15 +2,9 @@
 from obsidian_metadata.models.enums import (
     InsertLocation,
     MetadataType,
+    Wrapping,
 )
+from obsidian_metadata.models.metadata import InlineField, dict_to_yaml
-from obsidian_metadata.models.patterns import Patterns  # isort: skip
-from obsidian_metadata.models.metadata import (
-    Frontmatter,
-    InlineMetadata,
-    InlineTags,
-    VaultMetadata,
-)
 from obsidian_metadata.models.notes import Note
 from obsidian_metadata.models.vault import Vault, VaultFilter

@@ -18,15 +12,13 @@ from obsidian_metadata.models.application import Application  # isort: skip
 __all__ = [
     "Application",
-    "Frontmatter",
+    "dict_to_yaml",
-    "InlineMetadata",
+    "InlineField",
-    "InlineTags",
     "InsertLocation",
     "LoggerManager",
     "MetadataType",
     "Note",
-    "Patterns",
     "Vault",
     "VaultFilter",
-    "VaultMetadata",
+    "Wrapping",
 ]
src/obsidian_metadata/models/application.py

@@ -84,8 +84,8 @@ class Application:
             "Add new metadata to your vault. Currently only supports adding to the frontmatter of a note."
         )
 
-        area = self.questions.ask_area()
-        match area:
+        meta_type = self.questions.ask_area()
+        match meta_type:
             case MetadataType.FRONTMATTER | MetadataType.INLINE:
                 key = self.questions.ask_new_key(question="Enter the key for the new metadata")
                 if key is None:  # pragma: no cover

@@ -98,7 +98,7 @@ class Application:
                     return
 
                 num_changed = self.vault.add_metadata(
-                    area=area, key=key, value=value, location=self.vault.insert_location
+                    meta_type=meta_type, key=key, value=value, location=self.vault.insert_location
                 )
                 if num_changed == 0:  # pragma: no cover
                     alerts.warning("No notes were changed")

@@ -112,7 +112,7 @@ class Application:
                     return
 
                 num_changed = self.vault.add_metadata(
-                    area=area, value=tag, location=self.vault.insert_location
+                    meta_type=meta_type, value=tag, location=self.vault.insert_location
                 )
 
                 if num_changed == 0:  # pragma: no cover

@@ -373,23 +373,24 @@ class Application:
         match self.questions.ask_selection(choices=choices, question="Select an action"):
             case "all_metadata":
                 console.print("")
-                self.vault.metadata.print_metadata(area=MetadataType.ALL)
+                # TODO: Add a way to print metadata
+                self.vault.print_metadata(meta_type=MetadataType.ALL)
                 console.print("")
             case "all_frontmatter":
                 console.print("")
-                self.vault.metadata.print_metadata(area=MetadataType.FRONTMATTER)
+                self.vault.print_metadata(meta_type=MetadataType.FRONTMATTER)
                 console.print("")
             case "all_inline":
                 console.print("")
-                self.vault.metadata.print_metadata(area=MetadataType.INLINE)
+                self.vault.print_metadata(meta_type=MetadataType.INLINE)
                 console.print("")
             case "all_keys":
                 console.print("")
-                self.vault.metadata.print_metadata(area=MetadataType.KEYS)
+                self.vault.print_metadata(meta_type=MetadataType.KEYS)
                 console.print("")
             case "all_tags":
                 console.print("")
-                self.vault.metadata.print_metadata(area=MetadataType.TAGS)
+                self.vault.print_metadata(meta_type=MetadataType.TAGS)
                 console.print("")
             case _:
                 return

@@ -503,10 +504,10 @@ class Application:
             return
 
         num_changed = self.vault.delete_metadata(
-            key=key_to_delete, area=MetadataType.ALL, is_regex=True
+            key=key_to_delete, meta_type=MetadataType.ALL, is_regex=True
         )
         if num_changed == 0:
-            alerts.warning(f"No notes found with a key matching: [reverse]{key_to_delete}[/]")
+            alerts.warning(f"No notes found with a key matching regex: [reverse]{key_to_delete}[/]")
             return
 
         alerts.success(

@@ -527,7 +528,7 @@ class Application:
             return
 
         num_changed = self.vault.delete_metadata(
-            key=key, value=value, area=MetadataType.ALL, is_regex=True
+            key=key, value=value, meta_type=MetadataType.ALL, is_regex=True
         )
         if num_changed == 0:
             alerts.warning(f"No notes found matching: {key}: {value}")
src/obsidian_metadata/models/enums.py

@@ -3,16 +3,6 @@
 from enum import Enum
 
 
-class MetadataType(Enum):
-    """Enum class for the type of metadata."""
-
-    FRONTMATTER = "Frontmatter"
-    INLINE = "Inline Metadata"
-    TAGS = "Inline Tags"
-    KEYS = "Metadata Keys Only"
-    ALL = "All Metadata"
-
-
 class InsertLocation(Enum):
     """Location to add metadata to notes.

@@ -25,3 +15,22 @@ class InsertLocation(Enum):
     TOP = "Top"
     AFTER_TITLE = "After title"
     BOTTOM = "Bottom"
+
+
+class MetadataType(Enum):
+    """Enum class for the type of metadata."""
+
+    ALL = "Inline, Frontmatter, and Tags"
+    FRONTMATTER = "Frontmatter"
+    INLINE = "Inline Metadata"
+    KEYS = "Metadata Keys Only"
+    META = "Inline and Frontmatter. No Tags"
+    TAGS = "Inline Tags"
+
+
+class Wrapping(Enum):
+    """Wrapping for inline metadata within a block of text."""
+
+    BRACKETS = "Brackets"
+    PARENS = "Parentheses"
+    NONE = None
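The commit is about capturing every format of inline metadata, and `Wrapping` is how the new code remembers which of the three forms a field was written in. A toy illustration of those forms (the helper below is invented for illustration; the real detection is done by `Parser.return_inline_metadata()` later in this commit):

```python
# Toy illustration only - not the project's actual detection logic.
from obsidian_metadata.models.enums import Wrapping


def wrapping_of(field_text: str) -> Wrapping:
    """Classify how an inline `key:: value` field is wrapped."""
    if field_text.startswith("[") and field_text.endswith("]"):
        return Wrapping.BRACKETS
    if field_text.startswith("(") and field_text.endswith(")"):
        return Wrapping.PARENS
    return Wrapping.NONE


print(wrapping_of("status:: draft"))    # Wrapping.NONE - bare field on its own line
print(wrapping_of("[status:: draft]"))  # Wrapping.BRACKETS - field embedded in text
print(wrapping_of("(status:: draft)"))  # Wrapping.PARENS - field embedded in text
```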
src/obsidian_metadata/models/metadata.py

@@ -1,663 +1,138 @@

The module is rewritten. Removed: the module-level `PATTERNS = Patterns()` and `INLINE_TAG_KEY` constants and the `VaultMetadata`, `Frontmatter`, `InlineMetadata`, and `InlineTags` classes, together with their `_grab_note_frontmatter` / `_grab_inline_metadata` / `_grab_inline_tags` helpers and their `add` / `contains` / `delete` / `delete_all` / `has_changes` / `rename` / `index_metadata` / `print_metadata` / `to_yaml` methods, plus the imports they depended on (the `_utils` helpers, `Patterns`, rich tables and columns, and the `FrontmatterError` / `InlineMetadataError` / `InlineTagError` exceptions).

New module content:

"""Work with metadata items."""

import re
from io import StringIO

import rich.repr
from ruamel.yaml import YAML

from obsidian_metadata.models.enums import MetadataType, Wrapping


def dict_to_yaml(dictionary: dict[str, list[str]], sort_keys: bool = False) -> str:
    """Return a dictionary of {key: [values]} as a YAML string.

    Args:
        dictionary (dict[str, list[str]]): Dictionary of {key: [values]}.
        sort_keys (bool, optional): Sort the keys. Defaults to False.

    Returns:
        str: Frontmatter as a YAML string.
    """
    if sort_keys:
        dictionary = dict(sorted(dictionary.items()))

    for key, value in dictionary.items():
        if len(value) == 1:
            dictionary[key] = value[0]  # type: ignore [assignment]

    yaml = YAML()
    yaml.indent(mapping=2, sequence=4, offset=2)
    string_stream = StringIO()
    yaml.dump(dictionary, string_stream)
    yaml_value = string_stream.getvalue()
    string_stream.close()
    if yaml_value == "{}\n":
        return ""
    return yaml_value


@rich.repr.auto
class InlineField:
    """Representation of a single inline field.

    Attributes:
        meta_type (MetadataType): Metadata category.
        clean_key (str): Cleaned key - key without surrounding markdown.
        key (str): Metadata key - complete key found in note.
        key_close (str): Closing key markdown.
        key_open (str): Opening key markdown.
        normalized_key (str): Key converted to lowercase with spaces replaced with dashes.
        normalized_value (str): Value stripped of leading and trailing whitespace.
        value (str): Metadata value - complete value found in note.
        wrapping (Wrapping): Inline metadata may be wrapped with [] or ().
    """

    def __init__(
        self,
        meta_type: MetadataType,
        key: str,
        value: str,
        wrapping: Wrapping = Wrapping.NONE,
        is_changed: bool = False,
    ) -> None:
        self.meta_type = meta_type
        self.key = key
        self.value = value
        self.wrapping = wrapping
        self.is_changed = is_changed

        # Clean keys of surrounding markdown and convert to lowercase
        self.clean_key, self.normalized_key, self.key_open, self.key_close = (
            self._clean_key(self.key) if self.key else (None, None, "", "")
        )

        # Normalize value for display
        self.normalized_value = "-" if re.match(r"^\s*$", self.value) else self.value.strip()

    def __rich_repr__(self) -> rich.repr.Result:  # pragma: no cover
        """Rich representation of the inline field."""
        yield "clean_key", self.clean_key
        yield "is_changed", self.is_changed
        yield "key_close", self.key_close
        yield "key_open", self.key_open
        yield "key", self.key
        yield "meta_type", self.meta_type.value
        yield "normalized_key", self.normalized_key
        yield "normalized_value", self.normalized_value
        yield "value", self.value
        yield "wrapping", self.wrapping.value

    def __eq__(self, other: object) -> bool:
        """Compare two InlineField objects."""
        if not isinstance(other, InlineField):
            return NotImplemented
        return (
            self.key == other.key
            and self.value == other.value
            and self.meta_type == other.meta_type
        )

    def __hash__(self) -> int:
        """Hash the InlineField object."""
        return hash((self.key, self.value, self.meta_type))

    def _clean_key(self, text: str) -> tuple[str, str, str, str]:
        """Remove markdown from the key.

        Creates the following attributes:
            clean_key: The key stripped of opening and closing markdown.
            normalized_key: The key converted to lowercase with spaces replaced with dashes.
            key_open: The opening markdown.
            key_close: The closing markdown.

        Args:
            text (str): Key to clean.

        Returns:
            tuple[str, str, str, str]: Cleaned key, normalized key, opening markdown, closing markdown.
        """
        cleaned = text
        if tmp := re.search(r"^([\*#_ `~]+)", text):
            key_open = tmp.group(0)
            cleaned = re.sub(rf"^{re.escape(key_open)}", "", text)
        else:
            key_open = ""

        if tmp := re.search(r"([\*#_ `~]+)$", text):
            key_close = tmp.group(0)
            cleaned = re.sub(rf"{re.escape(key_close)}$", "", cleaned)
        else:
            key_close = ""

        normalized = cleaned.replace(" ", "-").lower()

        return cleaned, normalized, key_open, key_close
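A quick usage sketch of the two new pieces (the inputs are invented; only the APIs shown above are used):

```python
# Minimal usage sketch (inputs invented for illustration).
from obsidian_metadata.models.enums import MetadataType, Wrapping
from obsidian_metadata.models.metadata import InlineField, dict_to_yaml

# dict_to_yaml() collapses single-item lists to scalars and can sort keys.
print(dict_to_yaml({"tags": ["one", "two"], "status": ["draft"]}, sort_keys=True))
# status: draft
# tags:
#   - one
#   - two

# InlineField keeps the raw key/value plus normalized forms for matching.
field = InlineField(
    meta_type=MetadataType.INLINE,
    key="**Status**",
    value=" draft",
    wrapping=Wrapping.NONE,
)
print(field.clean_key, field.normalized_key, field.normalized_value)
# Status status draft
```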
(File diff suppressed because it is too large.)
src/obsidian_metadata/models/parsers.py (new file, 194 lines)
@@ -0,0 +1,194 @@

"""Parsers for Obsidian metadata files."""

from dataclasses import dataclass

import emoji
import regex as re

from obsidian_metadata.models.enums import Wrapping


@dataclass
class Parser:
    """Regex parsers for Obsidian metadata files.

    All methods return a list of matches
    """

    # Reusable regex patterns
    internal_link = r"\[\[[^\[\]]*?\]\]"  # An Obsidian link of the form [[<link>]]
    chars_not_in_tags = r"\u2000-\u206F\u2E00-\u2E7F'!\"#\$%&\(\)\*+,\.:;<=>?@\^`\{\|\}~\[\]\\\s"

    # Compiled regex patterns
    tag = re.compile(
        r"""
        (?:
            (?:^|\s|\\{2})      # If starts with newline, space, or "\\"
            (?P<tag>\#[^\u2000-\u206F\u2E00-\u2E7F'!\"\#\$%&\(\)\*+,\.:;<=>?@\^`\{\|\}~\[\]\\\s]+)  # capture tag
            |                   # Else
            (?:(?<=
                \#[^\u2000-\u206F\u2E00-\u2E7F'!\"\#\$%&\(\)\*+,\.:;<=>?@\^`\{\|\}~\[\]\\\s]+
            ))                  # if lookbehind is a tag
            (?P<tag>\#[^\u2000-\u206F\u2E00-\u2E7F'!\"\#\$%&\(\)\*+,\.:;<=>?@\^`\{\|\}~\[\]\\\s]+)  # capture tag
            |                   # Else
            (*FAIL)
        )
        """,
        re.X,
    )
    frontmatter_complete = re.compile(r"^\s*(?P<frontmatter>---.*?---)", flags=re.DOTALL)
    frontmatter_data = re.compile(
        r"(?P<open>^\s*---)(?P<frontmatter>.*?)(?P<close>---)", flags=re.DOTALL
    )
    code_block = re.compile(r"```.*?```", flags=re.DOTALL)
    inline_code = re.compile(r"(?<!`{2})`[^`]+?` ?")
    inline_metadata = re.compile(
        r"""
        (?:                                 # Conditional
            (?=                             # If opening wrapper is a bracket or parenthesis
                (
                    (?<!\[)\[(?!\[)         # Single bracket
                    |                       # Or
                    (?<!\()\((?!\()         # Single parenthesis
                )
            )
            (?:                             # Conditional
                (?=                         # If opening wrapper is a bracket
                    (?<!\[)\[(?!\[)         # Single bracket
                )
                (?<!\[)(?P<open>\[)(?!\[)   # Open bracket
                (?P<key>[0-9\p{Letter}\w\s_/-;\*\~`]+?)  # Find key
                (?<!:)::(?!:)               # Separator
                (?P<value>.*?)              # Value
                (?<!\])(?P<close>\])(?!\])  # Close bracket
                |                           # Else if opening wrapper is a parenthesis
                (?<!\()(?P<open>\()(?!\()   # Open parens
                (?P<key>[0-9\p{Letter}\w\s_/-;\*\~`]+?)  # Find key
                (?<!:)::(?!:)               # Separator
                (?P<value>.*?)              # Value
                (?<!\))(?P<close>\))(?!\))  # Close parenthesis
            )
            |                               # Else grab entire line
            (?P<key>[0-9\p{Letter}\w\s_/-;\*\~`]+?)  # Find key
            (?<!:)::(?!:)                   # Separator
            (?P<value>.*)                   # Value
        )
        """,
        re.X | re.I,
    )
    top_with_header = re.compile(
        r"""^\s*                # Start of note
        (?P<top>                # Capture the top of the note
            .*                  # Anything above the first header
            \#+[ ].*?[\r\n]     # Full header, if it exists
        )                       # End capture group
        """,
        flags=re.DOTALL | re.X,
    )
    validate_key_text = re.compile(r"[^-_\w\d\/\*\u263a-\U0001f999]")
    validate_tag_text = re.compile(r"[ \|,;:\*\(\)\[\]\\\.\n#&]")

    def return_inline_metadata(self, line: str) -> list[tuple[str, str, Wrapping]] | None:
        """Return a list of metadata matches for a single line.

        Args:
            line (str): The text to search.

        Returns:
            list[tuple[str, str, Wrapping]] | None: A list of tuples containing the key, value, and wrapping type.
        """
        sep = r"(?<!:)::(?!:)"
        if not re.search(sep, line):
            return None

        # Replace emoji with text
        line = emoji.demojize(line, delimiters=(";", ";"))

        matches = []
        for match in self.inline_metadata.finditer(line):
            match match.group("open"):
                case "[":
                    wrapper = Wrapping.BRACKETS
                case "(":
                    wrapper = Wrapping.PARENS
                case _:
                    wrapper = Wrapping.NONE

            matches.append(
                (
                    emoji.emojize(match.group("key"), delimiters=(";", ";")),
                    emoji.emojize(match.group("value"), delimiters=(";", ";")),
                    wrapper,
                )
            )

        return matches

    def return_frontmatter(self, text: str, data_only: bool = False) -> str | None:
        """Return a list of metadata matches.

        Args:
            text (str): The text to search.
            data_only (bool, optional): If True, only return the frontmatter data and strip the "---" lines from the returned string. Defaults to False

        Returns:
            str | None: The frontmatter block, or None if no frontmatter is found.
        """
        if data_only:
            result = self.frontmatter_data.search(text)
        else:
            result = self.frontmatter_complete.search(text)

        if result:
            return result.group("frontmatter").strip()
        return None

    def return_tags(self, text: str) -> list[str]:
        """Return a list of tags.

        Args:
            text (str): The text to search.

        Returns:
            list[str]: A list of tags.
        """
        return [
            t.group("tag")
            for t in self.tag.finditer(text)
            if not re.match(r"^#[0-9]+$", t.group("tag"))
        ]

    def return_top_with_header(self, text: str) -> str:
        """Returns the top content of a string until the end of the first markdown header found.

        Args:
            text (str): The text to search.

        Returns:
            str: The top content of the string.
        """
        result = self.top_with_header.search(text)
        if result:
            return result.group("top")
        return None

    def strip_frontmatter(self, text: str, data_only: bool = False) -> str:
        """Strip frontmatter from a string.

        Args:
            text (str): The text to search.
            data_only (bool, optional): If True, only strip the frontmatter data and leave the '---' lines. Defaults to False
        """
        if data_only:
            return self.frontmatter_data.sub(r"\g<open>\n\g<close>", text)

        return self.frontmatter_complete.sub("", text)

    def strip_code_blocks(self, text: str) -> str:
        """Strip code blocks from a string."""
        return self.code_block.sub("", text)

    def strip_inline_code(self, text: str) -> str:
        """Strip inline code from a string."""
        return self.inline_code.sub("", text)
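A short usage sketch for the new parser (the sample lines are invented; the calls are the ones defined above):

```python
# Minimal usage sketch (sample note text invented for illustration).
from obsidian_metadata.models.parsers import Parser

p = Parser()

# Bare, bracketed, and parenthesized inline metadata are all captured,
# and the wrapping is reported alongside each key/value pair.
print(p.return_inline_metadata("status:: draft"))
print(p.return_inline_metadata("Some text [status:: draft] more text"))
print(p.return_inline_metadata("Some text (status:: draft) more text"))

# Tags are found with the new tag pattern; purely numeric "tags" are ignored.
print(p.return_tags("#project/home and #2023 and #todo"))  # ['#project/home', '#todo']
```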
src/obsidian_metadata/models/patterns.py (deleted file, 62 lines)

@@ -1,62 +0,0 @@

Removed content:

"""Regexes for parsing frontmatter and note content."""

from dataclasses import dataclass

import regex as re
from regex import Pattern


@dataclass
class Patterns:
    """Regex patterns for parsing frontmatter and note content."""

    find_inline_tags: Pattern[str] = re.compile(
        r"""
        (?:^|[ \|_,;:\*\)\[\]\\\.]|(?<!\])\()   # Before tag is start of line or separator
        (?<!\/\/[\w\d_\.\(\)\/&_-]+)            # Before tag is not a link
        \#([^ \|,;:\*\(\)\[\]\\\.\n#&]+)        # Match tag until separator or end of line
        """,
        re.MULTILINE | re.X,
    )

    find_inline_metadata: Pattern[str] = re.compile(
        r"""                                    # First look for in-text key values
        (?:^\[| \[)                             # Find key with starting bracket
        ([-_\w\d\/\*\u263a-\U0001f999]+?)::[ ]? # Find key
        (.*?)\]                                 # Find value until closing bracket
        |                                       # Else look for key values at start of line
        (?:^|[^ \w\d]+|^ *>?[-\d\|]?\.? )       # Any non-word or non-digit character
        ([-_\w\d\/\*\u263a-\U0001f9995]+?)::(?!\n)(?:[ ](?!\n))?  # Capture the key if not a new line
        (.*?)$                                  # Capture the value
        """,
        re.X | re.MULTILINE,
    )

    frontmatter_block: Pattern[str] = re.compile(r"^\s*(?P<frontmatter>---.*?---)", flags=re.DOTALL)
    frontmatt_block_strip_separators: Pattern[str] = re.compile(
        r"^\s*---(?P<frontmatter>.*?)---", flags=re.DOTALL
    )
    # This pattern will return a tuple of 4 values, two will be empty and will need to be stripped before processing further

    top_with_header: Pattern[str] = re.compile(
        r"""^\s*                                # Start of note
        (?P<top>                                # Capture the top of the note
            (---.*?---)?                        # Frontmatter, if it exists
            \s*                                 # Any whitespace
            (                                   # Full header, if it exists
                \#+[ ]                          # Match start of any header level
                (                               # Text of header
                    [\w\d]+                     # Word or digit
                    |                           # Or
                    [\[\]\(\)\+\{\}\"'\-\.\/\*\$\| ]+  # Special characters
                    |                           # Or
                    [\u263a-\U0001f999]+        # Emoji
                )+                              # End of header text
            )?                                  # End of full header
        )                                       # End capture group
        """,
        flags=re.DOTALL | re.X,
    )

    validate_key_text: Pattern[str] = re.compile(r"[^-_\w\d\/\*\u263a-\U0001f999]")
    validate_tag_text: Pattern[str] = re.compile(r"[ \|,;:\*\(\)\[\]\\\.\n#&]")
src/obsidian_metadata/models/questions.py

@@ -13,10 +13,10 @@ import questionary
 import typer
 
 from obsidian_metadata.models.enums import InsertLocation, MetadataType
-from obsidian_metadata.models.patterns import Patterns
+from obsidian_metadata.models.parsers import Parser
 from obsidian_metadata.models.vault import Vault
 
-PATTERNS = Patterns()
+P = Parser()
 
 # Reset the default style of the questionary prompts qmark
 questionary.prompts.checkbox.DEFAULT_STYLE = questionary.Style([("qmark", "")])

@@ -95,7 +95,7 @@ class Questions:
         if len(text) < 1:
             return "Tag cannot be empty"
 
-        if not self.vault.metadata.contains(area=MetadataType.TAGS, value=text):
+        if not self.vault.contains_metadata(meta_type=MetadataType.TAGS, key=None, value=text):
             return f"'{text}' does not exist as a tag in the vault"
 
         return True

@@ -109,7 +109,7 @@ class Questions:
         if len(text) < 1:
             return "Key cannot be empty"
 
-        if not self.vault.metadata.contains(area=MetadataType.KEYS, key=text):
+        if not self.vault.contains_metadata(meta_type=MetadataType.META, key=text):
             return f"'{text}' does not exist as a key in the vault"
 
         return True

@@ -128,7 +128,7 @@ class Questions:
         except re.error as error:
             return f"Invalid regex: {error}"
 
-        if not self.vault.metadata.contains(area=MetadataType.KEYS, key=text, is_regex=True):
+        if not self.vault.contains_metadata(meta_type=MetadataType.META, key=text, is_regex=True):
             return f"'{text}' does not exist as a key in the vault"
 
         return True

@@ -142,7 +142,7 @@ class Questions:
         Returns:
             bool | str: True if the key is valid, otherwise a string with the error message.
         """
-        if PATTERNS.validate_key_text.search(text) is not None:
+        if P.validate_key_text.search(text) is not None:
             return "Key cannot contain spaces or special characters"
 
         if len(text) == 0:

@@ -159,7 +159,7 @@ class Questions:
         Returns:
             bool | str: True if the tag is valid, otherwise a string with the error message.
         """
-        if PATTERNS.validate_tag_text.search(text) is not None:
+        if P.validate_tag_text.search(text) is not None:
             return "Tag cannot contain spaces or special characters"
 
         if len(text) == 0:

@@ -179,8 +179,8 @@ class Questions:
         if len(text) < 1:
             return "Value cannot be empty"
 
-        if self.key is not None and self.vault.metadata.contains(
-            area=MetadataType.ALL, key=self.key, value=text
+        if self.key is not None and self.vault.contains_metadata(
+            meta_type=MetadataType.ALL, key=self.key, value=text
         ):
             return f"{self.key}:{text} already exists"

@@ -248,8 +248,8 @@ class Questions:
         if len(text) == 0:
             return True
 
-        if self.key is not None and not self.vault.metadata.contains(
-            area=MetadataType.ALL, key=self.key, value=text
+        if self.key is not None and not self.vault.contains_metadata(
+            meta_type=MetadataType.ALL, key=self.key, value=text
         ):
             return f"{self.key}:{text} does not exist"

@@ -272,8 +272,8 @@ class Questions:
         except re.error as error:
             return f"Invalid regex: {error}"
 
-        if self.key is not None and not self.vault.metadata.contains(
|
if self.key is not None and not self.vault.contains_metadata(
|
||||||
area=MetadataType.ALL, key=self.key, value=text, is_regex=True
|
meta_type=MetadataType.ALL, key=self.key, value=text, is_regex=True
|
||||||
):
|
):
|
||||||
return f"No values in {self.key} match regex: {text}"
|
return f"No values in {self.key} match regex: {text}"
|
||||||
|
|
||||||
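The validators above follow questionary's convention of returning either `True` or an error string. A hedged sketch of that contract, using the new `Vault.contains_metadata()` API introduced later in this commit (the function name and wiring are illustrative, not the project's own code):

```python
from obsidian_metadata.models.enums import MetadataType
from obsidian_metadata.models.vault import Vault


def validate_existing_tag(vault: Vault, text: str) -> bool | str:
    """Return True when the tag exists in the vault, otherwise an error string.

    Mirrors the validators above; the name and wiring are illustrative.
    """
    if len(text) < 1:
        return "Tag cannot be empty"
    if not vault.contains_metadata(meta_type=MetadataType.TAGS, key=None, value=text):
        return f"'{text}' does not exist as a tag in the vault"
    return True
```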
|
|||||||
@@ -11,14 +11,15 @@ from typing import Any
|
|||||||
import rich.repr
|
import rich.repr
|
||||||
import typer
|
import typer
|
||||||
from rich import box
|
from rich import box
|
||||||
|
from rich.columns import Columns
|
||||||
from rich.prompt import Confirm
|
from rich.prompt import Confirm
|
||||||
from rich.table import Table
|
from rich.table import Table
|
||||||
|
|
||||||
from obsidian_metadata._config.config import VaultConfig
|
from obsidian_metadata._config.config import VaultConfig
|
||||||
from obsidian_metadata._utils import alerts
|
from obsidian_metadata._utils import alerts, dict_contains, merge_dictionaries
|
||||||
from obsidian_metadata._utils.alerts import logger as log
|
from obsidian_metadata._utils.alerts import logger as log
|
||||||
from obsidian_metadata._utils.console import console
|
from obsidian_metadata._utils.console import console, console_no_markup
|
||||||
from obsidian_metadata.models import InsertLocation, MetadataType, Note, VaultMetadata
|
from obsidian_metadata.models import InsertLocation, MetadataType, Note
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
@@ -54,7 +55,9 @@ class Vault:
|
|||||||
self.insert_location: InsertLocation = self._find_insert_location()
|
self.insert_location: InsertLocation = self._find_insert_location()
|
||||||
self.dry_run: bool = dry_run
|
self.dry_run: bool = dry_run
|
||||||
self.backup_path: Path = self.vault_path.parent / f"{self.vault_path.name}.bak"
|
self.backup_path: Path = self.vault_path.parent / f"{self.vault_path.name}.bak"
|
||||||
self.metadata = VaultMetadata()
|
self.frontmatter: dict[str, list[str]] = {}
|
||||||
|
self.inline_meta: dict[str, list[str]] = {}
|
||||||
|
self.tags: list[str] = []
|
||||||
self.exclude_paths: list[Path] = []
|
self.exclude_paths: list[Path] = []
|
||||||
|
|
||||||
for p in config.exclude_paths:
|
for p in config.exclude_paths:
|
||||||
@@ -104,16 +107,33 @@ class Vault:
|
|||||||
]
|
]
|
||||||
|
|
||||||
if _filter.tag_filter is not None:
|
if _filter.tag_filter is not None:
|
||||||
notes_list = [n for n in notes_list if n.contains_tag(_filter.tag_filter)]
|
notes_list = [
|
||||||
|
n
|
||||||
|
for n in notes_list
|
||||||
|
if n.contains_metadata(
|
||||||
|
MetadataType.TAGS, search_key="", search_value=_filter.tag_filter
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
if _filter.key_filter is not None and _filter.value_filter is not None:
|
if _filter.key_filter is not None and _filter.value_filter is not None:
|
||||||
notes_list = [
|
notes_list = [
|
||||||
n
|
n
|
||||||
for n in notes_list
|
for n in notes_list
|
||||||
if n.contains_metadata(_filter.key_filter, _filter.value_filter)
|
if n.contains_metadata(
|
||||||
|
meta_type=MetadataType.META,
|
||||||
|
search_key=_filter.key_filter,
|
||||||
|
search_value=_filter.value_filter,
|
||||||
|
)
|
||||||
]
|
]
|
||||||
|
|
||||||
if _filter.key_filter is not None and _filter.value_filter is None:
|
if _filter.key_filter is not None and _filter.value_filter is None:
|
||||||
notes_list = [n for n in notes_list if n.contains_metadata(_filter.key_filter)]
|
notes_list = [
|
||||||
|
n
|
||||||
|
for n in notes_list
|
||||||
|
if n.contains_metadata(
|
||||||
|
MetadataType.META, search_key=_filter.key_filter, search_value=None
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
return notes_list
|
return notes_list
|
||||||
|
|
||||||
@@ -167,37 +187,60 @@ class Vault:
|
|||||||
]
|
]
|
||||||
|
|
||||||
def _rebuild_vault_metadata(self) -> None:
|
def _rebuild_vault_metadata(self) -> None:
|
||||||
"""Rebuild vault metadata."""
|
"""Rebuild vault metadata. Indexes all frontmatter, inline metadata, and tags and adds them to dictionary objects."""
|
||||||
self.metadata = VaultMetadata()
|
|
||||||
with console.status(
|
with console.status(
|
||||||
"Processing notes... [dim](Can take a while for a large vault)[/]",
|
"Processing notes... [dim](Can take a while for a large vault)[/]",
|
||||||
spinner="bouncingBall",
|
spinner="bouncingBall",
|
||||||
):
|
):
|
||||||
|
vault_frontmatter = {}
|
||||||
|
vault_inline_meta = {}
|
||||||
|
vault_tags = []
|
||||||
for _note in self.notes_in_scope:
|
for _note in self.notes_in_scope:
|
||||||
self.metadata.index_metadata(
|
for field in _note.metadata:
|
||||||
area=MetadataType.FRONTMATTER, metadata=_note.frontmatter.dict
|
match field.meta_type:
|
||||||
)
|
case MetadataType.FRONTMATTER:
|
||||||
self.metadata.index_metadata(
|
if field.clean_key not in vault_frontmatter:
|
||||||
area=MetadataType.INLINE, metadata=_note.inline_metadata.dict
|
vault_frontmatter[field.clean_key] = (
|
||||||
)
|
[field.normalized_value]
|
||||||
self.metadata.index_metadata(
|
if field.normalized_value != "-"
|
||||||
area=MetadataType.TAGS,
|
else []
|
||||||
metadata=_note.tags.list,
|
)
|
||||||
)
|
elif field.normalized_value != "-":
|
||||||
|
vault_frontmatter[field.clean_key].append(field.normalized_value)
|
||||||
|
case MetadataType.INLINE:
|
||||||
|
if field.clean_key not in vault_inline_meta:
|
||||||
|
vault_inline_meta[field.clean_key] = (
|
||||||
|
[field.normalized_value]
|
||||||
|
if field.normalized_value != "-"
|
||||||
|
else []
|
||||||
|
)
|
||||||
|
elif field.normalized_value != "-":
|
||||||
|
vault_inline_meta[field.clean_key].append(field.normalized_value)
|
||||||
|
case MetadataType.TAGS:
|
||||||
|
if field.normalized_value not in vault_tags:
|
||||||
|
vault_tags.append(field.normalized_value)
|
||||||
|
|
||||||
|
self.frontmatter = {
|
||||||
|
k: sorted(list(set(v))) for k, v in sorted(vault_frontmatter.items())
|
||||||
|
}
|
||||||
|
self.inline_meta = {
|
||||||
|
k: sorted(list(set(v))) for k, v in sorted(vault_inline_meta.items())
|
||||||
|
}
|
||||||
|
self.tags = sorted(list(set(vault_tags)))
|
||||||
|
|
||||||
def add_metadata(
|
def add_metadata(
|
||||||
self,
|
self,
|
||||||
area: MetadataType,
|
meta_type: MetadataType,
|
||||||
key: str = None,
|
key: str = None,
|
||||||
value: str | list[str] = None,
|
value: str = None,
|
||||||
location: InsertLocation = None,
|
location: InsertLocation = None,
|
||||||
) -> int:
|
) -> int:
|
||||||
"""Add metadata to all notes in the vault which do not already contain it.
|
"""Add metadata to all notes in the vault which do not already contain it.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
area (MetadataType): Area of metadata to add to.
|
meta_type (MetadataType): Area of metadata to add to.
|
||||||
key (str): Key to add.
|
key (str): Key to add.
|
||||||
value (str|list, optional): Value to add.
|
value (str, optional): Value to add.
|
||||||
location (InsertLocation, optional): Location to insert metadata. (Defaults to `vault.config.insert_location`)
|
location (InsertLocation, optional): Location to insert metadata. (Defaults to `vault.config.insert_location`)
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
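For orientation, after `_rebuild_vault_metadata()` runs, `vault.frontmatter`, `vault.inline_meta`, and `vault.tags` hold sorted, de-duplicated aggregates across every in-scope note. A hedged illustration of their shapes, using sample values borrowed from the test fixtures rather than a captured vault:

```python
# Illustrative shapes only -- the keys and values are sample data, not a real vault.
frontmatter: dict[str, list[str]] = {
    "author": ["John Doe"],
    "date_created": ["2022-12-21", "2022-12-22"],
    "tags": ["breakfast", "dinner"],
}
inline_meta: dict[str, list[str]] = {
    "inline_key": ["inline_key_value"],
    "status": ["new"],
}
tags: list[str] = ["breakfast", "inline_tag"]
```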
@@ -209,7 +252,9 @@ class Vault:
|
|||||||
num_changed = 0
|
num_changed = 0
|
||||||
|
|
||||||
for _note in self.notes_in_scope:
|
for _note in self.notes_in_scope:
|
||||||
if _note.add_metadata(area=area, key=key, value=value, location=location):
|
if _note.add_metadata(
|
||||||
|
meta_type=meta_type, added_key=key, added_value=value, location=location
|
||||||
|
):
|
||||||
log.trace(f"Added metadata to {_note.note_path}")
|
log.trace(f"Added metadata to {_note.note_path}")
|
||||||
num_changed += 1
|
num_changed += 1
|
||||||
|
|
||||||
@@ -257,6 +302,43 @@ class Vault:
|
|||||||
log.trace(f"writing to {_note.note_path}")
|
log.trace(f"writing to {_note.note_path}")
|
||||||
_note.commit()
|
_note.commit()
|
||||||
|
|
||||||
|
def contains_metadata(
|
||||||
|
self, meta_type: MetadataType, key: str, value: str = None, is_regex: bool = False
|
||||||
|
) -> bool:
|
||||||
|
"""Check if the vault contains metadata.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
meta_type (MetadataType): Area of metadata to check.
|
||||||
|
key (str): Key to check.
|
||||||
|
value (str, optional): Value to check. Defaults to None.
|
||||||
|
is_regex (bool, optional): Whether the value is a regex. Defaults to False.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: Whether the vault contains the metadata.
|
||||||
|
"""
|
||||||
|
if meta_type == MetadataType.FRONTMATTER and key is not None:
|
||||||
|
return dict_contains(self.frontmatter, key, value, is_regex)
|
||||||
|
|
||||||
|
if meta_type == MetadataType.INLINE and key is not None:
|
||||||
|
return dict_contains(self.inline_meta, key, value, is_regex)
|
||||||
|
|
||||||
|
if meta_type == MetadataType.TAGS and value is not None:
|
||||||
|
if not is_regex:
|
||||||
|
value = f"^{re.escape(value)}$"
|
||||||
|
return any(re.search(value, item) for item in self.tags)
|
||||||
|
|
||||||
|
if meta_type == MetadataType.META:
|
||||||
|
return self.contains_metadata(
|
||||||
|
MetadataType.FRONTMATTER, key, value, is_regex
|
||||||
|
) or self.contains_metadata(MetadataType.INLINE, key, value, is_regex)
|
||||||
|
|
||||||
|
if meta_type == MetadataType.ALL:
|
||||||
|
return self.contains_metadata(
|
||||||
|
MetadataType.TAGS, key, value, is_regex
|
||||||
|
) or self.contains_metadata(MetadataType.META, key, value, is_regex)
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
def delete_backup(self) -> None:
|
def delete_backup(self) -> None:
|
||||||
"""Delete the vault backup."""
|
"""Delete the vault backup."""
|
||||||
log.debug("Deleting vault backup")
|
log.debug("Deleting vault backup")
|
||||||
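A hedged usage sketch of the new `Vault.contains_metadata()` method. It assumes a loaded vault whose notes contain the frontmatter key `author` and the tag `breakfast`, as in the test fixtures:

```python
from obsidian_metadata.models.enums import MetadataType
from obsidian_metadata.models.vault import Vault


def vault_has_author_and_breakfast(vault: Vault) -> bool:
    """Illustrative check against a loaded vault (values are assumed fixture data).

    META looks in both frontmatter and inline metadata; TAGS matches the tag
    exactly unless is_regex=True, so "break" alone would return False here.
    """
    has_key = vault.contains_metadata(meta_type=MetadataType.META, key="author")
    has_tag = vault.contains_metadata(meta_type=MetadataType.TAGS, key=None, value="breakfast")
    return has_key and has_tag
```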
@@ -280,7 +362,7 @@ class Vault:
|
|||||||
num_changed = 0
|
num_changed = 0
|
||||||
|
|
||||||
for _note in self.notes_in_scope:
|
for _note in self.notes_in_scope:
|
||||||
if _note.delete_tag(tag):
|
if _note.delete_metadata(MetadataType.TAGS, value=tag):
|
||||||
log.trace(f"Deleted tag from {_note.note_path}")
|
log.trace(f"Deleted tag from {_note.note_path}")
|
||||||
num_changed += 1
|
num_changed += 1
|
||||||
|
|
||||||
@@ -293,13 +375,13 @@ class Vault:
|
|||||||
self,
|
self,
|
||||||
key: str,
|
key: str,
|
||||||
value: str = None,
|
value: str = None,
|
||||||
area: MetadataType = MetadataType.ALL,
|
meta_type: MetadataType = MetadataType.ALL,
|
||||||
is_regex: bool = False,
|
is_regex: bool = False,
|
||||||
) -> int:
|
) -> int:
|
||||||
"""Delete metadata in the vault.
|
"""Delete metadata in the vault.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
area (MetadataType): Area of metadata to delete from.
|
meta_type (MetadataType): Area of metadata to delete from.
|
||||||
is_regex (bool): Whether to use regex for key and value. Defaults to False.
|
is_regex (bool): Whether to use regex for key and value. Defaults to False.
|
||||||
key (str): Key to delete. Regex is supported
|
key (str): Key to delete. Regex is supported
|
||||||
value (str, optional): Value to delete. Regex is supported
|
value (str, optional): Value to delete. Regex is supported
|
||||||
@@ -310,7 +392,7 @@ class Vault:
|
|||||||
num_changed = 0
|
num_changed = 0
|
||||||
|
|
||||||
for _note in self.notes_in_scope:
|
for _note in self.notes_in_scope:
|
||||||
if _note.delete_metadata(key=key, value=value, area=area, is_regex=is_regex):
|
if _note.delete_metadata(meta_type=meta_type, key=key, value=value, is_regex=is_regex):
|
||||||
log.trace(f"Deleted metadata from {_note.note_path}")
|
log.trace(f"Deleted metadata from {_note.note_path}")
|
||||||
num_changed += 1
|
num_changed += 1
|
||||||
|
|
||||||
@@ -319,7 +401,7 @@ class Vault:
|
|||||||
|
|
||||||
return num_changed
|
return num_changed
|
||||||
|
|
||||||
def export_metadata(self, path: str, export_format: str = "csv") -> None: # noqa: C901
|
def export_metadata(self, path: str, export_format: str = "csv") -> None:
|
||||||
"""Write metadata to a csv file.
|
"""Write metadata to a csv file.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
@@ -337,28 +419,28 @@ class Vault:
|
|||||||
writer = csv.writer(f)
|
writer = csv.writer(f)
|
||||||
writer.writerow(["Metadata Type", "Key", "Value"])
|
writer.writerow(["Metadata Type", "Key", "Value"])
|
||||||
|
|
||||||
for key, value in self.metadata.frontmatter.items():
|
for key, value in self.frontmatter.items():
|
||||||
if isinstance(value, list):
|
if len(value) > 0:
|
||||||
if len(value) > 0:
|
for v in value:
|
||||||
for v in value:
|
|
||||||
writer.writerow(["frontmatter", key, v])
|
|
||||||
else:
|
|
||||||
writer.writerow(["frontmatter", key, v])
|
writer.writerow(["frontmatter", key, v])
|
||||||
|
else:
|
||||||
|
writer.writerow(["frontmatter", key, ""])
|
||||||
|
|
||||||
for key, value in self.metadata.inline_metadata.items():
|
for key, value in self.inline_meta.items():
|
||||||
if isinstance(value, list):
|
if len(value) > 0:
|
||||||
if len(value) > 0:
|
for v in value:
|
||||||
for v in value:
|
writer.writerow(["inline_metadata", key, v])
|
||||||
writer.writerow(["inline_metadata", key, v])
|
else:
|
||||||
else:
|
writer.writerow(["inline_metadata", key, ""])
|
||||||
writer.writerow(["frontmatter", key, v])
|
|
||||||
for tag in self.metadata.tags:
|
for tag in self.tags:
|
||||||
writer.writerow(["tags", "", f"{tag}"])
|
writer.writerow(["tags", "", f"{tag}"])
|
||||||
|
|
||||||
case "json":
|
case "json":
|
||||||
dict_to_dump = {
|
dict_to_dump = {
|
||||||
"frontmatter": self.metadata.dict,
|
"frontmatter": self.frontmatter,
|
||||||
"inline_metadata": self.metadata.inline_metadata,
|
"inline_metadata": self.inline_meta,
|
||||||
"tags": self.metadata.tags,
|
"tags": self.tags,
|
||||||
}
|
}
|
||||||
|
|
||||||
with export_file.open(mode="w", encoding="utf-8") as f:
|
with export_file.open(mode="w", encoding="utf-8") as f:
|
||||||
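For reference, the CSV branch above produces rows in the following layout; the snippet below recreates it with invented values rather than a captured export:

```python
import csv
import io

# Hedged illustration of the CSV layout produced above; the rows are invented
# from the fixture data, not a captured export.  Keys with no values get an
# empty Value column, and tag rows leave the Key column empty.
buffer = io.StringIO()
writer = csv.writer(buffer)
writer.writerow(["Metadata Type", "Key", "Value"])
writer.writerow(["frontmatter", "author", "John Doe"])
writer.writerow(["frontmatter", "empty_key", ""])
writer.writerow(["inline_metadata", "inline_key", "inline_key_value"])
writer.writerow(["tags", "", "breakfast"])
print(buffer.getvalue())
```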
@@ -380,26 +462,21 @@ class Vault:
|
|||||||
writer.writerow(["path", "type", "key", "value"])
|
writer.writerow(["path", "type", "key", "value"])
|
||||||
|
|
||||||
for _note in self.all_notes:
|
for _note in self.all_notes:
|
||||||
for key, value in _note.frontmatter.dict.items():
|
for field in sorted(
|
||||||
for v in value:
|
_note.metadata,
|
||||||
writer.writerow(
|
key=lambda x: (
|
||||||
[_note.note_path.relative_to(self.vault_path), "frontmatter", key, v]
|
x.meta_type.name,
|
||||||
)
|
x.clean_key,
|
||||||
|
x.normalized_value,
|
||||||
for key, value in _note.inline_metadata.dict.items():
|
),
|
||||||
for v in value:
|
):
|
||||||
writer.writerow(
|
|
||||||
[
|
|
||||||
_note.note_path.relative_to(self.vault_path),
|
|
||||||
"inline_metadata",
|
|
||||||
key,
|
|
||||||
v,
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
for tag in _note.tags.list:
|
|
||||||
writer.writerow(
|
writer.writerow(
|
||||||
[_note.note_path.relative_to(self.vault_path), "tag", "", f"{tag}"]
|
[
|
||||||
|
_note.note_path.relative_to(self.vault_path),
|
||||||
|
field.meta_type.name,
|
||||||
|
field.clean_key if field.clean_key is not None else "",
|
||||||
|
field.normalized_value if field.normalized_value != "-" else "",
|
||||||
|
]
|
||||||
)
|
)
|
||||||
|
|
||||||
def get_changed_notes(self) -> list[Note]:
|
def get_changed_notes(self) -> list[Note]:
|
||||||
@@ -430,14 +507,14 @@ class Vault:
|
|||||||
table.add_row("Notes with changes", str(len(self.get_changed_notes())))
|
table.add_row("Notes with changes", str(len(self.get_changed_notes())))
|
||||||
table.add_row("Insert Location", str(self.insert_location.value))
|
table.add_row("Insert Location", str(self.insert_location.value))
|
||||||
|
|
||||||
console.print(table)
|
console_no_markup.print(table)
|
||||||
|
|
||||||
def list_editable_notes(self) -> None:
|
def list_editable_notes(self) -> None:
|
||||||
"""Print a list of notes within the scope that are being edited."""
|
"""Print a list of notes within the scope that are being edited."""
|
||||||
table = Table(title="Notes in current scope", show_header=False, box=box.HORIZONTALS)
|
table = Table(title="Notes in current scope", show_header=False, box=box.HORIZONTALS)
|
||||||
for _n, _note in enumerate(self.notes_in_scope, start=1):
|
for _n, _note in enumerate(self.notes_in_scope, start=1):
|
||||||
table.add_row(str(_n), str(_note.note_path.relative_to(self.vault_path)))
|
table.add_row(str(_n), str(_note.note_path.relative_to(self.vault_path)))
|
||||||
console.print(table)
|
console_no_markup.print(table)
|
||||||
|
|
||||||
def move_inline_metadata(self, location: InsertLocation) -> int:
|
def move_inline_metadata(self, location: InsertLocation) -> int:
|
||||||
"""Move all inline metadata to the selected location.
|
"""Move all inline metadata to the selected location.
|
||||||
@@ -451,11 +528,15 @@ class Vault:
|
|||||||
num_changed = 0
|
num_changed = 0
|
||||||
|
|
||||||
for _note in self.notes_in_scope:
|
for _note in self.notes_in_scope:
|
||||||
if _note.write_delete_inline_metadata():
|
if _note.transpose_metadata(
|
||||||
log.trace(f"Deleted inline metadata from {_note.note_path}")
|
begin=MetadataType.INLINE,
|
||||||
|
end=MetadataType.INLINE,
|
||||||
|
key=None,
|
||||||
|
value=None,
|
||||||
|
location=location,
|
||||||
|
):
|
||||||
|
log.trace(f"Moved inline metadata in {_note.note_path}")
|
||||||
num_changed += 1
|
num_changed += 1
|
||||||
_note.write_all_inline_metadata(location)
|
|
||||||
log.trace(f"Wrote all inline metadata to {_note.note_path}")
|
|
||||||
|
|
||||||
if num_changed > 0:
|
if num_changed > 0:
|
||||||
self._rebuild_vault_metadata()
|
self._rebuild_vault_metadata()
|
||||||
@@ -466,6 +547,50 @@ class Vault:
|
|||||||
"""Count number of excluded notes."""
|
"""Count number of excluded notes."""
|
||||||
return len(self.all_notes) - len(self.notes_in_scope)
|
return len(self.all_notes) - len(self.notes_in_scope)
|
||||||
|
|
||||||
|
def print_metadata(self, meta_type: MetadataType = MetadataType.ALL) -> None:
|
||||||
|
"""Print metadata for the vault."""
|
||||||
|
dict_to_print = None
|
||||||
|
list_to_print = None
|
||||||
|
match meta_type:
|
||||||
|
case MetadataType.INLINE:
|
||||||
|
dict_to_print = self.inline_meta
|
||||||
|
header = "All inline metadata"
|
||||||
|
case MetadataType.FRONTMATTER:
|
||||||
|
dict_to_print = self.frontmatter
|
||||||
|
header = "All frontmatter"
|
||||||
|
case MetadataType.TAGS:
|
||||||
|
list_to_print = [f"#{x}" for x in self.tags]
|
||||||
|
header = "All inline tags"
|
||||||
|
case MetadataType.KEYS:
|
||||||
|
list_to_print = sorted(
|
||||||
|
merge_dictionaries(self.frontmatter, self.inline_meta).keys()
|
||||||
|
)
|
||||||
|
header = "All Keys"
|
||||||
|
case MetadataType.ALL:
|
||||||
|
dict_to_print = merge_dictionaries(self.frontmatter, self.inline_meta)
|
||||||
|
list_to_print = [f"#{x}" for x in self.tags]
|
||||||
|
header = "All metadata"
|
||||||
|
|
||||||
|
if dict_to_print is not None:
|
||||||
|
table = Table(title=header, show_footer=False, show_lines=True)
|
||||||
|
table.add_column("Keys", style="bold")
|
||||||
|
table.add_column("Values")
|
||||||
|
for key, value in sorted(dict_to_print.items()):
|
||||||
|
values: str | dict[str, list[str]] = (
|
||||||
|
"\n".join(sorted(value)) if isinstance(value, list) else value
|
||||||
|
)
|
||||||
|
table.add_row(f"{key}", str(values))
|
||||||
|
console_no_markup.print(table)
|
||||||
|
|
||||||
|
if list_to_print is not None:
|
||||||
|
columns = Columns(
|
||||||
|
sorted(list_to_print),
|
||||||
|
equal=True,
|
||||||
|
expand=True,
|
||||||
|
title=header if meta_type != MetadataType.ALL else "All inline tags",
|
||||||
|
)
|
||||||
|
console_no_markup.print(columns)
|
||||||
|
|
||||||
def rename_tag(self, old_tag: str, new_tag: str) -> int:
|
def rename_tag(self, old_tag: str, new_tag: str) -> int:
|
||||||
"""Rename an inline tag in the vault.
|
"""Rename an inline tag in the vault.
|
||||||
|
|
||||||
@@ -518,7 +643,7 @@ class Vault:
|
|||||||
begin: MetadataType,
|
begin: MetadataType,
|
||||||
end: MetadataType,
|
end: MetadataType,
|
||||||
key: str = None,
|
key: str = None,
|
||||||
value: str | list[str] = None,
|
value: str = None,
|
||||||
location: InsertLocation = None,
|
location: InsertLocation = None,
|
||||||
) -> int:
|
) -> int:
|
||||||
"""Transpose metadata from one type to another.
|
"""Transpose metadata from one type to another.
|
||||||
@@ -546,15 +671,15 @@ class Vault:
|
|||||||
location=location,
|
location=location,
|
||||||
):
|
):
|
||||||
num_changed += 1
|
num_changed += 1
|
||||||
log.trace(f"Transposed metadata in {_note.note_path}")
|
|
||||||
|
|
||||||
if num_changed > 0:
|
if num_changed > 0:
|
||||||
self._rebuild_vault_metadata()
|
self._rebuild_vault_metadata()
|
||||||
|
log.trace(f"Transposed metadata in {_note.note_path}")
|
||||||
|
|
||||||
return num_changed
|
return num_changed
|
||||||
|
|
||||||
def update_from_dict(self, dictionary: dict[str, Any]) -> int:
|
def update_from_dict(self, dictionary: dict[str, Any]) -> int:
|
||||||
"""Update note metadata from a dictionary. This is a destructive operation. All metadata in the specified notes not in the dictionary will be removed.
|
"""Update note metadata from a dictionary. This method is used when updating note metadata from a CSV file. This is a destructive operation. All existing metadata in the specified notes not in the dictionary will be removed.
|
||||||
|
|
||||||
Requires a dictionary with the note path as the key and a dictionary of metadata as the value. Each key must have a list of associated dictionaries in the following format:
|
Requires a dictionary with the note path as the key and a dictionary of metadata as the value. Each key must have a list of associated dictionaries in the following format:
|
||||||
|
|
||||||
@@ -577,25 +702,32 @@ class Vault:
|
|||||||
if str(path) in dictionary:
|
if str(path) in dictionary:
|
||||||
log.debug(f"Bulk update metadata for '{path}'")
|
log.debug(f"Bulk update metadata for '{path}'")
|
||||||
num_changed += 1
|
num_changed += 1
|
||||||
_note.delete_all_metadata()
|
|
||||||
|
# Delete all existing metadata in the note
|
||||||
|
_note.delete_metadata(meta_type=MetadataType.META, key=r".*", is_regex=True)
|
||||||
|
_note.delete_metadata(meta_type=MetadataType.TAGS, value=r".*", is_regex=True)
|
||||||
|
|
||||||
|
# Add the new metadata
|
||||||
for row in dictionary[str(path)]:
|
for row in dictionary[str(path)]:
|
||||||
if row["type"].lower() == "frontmatter":
|
if row["type"].lower() == "frontmatter":
|
||||||
_note.add_metadata(
|
_note.add_metadata(
|
||||||
area=MetadataType.FRONTMATTER, key=row["key"], value=row["value"]
|
meta_type=MetadataType.FRONTMATTER,
|
||||||
|
added_key=row["key"],
|
||||||
|
added_value=row["value"],
|
||||||
)
|
)
|
||||||
|
|
||||||
if row["type"].lower() == "inline_metadata":
|
if row["type"].lower() == "inline_metadata":
|
||||||
_note.add_metadata(
|
_note.add_metadata(
|
||||||
area=MetadataType.INLINE,
|
meta_type=MetadataType.INLINE,
|
||||||
key=row["key"],
|
added_key=row["key"],
|
||||||
value=row["value"],
|
added_value=row["value"],
|
||||||
location=self.insert_location,
|
location=self.insert_location,
|
||||||
)
|
)
|
||||||
|
|
||||||
if row["type"].lower() == "tag":
|
if row["type"].lower() == "tag":
|
||||||
_note.add_metadata(
|
_note.add_metadata(
|
||||||
area=MetadataType.TAGS,
|
meta_type=MetadataType.TAGS,
|
||||||
value=row["value"],
|
added_value=row["value"],
|
||||||
location=self.insert_location,
|
location=self.insert_location,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
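A hedged sketch of the dictionary `update_from_dict()` expects, reconstructed from the row handling in the hunk above; the note path and values are illustrative:

```python
# Hedged sketch of the dictionary update_from_dict() expects, reconstructed
# from the row handling above.  Keys are note paths relative to the vault (as
# written by the CSV export); the path and values here are illustrative.
update_dict = {
    "01 frontmatter/frontmatter 4.md": [
        {"type": "frontmatter", "key": "author", "value": "John Doe"},
        {"type": "inline_metadata", "key": "status", "value": "new"},
        {"type": "tag", "key": "", "value": "breakfast"},
    ],
}

# num_changed = vault.update_from_dict(update_dict)  # with a loaded Vault instance
```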
@@ -6,87 +6,87 @@ import pytest
|
|||||||
|
|
||||||
from obsidian_metadata._utils import alerts
|
from obsidian_metadata._utils import alerts
|
||||||
from obsidian_metadata._utils.alerts import logger as log
|
from obsidian_metadata._utils.alerts import logger as log
|
||||||
from tests.helpers import Regex
|
from tests.helpers import Regex, strip_ansi
|
||||||
|
|
||||||
|
|
||||||
def test_dryrun(capsys):
|
def test_dryrun(capsys):
|
||||||
"""Test dry run."""
|
"""Test dry run."""
|
||||||
alerts.dryrun("This prints in dry run")
|
alerts.dryrun("This prints in dry run")
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out == "DRYRUN | This prints in dry run\n"
|
assert captured == "DRYRUN | This prints in dry run\n"
|
||||||
|
|
||||||
|
|
||||||
def test_success(capsys):
|
def test_success(capsys):
|
||||||
"""Test success."""
|
"""Test success."""
|
||||||
alerts.success("This prints in success")
|
alerts.success("This prints in success")
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out == "SUCCESS | This prints in success\n"
|
assert captured == "SUCCESS | This prints in success\n"
|
||||||
|
|
||||||
|
|
||||||
def test_error(capsys):
|
def test_error(capsys):
|
||||||
"""Test success."""
|
"""Test success."""
|
||||||
alerts.error("This prints in error")
|
alerts.error("This prints in error")
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out == "ERROR | This prints in error\n"
|
assert captured == "ERROR | This prints in error\n"
|
||||||
|
|
||||||
|
|
||||||
def test_warning(capsys):
|
def test_warning(capsys):
|
||||||
"""Test warning."""
|
"""Test warning."""
|
||||||
alerts.warning("This prints in warning")
|
alerts.warning("This prints in warning")
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out == "WARNING | This prints in warning\n"
|
assert captured == "WARNING | This prints in warning\n"
|
||||||
|
|
||||||
|
|
||||||
def test_notice(capsys):
|
def test_notice(capsys):
|
||||||
"""Test notice."""
|
"""Test notice."""
|
||||||
alerts.notice("This prints in notice")
|
alerts.notice("This prints in notice")
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out == "NOTICE | This prints in notice\n"
|
assert captured == "NOTICE | This prints in notice\n"
|
||||||
|
|
||||||
|
|
||||||
def test_alerts_debug(capsys):
|
def test_alerts_debug(capsys):
|
||||||
"""Test debug."""
|
"""Test debug."""
|
||||||
alerts.debug("This prints in debug")
|
alerts.debug("This prints in debug")
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out == "DEBUG | This prints in debug\n"
|
assert captured == "DEBUG | This prints in debug\n"
|
||||||
|
|
||||||
|
|
||||||
def test_usage(capsys):
|
def test_usage(capsys):
|
||||||
"""Test usage."""
|
"""Test usage."""
|
||||||
alerts.usage("This prints in usage")
|
alerts.usage("This prints in usage")
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out == "USAGE | This prints in usage\n"
|
assert captured == "USAGE | This prints in usage\n"
|
||||||
|
|
||||||
alerts.usage(
|
alerts.usage(
|
||||||
"Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua",
|
"Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua",
|
||||||
width=80,
|
width=80,
|
||||||
)
|
)
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert "USAGE | Lorem ipsum dolor sit amet" in captured.out
|
assert "USAGE | Lorem ipsum dolor sit amet" in captured
|
||||||
assert " | incididunt ut labore et dolore magna aliqua" in captured.out
|
assert " | incididunt ut labore et dolore magna aliqua" in captured
|
||||||
|
|
||||||
alerts.usage(
|
alerts.usage(
|
||||||
"Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua",
|
"Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua",
|
||||||
width=20,
|
width=20,
|
||||||
)
|
)
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert "USAGE | Lorem ipsum dolor" in captured.out
|
assert "USAGE | Lorem ipsum dolor" in captured
|
||||||
assert " | sit amet," in captured.out
|
assert " | sit amet," in captured
|
||||||
assert " | adipisicing elit," in captured.out
|
assert " | adipisicing elit," in captured
|
||||||
|
|
||||||
|
|
||||||
def test_info(capsys):
|
def test_info(capsys):
|
||||||
"""Test info."""
|
"""Test info."""
|
||||||
alerts.info("This prints in info")
|
alerts.info("This prints in info")
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out == "INFO | This prints in info\n"
|
assert captured == "INFO | This prints in info\n"
|
||||||
|
|
||||||
|
|
||||||
def test_dim(capsys):
|
def test_dim(capsys):
|
||||||
"""Test info."""
|
"""Test info."""
|
||||||
alerts.dim("This prints in dim")
|
alerts.dim("This prints in dim")
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out == "This prints in dim\n"
|
assert captured == "This prints in dim\n"
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
@@ -106,74 +106,74 @@ def test_logging(capsys, tmp_path, verbosity, log_to_file) -> None:
|
|||||||
|
|
||||||
if verbosity >= 3:
|
if verbosity >= 3:
|
||||||
assert logging.is_trace() is True
|
assert logging.is_trace() is True
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert not captured.out
|
assert not captured
|
||||||
|
|
||||||
assert logging.is_trace("trace text") is True
|
assert logging.is_trace("trace text") is True
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out == "trace text\n"
|
assert captured == "trace text\n"
|
||||||
|
|
||||||
log.trace("This is Trace logging")
|
log.trace("This is Trace logging")
|
||||||
captured = capsys.readouterr()
|
cap_error = strip_ansi(capsys.readouterr().err)
|
||||||
assert captured.err == Regex(r"^TRACE \| This is Trace logging \([\w\._:]+:\d+\)$")
|
assert cap_error == Regex(r"^TRACE \| This is Trace logging \([\w\._:]+:\d+\)$")
|
||||||
else:
|
else:
|
||||||
assert logging.is_trace("trace text") is False
|
assert logging.is_trace("trace text") is False
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out != "trace text\n"
|
assert captured != "trace text\n"
|
||||||
|
|
||||||
log.trace("This is Trace logging")
|
log.trace("This is Trace logging")
|
||||||
captured = capsys.readouterr()
|
cap_error = strip_ansi(capsys.readouterr().err)
|
||||||
assert captured.err != Regex(r"^TRACE \| This is Trace logging \([\w\._:]+:\d+\)$")
|
assert cap_error != Regex(r"^TRACE \| This is Trace logging \([\w\._:]+:\d+\)$")
|
||||||
|
|
||||||
if verbosity >= 2:
|
if verbosity >= 2:
|
||||||
assert logging.is_debug() is True
|
assert logging.is_debug() is True
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert not captured.out
|
assert not captured
|
||||||
|
|
||||||
assert logging.is_debug("debug text") is True
|
assert logging.is_debug("debug text") is True
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out == "debug text\n"
|
assert captured == "debug text\n"
|
||||||
|
|
||||||
log.debug("This is Debug logging")
|
log.debug("This is Debug logging")
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().err)
|
||||||
assert captured.err == Regex(r"^DEBUG \| This is Debug logging \([\w\._:]+:\d+\)$")
|
assert captured == Regex(r"^DEBUG \| This is Debug logging \([\w\._:]+:\d+\)$")
|
||||||
else:
|
else:
|
||||||
assert logging.is_debug("debug text") is False
|
assert logging.is_debug("debug text") is False
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out != "debug text\n"
|
assert captured != "debug text\n"
|
||||||
|
|
||||||
log.debug("This is Debug logging")
|
log.debug("This is Debug logging")
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().err)
|
||||||
assert captured.err != Regex(r"^DEBUG \| This is Debug logging \([\w\._:]+:\d+\)$")
|
assert captured != Regex(r"^DEBUG \| This is Debug logging \([\w\._:]+:\d+\)$")
|
||||||
|
|
||||||
if verbosity >= 1:
|
if verbosity >= 1:
|
||||||
assert logging.is_info() is True
|
assert logging.is_info() is True
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert not captured.out
|
assert not captured
|
||||||
|
|
||||||
assert logging.is_info("info text") is True
|
assert logging.is_info("info text") is True
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out == "info text\n"
|
assert captured == "info text\n"
|
||||||
|
|
||||||
log.info("This is Info logging")
|
log.info("This is Info logging")
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().err)
|
||||||
assert captured.err == "INFO | This is Info logging\n"
|
assert captured == "INFO | This is Info logging\n"
|
||||||
else:
|
else:
|
||||||
assert logging.is_info("info text") is False
|
assert logging.is_info("info text") is False
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out != "info text\n"
|
assert captured != "info text\n"
|
||||||
|
|
||||||
log.info("This is Info logging")
|
log.info("This is Info logging")
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert not captured.out
|
assert not captured
|
||||||
|
|
||||||
assert logging.is_default() is True
|
assert logging.is_default() is True
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert not captured.out
|
assert not captured
|
||||||
|
|
||||||
assert logging.is_default("default text") is True
|
assert logging.is_default("default text") is True
|
||||||
captured = capsys.readouterr()
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured.out == "default text\n"
|
assert captured == "default text\n"
|
||||||
|
|
||||||
if log_to_file:
|
if log_to_file:
|
||||||
assert tmp_log.exists() is True
|
assert tmp_log.exists() is True
|
||||||
|
|||||||
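The tests in this file and in the application tests below compare console output through a `strip_ansi()` helper from `tests/helpers.py`, which replaces the earlier `remove_ansi`. Its implementation is not part of this diff; a minimal sketch of such a helper, assuming a simple CSI-escape regex, looks like this:

```python
import re

# Minimal sketch of an ANSI-stripping helper like tests.helpers.strip_ansi;
# the real helper is not shown in this diff.  The regex below removes the
# common CSI escape sequences emitted by colored console output.
ANSI_ESCAPE = re.compile(r"\x1b\[[0-9;?]*[ -/]*[@-~]")


def strip_ansi(text: str) -> str:
    """Return text with ANSI escape sequences removed."""
    return ANSI_ESCAPE.sub("", text)


assert strip_ansi("\x1b[1mSUCCESS\x1b[0m | done") == "SUCCESS | done"
```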
@@ -13,7 +13,7 @@ from pathlib import Path
|
|||||||
import pytest
|
import pytest
|
||||||
|
|
||||||
from obsidian_metadata.models.enums import MetadataType
|
from obsidian_metadata.models.enums import MetadataType
|
||||||
from tests.helpers import Regex, remove_ansi
|
from tests.helpers import Regex, strip_ansi
|
||||||
|
|
||||||
|
|
||||||
def test_instantiate_application(test_application) -> None:
|
def test_instantiate_application(test_application) -> None:
|
||||||
@@ -48,7 +48,7 @@ def test_abort(test_application, mocker, capsys) -> None:
|
|||||||
)
|
)
|
||||||
|
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert "Done!" in captured
|
assert "Done!" in captured
|
||||||
|
|
||||||
|
|
||||||
@@ -80,7 +80,7 @@ def test_add_metadata_frontmatter(test_application, mocker, capsys) -> None:
|
|||||||
|
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured == Regex(r"SUCCESS +\| Added metadata to \d+ notes", re.DOTALL)
|
assert captured == Regex(r"SUCCESS +\| Added metadata to \d+ notes", re.DOTALL)
|
||||||
|
|
||||||
|
|
||||||
@@ -112,7 +112,7 @@ def test_add_metadata_inline(test_application, mocker, capsys) -> None:
|
|||||||
|
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured == Regex(r"SUCCESS +\| Added metadata to \d+ notes", re.DOTALL)
|
assert captured == Regex(r"SUCCESS +\| Added metadata to \d+ notes", re.DOTALL)
|
||||||
|
|
||||||
|
|
||||||
@@ -140,7 +140,7 @@ def test_add_metadata_tag(test_application, mocker, capsys) -> None:
|
|||||||
|
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured == Regex(r"SUCCESS +\| Added metadata to \d+ notes", re.DOTALL)
|
assert captured == Regex(r"SUCCESS +\| Added metadata to \d+ notes", re.DOTALL)
|
||||||
|
|
||||||
|
|
||||||
@@ -168,7 +168,7 @@ def test_delete_tag_1(test_application, mocker, capsys) -> None:
|
|||||||
|
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured == Regex(r"SUCCESS +\| Deleted inline tag: breakfast in \d+ notes", re.DOTALL)
|
assert captured == Regex(r"SUCCESS +\| Deleted inline tag: breakfast in \d+ notes", re.DOTALL)
|
||||||
|
|
||||||
|
|
||||||
@@ -196,7 +196,7 @@ def test_delete_tag_2(test_application, mocker, capsys) -> None:
|
|||||||
|
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert "WARNING | No notes were changed" in captured
|
assert "WARNING | No notes were changed" in captured
|
||||||
|
|
||||||
|
|
||||||
@@ -219,8 +219,8 @@ def test_delete_key(test_application, mocker, capsys) -> None:
|
|||||||
|
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert r"WARNING | No notes found with a key matching: \d{7}" in captured
|
assert r"WARNING | No notes found with a key matching regex: \d{7}" in captured
|
||||||
|
|
||||||
mocker.patch(
|
mocker.patch(
|
||||||
"obsidian_metadata.models.application.Questions.ask_application_main",
|
"obsidian_metadata.models.application.Questions.ask_application_main",
|
||||||
@@ -237,7 +237,7 @@ def test_delete_key(test_application, mocker, capsys) -> None:
|
|||||||
|
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured == Regex(r"SUCCESS \| Deleted keys matching: d\\w\+ from \d+ notes", re.DOTALL)
|
assert captured == Regex(r"SUCCESS \| Deleted keys matching: d\\w\+ from \d+ notes", re.DOTALL)
|
||||||
|
|
||||||
|
|
||||||
@@ -263,7 +263,7 @@ def test_delete_value(test_application, mocker, capsys) -> None:
|
|||||||
)
|
)
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert r"WARNING | No notes found matching: area: \d{7}" in captured
|
assert r"WARNING | No notes found matching: area: \d{7}" in captured
|
||||||
|
|
||||||
mocker.patch(
|
mocker.patch(
|
||||||
@@ -284,8 +284,8 @@ def test_delete_value(test_application, mocker, capsys) -> None:
|
|||||||
)
|
)
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert r"SUCCESS | Deleted value ^front\w+$ from key area in 4 notes" in captured
|
assert captured == Regex(r"SUCCESS | Deleted value \^front\\w\+\$ from key area in \d+ notes")
|
||||||
|
|
||||||
|
|
||||||
def test_filter_notes(test_application, mocker, capsys) -> None:
|
def test_filter_notes(test_application, mocker, capsys) -> None:
|
||||||
@@ -307,7 +307,7 @@ def test_filter_notes(test_application, mocker, capsys) -> None:
|
|||||||
|
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured == Regex(r"SUCCESS +\| Loaded \d+ notes from \d+ total", re.DOTALL)
|
assert captured == Regex(r"SUCCESS +\| Loaded \d+ notes from \d+ total", re.DOTALL)
|
||||||
assert "02 inline/inline 2.md" in captured
|
assert "02 inline/inline 2.md" in captured
|
||||||
assert "03 mixed/mixed 1.md" not in captured
|
assert "03 mixed/mixed 1.md" not in captured
|
||||||
@@ -362,7 +362,7 @@ def test_filter_clear(test_application, mocker, capsys) -> None:
|
|||||||
)
|
)
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert "02 inline/inline 2.md" in captured
|
assert "02 inline/inline 2.md" in captured
|
||||||
assert "03 mixed/mixed 1.md" in captured
|
assert "03 mixed/mixed 1.md" in captured
|
||||||
assert "01 frontmatter/frontmatter 4.md" in captured
|
assert "01 frontmatter/frontmatter 4.md" in captured
|
||||||
@@ -384,8 +384,11 @@ def test_inspect_metadata_all(test_application, mocker, capsys) -> None:
|
|||||||
)
|
)
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured == Regex(r"type +│ article", re.DOTALL)
|
assert captured == Regex(r"tags +│ bar ")
|
||||||
|
assert captured == Regex(r"status +│ new ")
|
||||||
|
assert captured == Regex(r"in_text_key +│ in-text value")
|
||||||
|
assert "#breakfast" in captured
|
||||||
|
|
||||||
|
|
||||||
def test_rename_tag(test_application, mocker, capsys) -> None:
|
def test_rename_tag(test_application, mocker, capsys) -> None:
|
||||||
@@ -411,7 +414,7 @@ def test_rename_tag(test_application, mocker, capsys) -> None:
|
|||||||
|
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert "No notes were changed" in captured
|
assert "No notes were changed" in captured
|
||||||
|
|
||||||
mocker.patch(
|
mocker.patch(
|
||||||
@@ -433,7 +436,7 @@ def test_rename_tag(test_application, mocker, capsys) -> None:
|
|||||||
|
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured == Regex(r"Renamed breakfast to new_tag in \d+ notes", re.DOTALL)
|
assert captured == Regex(r"Renamed breakfast to new_tag in \d+ notes", re.DOTALL)
|
||||||
|
|
||||||
|
|
||||||
@@ -460,7 +463,7 @@ def test_rename_key(test_application, mocker, capsys) -> None:
|
|||||||
|
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert "WARNING | No notes were changed" in captured
|
assert "WARNING | No notes were changed" in captured
|
||||||
|
|
||||||
mocker.patch(
|
mocker.patch(
|
||||||
@@ -482,7 +485,7 @@ def test_rename_key(test_application, mocker, capsys) -> None:
|
|||||||
|
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured == Regex(r"Renamed tags to new_tags in \d+ notes", re.DOTALL)
|
assert captured == Regex(r"Renamed tags to new_tags in \d+ notes", re.DOTALL)
|
||||||
|
|
||||||
|
|
||||||
@@ -512,7 +515,7 @@ def test_rename_value_fail(test_application, mocker, capsys) -> None:
|
|||||||
)
|
)
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert "WARNING | No notes were changed" in captured
|
assert "WARNING | No notes were changed" in captured
|
||||||
|
|
||||||
mocker.patch(
|
mocker.patch(
|
||||||
@@ -537,7 +540,7 @@ def test_rename_value_fail(test_application, mocker, capsys) -> None:
|
|||||||
)
|
)
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured == Regex(
|
assert captured == Regex(
|
||||||
r"SUCCESS +\| Renamed 'area:frontmatter' to 'area:new_key' in \d+ notes", re.DOTALL
|
r"SUCCESS +\| Renamed 'area:frontmatter' to 'area:new_key' in \d+ notes", re.DOTALL
|
||||||
)
|
)
|
||||||
@@ -553,7 +556,7 @@ def test_review_no_changes(test_application, mocker, capsys) -> None:
|
|||||||
)
|
)
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert "INFO | No changes to review" in captured
|
assert "INFO | No changes to review" in captured
|
||||||
|
|
||||||
|
|
||||||
@@ -579,7 +582,7 @@ def test_review_changes(test_application, mocker, capsys) -> None:
|
|||||||
)
|
)
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured == Regex(r".*Found \d+ changed notes in the vault", re.DOTALL)
|
assert captured == Regex(r".*Found \d+ changed notes in the vault", re.DOTALL)
|
||||||
assert "- tags:" in captured
|
assert "- tags:" in captured
|
||||||
assert "+ new_tags:" in captured
|
assert "+ new_tags:" in captured
|
||||||
@@ -595,7 +598,7 @@ def test_transpose_metadata_1(test_application, mocker, capsys) -> None:
|
|||||||
app = test_application
|
app = test_application
|
||||||
app._load_vault()
|
app._load_vault()
|
||||||
|
|
||||||
assert app.vault.metadata.inline_metadata["inline_key"] == ["inline_key_value"]
|
assert app.vault.inline_meta["inline_key"] == ["inline_key_value"]
|
||||||
mocker.patch(
|
mocker.patch(
|
||||||
"obsidian_metadata.models.application.Questions.ask_application_main",
|
"obsidian_metadata.models.application.Questions.ask_application_main",
|
||||||
side_effect=["reorganize_metadata", KeyError],
|
side_effect=["reorganize_metadata", KeyError],
|
||||||
@@ -607,9 +610,9 @@ def test_transpose_metadata_1(test_application, mocker, capsys) -> None:
|
|||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
|
|
||||||
assert app.vault.metadata.inline_metadata == {}
|
assert app.vault.inline_meta == {}
|
||||||
assert app.vault.metadata.frontmatter["inline_key"] == ["inline_key_value"]
|
assert app.vault.frontmatter["inline_key"] == ["inline_key_value"]
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert "SUCCESS | Transposed Inline Metadata to Frontmatter in 5 notes" in captured
|
assert "SUCCESS | Transposed Inline Metadata to Frontmatter in 5 notes" in captured
|
||||||
|
|
||||||
|
|
||||||
@@ -623,7 +626,7 @@ def test_transpose_metadata_2(test_application, mocker) -> None:
|
|||||||
app = test_application
|
app = test_application
|
||||||
app._load_vault()
|
app._load_vault()
|
||||||
|
|
||||||
assert app.vault.metadata.frontmatter["date_created"] == ["2022-12-21", "2022-12-22"]
|
assert app.vault.frontmatter["date_created"] == ["2022-12-21", "2022-12-22"]
|
||||||
mocker.patch(
|
mocker.patch(
|
||||||
"obsidian_metadata.models.application.Questions.ask_application_main",
|
"obsidian_metadata.models.application.Questions.ask_application_main",
|
||||||
side_effect=["reorganize_metadata", KeyError],
|
side_effect=["reorganize_metadata", KeyError],
|
||||||
@@ -634,8 +637,8 @@ def test_transpose_metadata_2(test_application, mocker) -> None:
|
|||||||
)
|
)
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
assert app.vault.metadata.inline_metadata["date_created"] == ["2022-12-21", "2022-12-22"]
|
assert app.vault.inline_meta["date_created"] == ["2022-12-21", "2022-12-22"]
|
||||||
assert app.vault.metadata.frontmatter == {}
|
assert app.vault.frontmatter == {}
|
||||||
|
|
||||||
|
|
||||||
def test_vault_backup(test_application, mocker, capsys) -> None:
|
def test_vault_backup(test_application, mocker, capsys) -> None:
|
||||||
@@ -653,7 +656,7 @@ def test_vault_backup(test_application, mocker, capsys) -> None:
|
|||||||
)
|
)
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured == Regex(
|
assert captured == Regex(
|
||||||
r"SUCCESS +\| Vault backed up to:[-\w\d\/\s]+application\.bak", re.DOTALL
|
r"SUCCESS +\| Vault backed up to:[-\w\d\/\s]+application\.bak", re.DOTALL
|
||||||
)
|
)
|
||||||
@@ -676,5 +679,5 @@ def test_vault_delete(test_application, mocker, capsys, tmp_path) -> None:
|
|||||||
)
|
)
|
||||||
with pytest.raises(KeyError):
|
with pytest.raises(KeyError):
|
||||||
app.application_main()
|
app.application_main()
|
||||||
captured = remove_ansi(capsys.readouterr().out)
|
captured = strip_ansi(capsys.readouterr().out)
|
||||||
assert captured == Regex(r"SUCCESS +\| Backup deleted", re.DOTALL)
|
assert captured == Regex(r"SUCCESS +\| Backup deleted", re.DOTALL)
|
||||||
|
|||||||
@@ -7,6 +7,7 @@ from pathlib import Path
|
|||||||
from typer.testing import CliRunner
|
from typer.testing import CliRunner
|
||||||
|
|
||||||
from obsidian_metadata.cli import app
|
from obsidian_metadata.cli import app
|
||||||
|
from tests.helpers import Regex, strip_ansi
|
||||||
|
|
||||||
from .helpers import KeyInputs, Regex # noqa: F401
|
from .helpers import KeyInputs, Regex # noqa: F401
|
||||||
|
|
||||||
@@ -37,6 +38,8 @@ def test_application(tmp_path) -> None:
|
|||||||
# input=KeyInputs.DOWN + KeyInputs.DOWN + KeyInputs.DOWN + KeyInputs.ENTER, # noqa: ERA001
|
# input=KeyInputs.DOWN + KeyInputs.DOWN + KeyInputs.DOWN + KeyInputs.ENTER, # noqa: ERA001
|
||||||
)
|
)
|
||||||
|
|
||||||
|
output = strip_ansi(result.output)
|
||||||
|
|
||||||
banner = r"""
|
banner = r"""
|
||||||
___ _ _ _ _
|
___ _ _ _ _
|
||||||
/ _ \| |__ ___(_) __| (_) __ _ _ __
|
/ _ \| |__ ___(_) __| (_) __ _ _ __
|
||||||
@@ -49,7 +52,8 @@ def test_application(tmp_path) -> None:
|
|||||||
|_| |_|\___|\__\__,_|\__,_|\__,_|\__\__,_|
|
|_| |_|\___|\__\__,_|\__,_|\__,_|\__\__,_|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
assert banner in result.output
|
assert banner in output
|
||||||
|
assert output == Regex(r"SUCCESS \| Loaded \d+ notes from \d+ total notes")
|
||||||
assert result.exit_code == 1
|
assert result.exit_code == 1
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -36,7 +36,7 @@ def test_vault_path_errors(tmp_path, capsys) -> None:
|
|||||||
assert "Vault path not found" in captured.out
|
assert "Vault path not found" in captured.out
|
||||||
|
|
||||||
with pytest.raises(typer.Exit):
|
with pytest.raises(typer.Exit):
|
||||||
Config(config_path=config_file, vault_path=Path("tests/fixtures/sample_note.md"))
|
Config(config_path=config_file, vault_path=Path("tests/fixtures/test_vault/sample_note.md"))
|
||||||
captured = capsys.readouterr()
|
captured = capsys.readouterr()
|
||||||
assert "Vault path is not a directory" in captured.out
|
assert "Vault path is not a directory" in captured.out
|
||||||
|
|
||||||
|
|||||||
@@ -32,7 +32,7 @@ def remove_all(root: Path):
 @pytest.fixture()
 def sample_note(tmp_path) -> Path:
     """Fixture which creates a temporary note file."""
-    source_file: Path = Path("tests/fixtures/test_vault/test1.md")
+    source_file: Path = Path("tests/fixtures/test_vault/sample_note.md")
     if not source_file.exists():
         raise FileNotFoundError(f"Original file not found: {source_file}")

tests/fixtures/broken_frontmatter.md (vendored, 6 deletions)
@@ -1,6 +0,0 @@
----
-tags:
-invalid = = "content"
----
-
-Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est la
tests/fixtures/sample_note.md (vendored, 39 deletions)
@@ -1,39 +0,0 @@
----
-date_created: 2022-12-22
-tags:
-  - food/fruit/apple
-  - dinner
-  - breakfast
-  - not_food
-author: John Doe
-nested_list:
-  nested_list_one:
-    - nested_list_one_a
-    - nested_list_one_b
-type:
-  - article
-  - note
----
-
-area:: mixed
-date_modified:: 2022-12-22
-status:: new
-type:: book
-inline_key:: inline_key_value
-type:: [[article]]
-tags:: from_inline_metadata
-**bold_key**:: **bold** key value
-
-
-
-
-Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
-
-Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, [in_text_key:: in-text value] eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur? #inline_tag
-
-At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, #inline_tag2 cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat.
-
-#food/fruit/pear
-#food/fruit/orange
-#dinner #breakfast
-#brunch
tests/fixtures/test_vault/sample_note.md (vendored, new file, 42 additions)
@@ -0,0 +1,42 @@
+---
+date_created: 2022-12-22 # confirm dates are translated to strings
+tags:
+  - foo
+  - bar
+frontmatter1: foo
+frontmatter2: ["bar", "baz", "qux"]
+🌱: 🌿
+# Nested lists are not supported
+# invalid:
+#   invalid:
+#     - invalid
+#     - invalid2
+---
+
+# Heading 1
+
+inline1:: foo
+inline1::bar baz
+**inline2**:: [[foo]]
+_inline3_:: value
+🌱::🌿
+key with space:: foo
+
+> inline4:: foo
+
+inline5::
+
+foo bar [intext1:: foo] baz `#invalid` qux (intext2:: foo) foobar. #tag1 Foo bar #tag2 baz qux. [[link]]
+
+The quick brown fox jumped over the lazy dog.
+
+# tag3
+
+---
+
+## invalid: invalid
+
+```python
+invalid:: invalid
+#invalid
+```
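This new fixture exercises the inline metadata formats the commit targets: bare `key:: value` pairs, bold- and italic-wrapped keys, emoji keys, blockquoted fields, and bracketed in-text fields. As an illustration only, and not the package's actual parser (which also handles code blocks, wrappings, and tags), a simplified sketch of capturing such pairs might look like this:

```python
import re

# Illustrative pattern only: capture `key:: value` at the start of a line or
# inside an in-text bracket such as `[intext1:: foo]`. Keys may contain spaces;
# values run to a closing bracket or the end of the line.
INLINE_PAIR = re.compile(
    r"(?:^|\[)\s*(?P<key>[^:\[\]\n]+?)\s*::\s*(?P<value>[^\]\n]*)",
    re.MULTILINE,
)

sample = "inline1:: foo\nfoo bar [intext1:: bar] baz qux."
print([(m["key"], m["value"]) for m in INLINE_PAIR.finditer(sample)])
# [('inline1', 'foo'), ('intext1', 'bar')]
```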
tests/fixtures/test_vault/test1.md (vendored, 47 deletions)
@@ -1,47 +0,0 @@
----
-date_created: 2022-12-22
-tags:
-  - shared_tag
-  - frontmatter_tag1
-  - frontmatter_tag2
-  - 📅/frontmatter_tag3
-frontmatter_Key1: author name
-frontmatter_Key2: ["article", "note"]
-shared_key1:
-  - shared_key1_value
-  - shared_key1_value3
-shared_key2: shared_key2_value1
----
-
-#inline_tag_top1 #inline_tag_top2
-
-top_key1:: top_key1_value
-**top_key2:: top_key2_value**
-top_key3:: [[top_key3_value_as_link]]
-shared_key1:: shared_key1_value
-shared_key1:: shared_key1_value2
-shared_key2:: shared_key2_value2
-key📅:: 📅_key_value
-
-# Heading 1
-
-Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. #intext_tag1 Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu [intext_key:: intext_value] fugiat nulla (#intext_tag2) pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est lab
-
-```python
-#ffffff
-# This is sample text with tags and metadata
-#in_codeblock_tag1
-#ffffff;
-codeblock_key:: some text
-in_codeblock_key:: in_codeblock_value
-The quick brown fox jumped over the #in_codeblock_tag2
-```
-
-Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab `this is #inline_code_tag1` illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? `this is #inline_code_tag2` Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pari
-
-bottom_key1:: bottom_key1_value
-bottom_key2:: bottom_key2_value
-
-#inline_tag_bottom1
-#inline_tag_bottom2
-#shared_tag
@@ -22,7 +22,7 @@ class KeyInputs:
     THREE = "3"


-def remove_ansi(text) -> str:
+def strip_ansi(text) -> str:
     """Remove ANSI escape sequences from a string.

     Args:
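Only the helper's name changes in this hunk; its body falls outside the diff context. For orientation, a minimal sketch of what an ANSI-stripping helper typically looks like, assuming a plain regex over CSI escape sequences (the project's actual implementation may differ):

```python
import re

# Assumed pattern: CSI sequences such as terminal color codes ("\x1b[32m", "\x1b[0m").
ANSI_ESCAPE = re.compile(r"\x1b\[[0-9;]*[a-zA-Z]")


def strip_ansi(text: str) -> str:
    """Remove ANSI escape sequences from a string."""
    return ANSI_ESCAPE.sub("", text)


assert strip_ansi("\x1b[32mSUCCESS\x1b[0m | Backup deleted") == "SUCCESS | Backup deleted"
```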
|||||||
@@ -1,531 +0,0 @@
|
|||||||
# type: ignore
|
|
||||||
"""Test the Frontmatter object from metadata.py."""
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from obsidian_metadata.models.exceptions import FrontmatterError
|
|
||||||
from obsidian_metadata.models.metadata import Frontmatter
|
|
||||||
|
|
||||||
FRONTMATTER_CONTENT: str = """
|
|
||||||
---
|
|
||||||
tags:
|
|
||||||
- tag_1
|
|
||||||
- tag_2
|
|
||||||
-
|
|
||||||
- 📅/tag_3
|
|
||||||
frontmatter_Key1: "frontmatter_Key1_value"
|
|
||||||
frontmatter_Key2: ["note", "article"]
|
|
||||||
shared_key1: "shared_key1_value"
|
|
||||||
---
|
|
||||||
more content
|
|
||||||
|
|
||||||
---
|
|
||||||
horizontal: rule
|
|
||||||
---
|
|
||||||
"""
|
|
||||||
|
|
||||||
INLINE_CONTENT = """\
|
|
||||||
repeated_key:: repeated_key_value1
|
|
||||||
#inline_tag_top1,#inline_tag_top2
|
|
||||||
**bold_key1**:: bold_key1_value
|
|
||||||
**bold_key2:: bold_key2_value**
|
|
||||||
link_key:: [[link_key_value]]
|
|
||||||
tag_key:: #tag_key_value
|
|
||||||
emoji_📅_key:: emoji_📅_key_value
|
|
||||||
**#bold_tag**
|
|
||||||
|
|
||||||
Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. [in_text_key1:: in_text_key1_value] Ut enim ad minim veniam, quis nostrud exercitation [in_text_key2:: in_text_key2_value] ullamco laboris nisi ut aliquip ex ea commodo consequat. #in_text_tag Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.
|
|
||||||
|
|
||||||
```python
|
|
||||||
#ffffff
|
|
||||||
# This is sample text [no_key:: value]with tags and metadata
|
|
||||||
#in_codeblock_tag1
|
|
||||||
#ffffff;
|
|
||||||
in_codeblock_key:: in_codeblock_value
|
|
||||||
The quick brown fox jumped over the #in_codeblock_tag2
|
|
||||||
```
|
|
||||||
repeated_key:: repeated_key_value2
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def test_create_1() -> None:
|
|
||||||
"""Test frontmatter creation.
|
|
||||||
|
|
||||||
GIVEN valid frontmatter content
|
|
||||||
WHEN a Frontmatter object is created
|
|
||||||
THEN parse the YAML frontmatter and add it to the object
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(INLINE_CONTENT)
|
|
||||||
assert frontmatter.dict == {}
|
|
||||||
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.dict == {
|
|
||||||
"frontmatter_Key1": ["frontmatter_Key1_value"],
|
|
||||||
"frontmatter_Key2": ["article", "note"],
|
|
||||||
"shared_key1": ["shared_key1_value"],
|
|
||||||
"tags": ["tag_1", "tag_2", "📅/tag_3"],
|
|
||||||
}
|
|
||||||
assert frontmatter.dict_original == {
|
|
||||||
"frontmatter_Key1": ["frontmatter_Key1_value"],
|
|
||||||
"frontmatter_Key2": ["article", "note"],
|
|
||||||
"shared_key1": ["shared_key1_value"],
|
|
||||||
"tags": ["tag_1", "tag_2", "📅/tag_3"],
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_create_2() -> None:
|
|
||||||
"""Test frontmatter creation error.
|
|
||||||
|
|
||||||
GIVEN invalid frontmatter content
|
|
||||||
WHEN a Frontmatter object is created
|
|
||||||
THEN raise ValueError
|
|
||||||
"""
|
|
||||||
fn = """---
|
|
||||||
tags: tag
|
|
||||||
invalid = = "content"
|
|
||||||
---
|
|
||||||
"""
|
|
||||||
with pytest.raises(FrontmatterError):
|
|
||||||
Frontmatter(fn)
|
|
||||||
|
|
||||||
|
|
||||||
def test_create_3():
|
|
||||||
"""Test frontmatter creation error.
|
|
||||||
|
|
||||||
GIVEN empty frontmatter content
|
|
||||||
WHEN a Frontmatter object is created
|
|
||||||
THEN set the dict to an empty dict
|
|
||||||
"""
|
|
||||||
content = "---\n\n---"
|
|
||||||
frontmatter = Frontmatter(content)
|
|
||||||
assert frontmatter.dict == {}
|
|
||||||
|
|
||||||
|
|
||||||
def test_create_4():
|
|
||||||
"""Test frontmatter creation error.
|
|
||||||
|
|
||||||
GIVEN empty frontmatter content with a yaml marker
|
|
||||||
WHEN a Frontmatter object is created
|
|
||||||
THEN set the dict to an empty dict
|
|
||||||
"""
|
|
||||||
content = "---\n-\n---"
|
|
||||||
frontmatter = Frontmatter(content)
|
|
||||||
assert frontmatter.dict == {}
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_1():
|
|
||||||
"""Test frontmatter add() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the add() method is called with an existing key
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
|
|
||||||
assert frontmatter.add("frontmatter_Key1") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_2():
|
|
||||||
"""Test frontmatter add() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the add() method is called with an existing key and existing value
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.add("frontmatter_Key1", "frontmatter_Key1_value") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_3():
|
|
||||||
"""Test frontmatter add() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the add() method is called with a new key
|
|
||||||
THEN return True and add the key to the dict
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.add("added_key") is True
|
|
||||||
assert "added_key" in frontmatter.dict
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_4():
|
|
||||||
"""Test frontmatter add() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the add() method is called with a new key and a new value
|
|
||||||
THEN return True and add the key and the value to the dict
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.add("added_key", "added_value") is True
|
|
||||||
assert frontmatter.dict["added_key"] == ["added_value"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_5():
|
|
||||||
"""Test frontmatter add() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the add() method is called with an existing key and a new value
|
|
||||||
THEN return True and add the value to the dict
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.add("frontmatter_Key1", "new_value") is True
|
|
||||||
assert frontmatter.dict["frontmatter_Key1"] == ["frontmatter_Key1_value", "new_value"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_6():
|
|
||||||
"""Test frontmatter add() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the add() method is called with an existing key and a list of new values
|
|
||||||
THEN return True and add the values to the dict
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.add("frontmatter_Key1", ["new_value", "new_value2"]) is True
|
|
||||||
assert frontmatter.dict["frontmatter_Key1"] == [
|
|
||||||
"frontmatter_Key1_value",
|
|
||||||
"new_value",
|
|
||||||
"new_value2",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_7():
|
|
||||||
"""Test frontmatter add() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the add() method is called with an existing key and a list of values including an existing value
|
|
||||||
THEN return True and add the new values to the dict
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert (
|
|
||||||
frontmatter.add("frontmatter_Key1", ["frontmatter_Key1_value", "new_value", "new_value2"])
|
|
||||||
is True
|
|
||||||
)
|
|
||||||
assert frontmatter.dict["frontmatter_Key1"] == [
|
|
||||||
"frontmatter_Key1_value",
|
|
||||||
"new_value",
|
|
||||||
"new_value2",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_1():
|
|
||||||
"""Test frontmatter contains() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the contains() method is called with a key
|
|
||||||
THEN return True if the key is found
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.contains("frontmatter_Key1") is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_2():
|
|
||||||
"""Test frontmatter contains() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the contains() method is called with a key
|
|
||||||
THEN return False if the key is not found
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.contains("no_key") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_3():
|
|
||||||
"""Test frontmatter contains() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the contains() method is called with a key and a value
|
|
||||||
THEN return True if the key and value is found
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.contains("frontmatter_Key2", "article") is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_4():
|
|
||||||
"""Test frontmatter contains() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the contains() method is called with a key and a value
|
|
||||||
THEN return False if the key and value is not found
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.contains("frontmatter_Key2", "no value") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_5():
|
|
||||||
"""Test frontmatter contains() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the contains() method is called with a key regex
|
|
||||||
THEN return True if a key matches the regex
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.contains(r"\d$", is_regex=True) is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_6():
|
|
||||||
"""Test frontmatter contains() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the contains() method is called with a key regex
|
|
||||||
THEN return False if no key matches the regex
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.contains(r"^\d", is_regex=True) is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_7():
|
|
||||||
"""Test frontmatter contains() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the contains() method is called with a key and value regex
|
|
||||||
THEN return True if a value matches the regex
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.contains("key", r"\w\d_", is_regex=True) is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_8():
|
|
||||||
"""Test frontmatter contains() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the contains() method is called with a key and value regex
|
|
||||||
THEN return False if a value does not match the regex
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.contains("key", r"_\d", is_regex=True) is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_1():
|
|
||||||
"""Test frontmatter delete() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the delete() method is called with a key that does not exist
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.delete("no key") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_2():
|
|
||||||
"""Test frontmatter delete() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the delete() method is called with an existing key and a value that does not exist
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.delete("tags", "no value") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_3():
|
|
||||||
"""Test frontmatter delete() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the delete() method is called with a regex that does not match any keys
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.delete(r"\d{3}", is_regex=True) is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_4():
|
|
||||||
"""Test frontmatter delete() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the delete() method is called with an existing key and a regex that does not match any values
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.delete("tags", r"\d{5}", is_regex=True) is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_5():
|
|
||||||
"""Test frontmatter delete() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the delete() method is called with an existing key and an existing value
|
|
||||||
THEN return True and delete the value from the dict
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.delete("tags", "tag_2") is True
|
|
||||||
assert "tag_2" not in frontmatter.dict["tags"]
|
|
||||||
assert "tags" in frontmatter.dict
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_6():
|
|
||||||
"""Test frontmatter delete() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the delete() method is called with an existing key
|
|
||||||
THEN return True and delete the key from the dict
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.delete("tags") is True
|
|
||||||
assert "tags" not in frontmatter.dict
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_7():
|
|
||||||
"""Test frontmatter delete() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the delete() method is called with a regex that matches a key
|
|
||||||
THEN return True and delete the matching keys from the dict
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.delete(r"front\w+", is_regex=True) is True
|
|
||||||
assert "frontmatter_Key1" not in frontmatter.dict
|
|
||||||
assert "frontmatter_Key2" not in frontmatter.dict
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_8():
|
|
||||||
"""Test frontmatter delete() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the delete() method is called with an existing key and a regex that matches values
|
|
||||||
THEN return True and delete the matching values
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.delete("tags", r"\w+_[23]", is_regex=True) is True
|
|
||||||
assert "tag_2" not in frontmatter.dict["tags"]
|
|
||||||
assert "📅/tag_3" not in frontmatter.dict["tags"]
|
|
||||||
assert "tag_1" in frontmatter.dict["tags"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_all():
|
|
||||||
"""Test Frontmatter delete_all method.
|
|
||||||
|
|
||||||
GIVEN Frontmatter with multiple keys
|
|
||||||
WHEN delete_all is called
|
|
||||||
THEN all keys and values are deleted
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
frontmatter.delete_all()
|
|
||||||
assert frontmatter.dict == {}
|
|
||||||
|
|
||||||
|
|
||||||
def test_has_changes_1():
|
|
||||||
"""Test frontmatter has_changes() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN no changes have been made to the object
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.has_changes() is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_has_changes_2():
|
|
||||||
"""Test frontmatter has_changes() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN changes have been made to the object
|
|
||||||
THEN return True
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
frontmatter.dict["new key"] = ["new value"]
|
|
||||||
assert frontmatter.has_changes() is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_1():
|
|
||||||
"""Test frontmatter rename() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the rename() method is called with a key
|
|
||||||
THEN return False if the key is not found
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.rename("no key", "new key") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_2():
|
|
||||||
"""Test frontmatter rename() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the rename() method is called with an existing key and non-existing value
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.rename("tags", "no tag", "new key") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_3():
|
|
||||||
"""Test frontmatter rename() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the rename() method is called with an existing key
|
|
||||||
THEN return True and rename the key
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.rename("frontmatter_Key1", "new key") is True
|
|
||||||
assert "frontmatter_Key1" not in frontmatter.dict
|
|
||||||
assert frontmatter.dict["new key"] == ["frontmatter_Key1_value"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_4():
|
|
||||||
"""Test frontmatter rename() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the rename() method is called with an existing key and value
|
|
||||||
THEN return True and rename the value
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.rename("tags", "tag_2", "new tag") is True
|
|
||||||
assert "tag_2" not in frontmatter.dict["tags"]
|
|
||||||
assert "new tag" in frontmatter.dict["tags"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_5():
|
|
||||||
"""Test frontmatter rename() method.
|
|
||||||
|
|
||||||
GIVEN a Frontmatter object
|
|
||||||
WHEN the rename() method is called with an existing key and value and the new value already exists
|
|
||||||
THEN return True and remove the old value leaving one instance of the new value
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.rename("tags", "tag_1", "tag_2") is True
|
|
||||||
assert "tag_1" not in frontmatter.dict["tags"]
|
|
||||||
assert frontmatter.dict["tags"] == ["tag_2", "📅/tag_3"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_to_yaml_1():
|
|
||||||
"""Test Frontmatter to_yaml method.
|
|
||||||
|
|
||||||
GIVEN a dictionary
|
|
||||||
WHEN the to_yaml method is called
|
|
||||||
THEN return a string with the yaml representation of the dictionary
|
|
||||||
"""
|
|
||||||
new_frontmatter: str = """\
|
|
||||||
tags:
|
|
||||||
- tag_1
|
|
||||||
- tag_2
|
|
||||||
- 📅/tag_3
|
|
||||||
frontmatter_Key1: frontmatter_Key1_value
|
|
||||||
frontmatter_Key2:
|
|
||||||
- article
|
|
||||||
- note
|
|
||||||
shared_key1: shared_key1_value
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.to_yaml() == new_frontmatter
|
|
||||||
|
|
||||||
|
|
||||||
def test_to_yaml_2():
|
|
||||||
"""Test Frontmatter to_yaml method.
|
|
||||||
|
|
||||||
GIVEN a dictionary
|
|
||||||
WHEN the to_yaml method is called with sort_keys=True
|
|
||||||
THEN return a string with the sorted yaml representation of the dictionary
|
|
||||||
"""
|
|
||||||
new_frontmatter_sorted: str = """\
|
|
||||||
frontmatter_Key1: frontmatter_Key1_value
|
|
||||||
frontmatter_Key2:
|
|
||||||
- article
|
|
||||||
- note
|
|
||||||
shared_key1: shared_key1_value
|
|
||||||
tags:
|
|
||||||
- tag_1
|
|
||||||
- tag_2
|
|
||||||
- 📅/tag_3
|
|
||||||
"""
|
|
||||||
frontmatter = Frontmatter(FRONTMATTER_CONTENT)
|
|
||||||
assert frontmatter.to_yaml(sort_keys=True) == new_frontmatter_sorted
|
|
||||||
@@ -1,455 +0,0 @@
|
|||||||
# type: ignore
|
|
||||||
"""Test inline metadata from metadata.py."""
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from obsidian_metadata.models.exceptions import InlineMetadataError
|
|
||||||
from obsidian_metadata.models.metadata import InlineMetadata
|
|
||||||
|
|
||||||
FRONTMATTER_CONTENT: str = """
|
|
||||||
---
|
|
||||||
tags:
|
|
||||||
- tag_1
|
|
||||||
- tag_2
|
|
||||||
-
|
|
||||||
- 📅/tag_3
|
|
||||||
frontmatter_Key1: "frontmatter_Key1_value"
|
|
||||||
frontmatter_Key2: ["note", "article"]
|
|
||||||
shared_key1: "shared_key1_value"
|
|
||||||
---
|
|
||||||
more content
|
|
||||||
|
|
||||||
---
|
|
||||||
horizontal: rule
|
|
||||||
---
|
|
||||||
"""
|
|
||||||
|
|
||||||
INLINE_CONTENT = """\
|
|
||||||
key1:: value1
|
|
||||||
key1:: value2
|
|
||||||
key1:: value3
|
|
||||||
key2:: value1
|
|
||||||
Paragraph of text with an [inline_key:: value1] and [inline_key:: value2] and [inline_key:: value3] which should do it.
|
|
||||||
> blockquote_key:: value1
|
|
||||||
> blockquote_key:: value2
|
|
||||||
|
|
||||||
- list_key:: value1
|
|
||||||
- list_key:: value2
|
|
||||||
|
|
||||||
1. list_key:: value1
|
|
||||||
2. list_key:: value2
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def test__grab_inline_metadata_1():
|
|
||||||
"""Test grab inline metadata.
|
|
||||||
|
|
||||||
GIVEN content that has no inline metadata
|
|
||||||
WHEN grab_inline_metadata is called
|
|
||||||
THEN an empty dict is returned
|
|
||||||
|
|
||||||
"""
|
|
||||||
content = """
|
|
||||||
---
|
|
||||||
frontmatter_key1: frontmatter_key1_value
|
|
||||||
---
|
|
||||||
not_a_key: not_a_value
|
|
||||||
```
|
|
||||||
key:: in_codeblock
|
|
||||||
```
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(content)
|
|
||||||
assert inline.dict == {}
|
|
||||||
|
|
||||||
|
|
||||||
def test__grab_inline_metadata_2():
|
|
||||||
"""Test grab inline metadata.
|
|
||||||
|
|
||||||
GIVEN content that has inline metadata
|
|
||||||
WHEN grab_inline_metadata is called
|
|
||||||
THEN the inline metadata is parsed and returned as a dict
|
|
||||||
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.dict == {
|
|
||||||
"blockquote_key": ["value1", "value2"],
|
|
||||||
"inline_key": ["value1", "value2", "value3"],
|
|
||||||
"key1": ["value1", "value2", "value3"],
|
|
||||||
"key2": ["value1"],
|
|
||||||
"list_key": ["value1", "value2", "value1", "value2"],
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test__grab_inline_metadata_3(mocker):
|
|
||||||
"""Test grab inline metadata.
|
|
||||||
|
|
||||||
GIVEN content that has inline metadata
|
|
||||||
WHEN an error occurs parsing the inline metadata
|
|
||||||
THEN raise an InlineMetadataError and pass the error message
|
|
||||||
"""
|
|
||||||
mocker.patch(
|
|
||||||
"obsidian_metadata.models.metadata.inline_metadata_from_string",
|
|
||||||
return_value=[("key")],
|
|
||||||
)
|
|
||||||
with pytest.raises(InlineMetadataError, match=r"Error parsing inline metadata: \['key'\]"):
|
|
||||||
InlineMetadata("")
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_1():
|
|
||||||
"""Test InlineMetadata add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the add() method is called with an existing key
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.add("key1") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_2():
|
|
||||||
"""Test InlineMetadata add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the add() method is called with an existing key and existing value
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.add("key1", "value1") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_3():
|
|
||||||
"""Test InlineMetadata add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the add() method is called with a new key
|
|
||||||
THEN return True and add the key to the dict
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.add("added_key") is True
|
|
||||||
assert "added_key" in inline.dict
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_4():
|
|
||||||
"""Test InlineMetadata add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the add() method is called with a new key and a new value
|
|
||||||
THEN return True and add the key and the value to the dict
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.add("added_key", "added_value") is True
|
|
||||||
assert inline.dict["added_key"] == ["added_value"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_5():
|
|
||||||
"""Test InlineMetadata add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the add() method is called with an existing key and a new value
|
|
||||||
THEN return True and add the value to the dict
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.add("key1", "new_value") is True
|
|
||||||
assert inline.dict["key1"] == ["value1", "value2", "value3", "new_value"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_6():
|
|
||||||
"""Test InlineMetadata add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the add() method is called with an existing key and a list of new values
|
|
||||||
THEN return True and add the values to the dict
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.add("key2", ["new_value", "new_value2"]) is True
|
|
||||||
assert inline.dict["key2"] == ["new_value", "new_value2", "value1"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_7():
|
|
||||||
"""Test InlineMetadata add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the add() method is called with an existing key and a list of values including an existing value
|
|
||||||
THEN return True and add the new values to the dict
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.add("key1", ["value1", "new_value", "new_value2"]) is True
|
|
||||||
assert inline.dict["key1"] == ["new_value", "new_value2", "value1", "value2", "value3"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_8():
|
|
||||||
"""Test InlineMetadata add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the add() method is called with a new key and a list of values
|
|
||||||
THEN return True and add the new values to the dict
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.add("new_key", ["value1", "new_value", "new_value2"]) is True
|
|
||||||
assert inline.dict["new_key"] == ["value1", "new_value", "new_value2"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_1():
|
|
||||||
"""Test InlineMetadata contains() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the contains() method is called with a key
|
|
||||||
THEN return True if the key is found
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.contains("key1") is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_2():
|
|
||||||
"""Test InlineMetadata contains() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the contains() method is called with a key
|
|
||||||
THEN return False if the key is not found
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.contains("no_key") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_3():
|
|
||||||
"""Test InlineMetadata contains() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the contains() method is called with a key and a value
|
|
||||||
THEN return True if the key and value is found
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.contains("key1", "value1") is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_4():
|
|
||||||
"""Test InlineMetadata contains() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the contains() method is called with a key and a value
|
|
||||||
THEN return False if the key and value is not found
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.contains("key1", "no value") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_5():
|
|
||||||
"""Test InlineMetadata contains() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the contains() method is called with a key regex
|
|
||||||
THEN return True if a key matches the regex
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.contains(r"\d$", is_regex=True) is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_6():
|
|
||||||
"""Test InlineMetadata contains() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the contains() method is called with a key regex
|
|
||||||
THEN return False if no key matches the regex
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.contains(r"^\d", is_regex=True) is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_7():
|
|
||||||
"""Test InlineMetadata contains() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the contains() method is called with a key and value regex
|
|
||||||
THEN return True if a value matches the regex
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.contains(r"key\d", r"\w\d", is_regex=True) is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_8():
|
|
||||||
"""Test InlineMetadata contains() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the contains() method is called with a key and value regex
|
|
||||||
THEN return False if a value does not match the regex
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.contains("key1", r"_\d", is_regex=True) is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_1():
|
|
||||||
"""Test InlineMetadata delete() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the delete() method is called with a key that does not exist
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.delete("no key") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_2():
|
|
||||||
"""Test InlineMetadata delete() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the delete() method is called with an existing key and a value that does not exist
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.delete("key1", "no value") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_3():
|
|
||||||
"""Test InlineMetadata delete() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the delete() method is called with a regex that does not match any keys
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.delete(r"\d{3}", is_regex=True) is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_4():
|
|
||||||
"""Test InlineMetadata delete() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the delete() method is called with an existing key and a regex that does not match any values
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.delete("key1", r"\d{5}", is_regex=True) is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_5():
|
|
||||||
"""Test InlineMetadata delete() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the delete() method is called with an existing key and an existing value
|
|
||||||
THEN return True and delete the value from the dict
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.delete("key1", "value1") is True
|
|
||||||
assert "value1" not in inline.dict["key1"]
|
|
||||||
assert "key1" in inline.dict
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_6():
|
|
||||||
"""Test InlineMetadata delete() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the delete() method is called with an existing key
|
|
||||||
THEN return True and delete the key from the dict
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.delete("key1") is True
|
|
||||||
assert "key1" not in inline.dict
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_7():
|
|
||||||
"""Test InlineMetadata delete() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the delete() method is called with a regex that matches a key
|
|
||||||
THEN return True and delete the matching keys from the dict
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.delete(r"key\w+", is_regex=True) is True
|
|
||||||
assert "key1" not in inline.dict
|
|
||||||
assert "key2" not in inline.dict
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_8():
|
|
||||||
"""Test InlineMetadata delete() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the delete() method is called with an existing key and a regex that matches values
|
|
||||||
THEN return True and delete the matching values
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.delete("key1", r"\w+\d", is_regex=True) is True
|
|
||||||
assert "value1" not in inline.dict["key1"]
|
|
||||||
assert "value2" not in inline.dict["key1"]
|
|
||||||
assert "value3" not in inline.dict["key1"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_has_changes_1():
|
|
||||||
"""Test InlineMetadata has_changes() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN no changes have been made to the object
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.has_changes() is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_has_changes_2():
|
|
||||||
"""Test InlineMetadata has_changes() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN changes have been made to the object
|
|
||||||
THEN return True
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
inline.dict["new key"] = ["new value"]
|
|
||||||
assert inline.has_changes() is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_1():
|
|
||||||
"""Test InlineMetadata rename() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the rename() method is called with a key
|
|
||||||
THEN return False if the key is not found
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.rename("no key", "new key") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_2():
|
|
||||||
"""Test InlineMetadata rename() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the rename() method is called with an existing key and non-existing value
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.rename("key1", "no value", "new value") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_3():
|
|
||||||
"""Test InlineMetadata rename() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the rename() method is called with an existing key
|
|
||||||
THEN return True and rename the key
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.rename("key1", "new key") is True
|
|
||||||
assert "key1" not in inline.dict
|
|
||||||
assert inline.dict["new key"] == ["value1", "value2", "value3"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_4():
|
|
||||||
"""Test InlineMetadata rename() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the rename() method is called with an existing key and value
|
|
||||||
THEN return True and rename the value
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.rename("key1", "value1", "new value") is True
|
|
||||||
assert "value1" not in inline.dict["key1"]
|
|
||||||
assert "new value" in inline.dict["key1"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_5():
|
|
||||||
"""Test InlineMetadata rename() method.
|
|
||||||
|
|
||||||
GIVEN a InlineMetadata object
|
|
||||||
WHEN the rename() method is called with an existing key and value and the new value already exists
|
|
||||||
THEN return True and remove the old value leaving one instance of the new value
|
|
||||||
"""
|
|
||||||
inline = InlineMetadata(INLINE_CONTENT)
|
|
||||||
assert inline.rename("key1", "value1", "value2") is True
|
|
||||||
assert inline.dict["key1"] == ["value2", "value3"]
|
|
||||||
@@ -1,367 +0,0 @@
|
|||||||
# type: ignore
|
|
||||||
"""Test inline tags from metadata.py."""
|
|
||||||
|
|
||||||
from obsidian_metadata.models.metadata import InlineTags
|
|
||||||
|
|
||||||
CONTENT = """\
|
|
||||||
#tag1 #tag2
|
|
||||||
> #tag3
|
|
||||||
**#tag4**
|
|
||||||
I am a sentence with #tag5 and #tag6 in the middle
|
|
||||||
#tag🙈7
|
|
||||||
#tag/8
|
|
||||||
#tag/👋/9
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def test__grab_inline_tags_1() -> None:
|
|
||||||
"""Test _grab_inline_tags() method.
|
|
||||||
|
|
||||||
GIVEN a string with a codeblock
|
|
||||||
WHEN the method is called
|
|
||||||
THEN the codeblock is ignored
|
|
||||||
"""
|
|
||||||
content = """
|
|
||||||
some text
|
|
||||||
|
|
||||||
```python
|
|
||||||
#tag1
|
|
||||||
#tag2
|
|
||||||
```
|
|
||||||
|
|
||||||
```
|
|
||||||
#tag3
|
|
||||||
#tag4
|
|
||||||
```
|
|
||||||
"""
|
|
||||||
tags = InlineTags(content)
|
|
||||||
assert tags.list == []
|
|
||||||
assert tags.list_original == []
|
|
||||||
|
|
||||||
|
|
||||||
def test__grab_inline_tags_2() -> None:
|
|
||||||
"""Test _grab_inline_tags() method.
|
|
||||||
|
|
||||||
GIVEN a string with tags
|
|
||||||
WHEN the method is called
|
|
||||||
THEN the tags are extracted
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.list == [
|
|
||||||
"tag/8",
|
|
||||||
"tag/👋/9",
|
|
||||||
"tag1",
|
|
||||||
"tag2",
|
|
||||||
"tag3",
|
|
||||||
"tag4",
|
|
||||||
"tag5",
|
|
||||||
"tag6",
|
|
||||||
"tag🙈7",
|
|
||||||
]
|
|
||||||
assert tags.list_original == [
|
|
||||||
"tag/8",
|
|
||||||
"tag/👋/9",
|
|
||||||
"tag1",
|
|
||||||
"tag2",
|
|
||||||
"tag3",
|
|
||||||
"tag4",
|
|
||||||
"tag5",
|
|
||||||
"tag6",
|
|
||||||
"tag🙈7",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_1():
|
|
||||||
"""Test add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the add() method is called with a tag that exists in the list
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.add("tag1") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_2():
|
|
||||||
"""Test add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the add() method is called with a new tag
|
|
||||||
THEN return True and add the tag to the list
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.add("new_tag") is True
|
|
||||||
assert "new_tag" in tags.list
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_3():
|
|
||||||
"""Test add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the add() method is called with a list of new tags
|
|
||||||
THEN return True and add the tags to the list
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
new_tags = ["new_tag1", "new_tag2"]
|
|
||||||
assert tags.add(new_tags) is True
|
|
||||||
assert "new_tag1" in tags.list
|
|
||||||
assert "new_tag2" in tags.list
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_4():
|
|
||||||
"""Test add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the add() method is called with a list of tags, some of which already exist
|
|
||||||
THEN return True and add only the new tags to the list
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
new_tags = ["new_tag1", "new_tag2", "tag1", "tag2"]
|
|
||||||
assert tags.add(new_tags) is True
|
|
||||||
assert tags.list == [
|
|
||||||
"new_tag1",
|
|
||||||
"new_tag2",
|
|
||||||
"tag/8",
|
|
||||||
"tag/👋/9",
|
|
||||||
"tag1",
|
|
||||||
"tag2",
|
|
||||||
"tag3",
|
|
||||||
"tag4",
|
|
||||||
"tag5",
|
|
||||||
"tag6",
|
|
||||||
"tag🙈7",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_5():
|
|
||||||
"""Test add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the add() method is called with a list of tags which are already in the list
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
new_tags = ["tag1", "tag2"]
|
|
||||||
assert tags.add(new_tags) is False
|
|
||||||
assert "tag1" in tags.list
|
|
||||||
assert "tag2" in tags.list
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_6():
|
|
||||||
"""Test add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the add() method is called with a list of tags which have a # in the name
|
|
||||||
THEN strip the # from the tag name
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
new_tags = ["#tag1", "#tag2", "#new_tag"]
|
|
||||||
assert tags.add(new_tags) is True
|
|
||||||
assert tags.list == [
|
|
||||||
"new_tag",
|
|
||||||
"tag/8",
|
|
||||||
"tag/👋/9",
|
|
||||||
"tag1",
|
|
||||||
"tag2",
|
|
||||||
"tag3",
|
|
||||||
"tag4",
|
|
||||||
"tag5",
|
|
||||||
"tag6",
|
|
||||||
"tag🙈7",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def test_add_7():
|
|
||||||
"""Test add() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the add() method is called with a tag which has a # in the name
|
|
||||||
THEN strip the # from the tag name
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.add("#tag1") is False
|
|
||||||
assert tags.add("#new_tag") is True
|
|
||||||
assert "new_tag" in tags.list
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_1():
|
|
||||||
"""Test contains() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the contains() method is called with a tag that exists in the list
|
|
||||||
THEN return True
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.contains("tag1") is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_2():
|
|
||||||
"""Test contains() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the contains() method is called with a tag that does not exist in the list
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.contains("no_tag") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_3():
|
|
||||||
"""Test contains() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the contains() method is called with a regex that matches a tag in the list
|
|
||||||
THEN return True
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.contains(r"tag\d", is_regex=True) is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_contains_4():
|
|
||||||
"""Test contains() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the contains() method is called with a regex that does not match any tags in the list
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.contains(r"tag\d\d", is_regex=True) is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_1():
|
|
||||||
"""Test delete() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the delete() method is called with a tag that exists in the list
|
|
||||||
THEN return True and remove the tag from the list
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.delete("tag1") is True
|
|
||||||
assert "tag1" not in tags.list
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_2():
|
|
||||||
"""Test delete() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the delete() method is called with a tag that does not exist in the list
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.delete("no_tag") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_3():
|
|
||||||
"""Test delete() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the delete() method is called with a regex that matches a tag in the list
|
|
||||||
THEN return True and remove the tag from the list
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.delete(r"tag\d") is True
|
|
||||||
assert tags.list == ["tag/8", "tag/👋/9", "tag🙈7"]
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_4():
|
|
||||||
"""Test delete() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the delete() method is called with a regex that does not match any tags in the list
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.delete(r"tag\d\d") is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_has_changes_1():
|
|
||||||
"""Test has_changes() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the has_changes() method is called
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.has_changes() is False
|
|
||||||
|
|
||||||
|
|
||||||
def test_has_changes_2():
|
|
||||||
"""Test has_changes() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the has_changes() method after the list has been updated
|
|
||||||
THEN return True
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
tags.list = ["new_tag"]
|
|
||||||
assert tags.has_changes() is True
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_1():
|
|
||||||
"""Test rename() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the rename() method is called with a tag that exists in the list
|
|
||||||
THEN return True and rename the tag in the list
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.rename("tag1", "new_tag") is True
|
|
||||||
assert "tag1" not in tags.list
|
|
||||||
assert "new_tag" in tags.list
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_2():
|
|
||||||
"""Test rename() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the rename() method is called with a tag that does not exist in the list
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.rename("no_tag", "new_tag") is False
|
|
||||||
assert "new_tag" not in tags.list
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_3():
|
|
||||||
"""Test rename() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the rename() method is called with a tag that exists and the new tag name already exists in the list
|
|
||||||
THEN return True and ensure the new tag name is only in the list once
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.rename(r"tag1", "tag2") is True
|
|
||||||
assert tags.list == [
|
|
||||||
"tag/8",
|
|
||||||
"tag/👋/9",
|
|
||||||
"tag2",
|
|
||||||
"tag3",
|
|
||||||
"tag4",
|
|
||||||
"tag5",
|
|
||||||
"tag6",
|
|
||||||
"tag🙈7",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_4():
|
|
||||||
"""Test rename() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the rename() method is called with a new tag value that is None
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.rename("tag1", None) is False
|
|
||||||
assert "tag1" in tags.list
|
|
||||||
|
|
||||||
|
|
||||||
def test_rename_5():
|
|
||||||
"""Test rename() method.
|
|
||||||
|
|
||||||
GIVEN a InlineTag object
|
|
||||||
WHEN the rename() method is called with a new tag value that is empty
|
|
||||||
THEN return False
|
|
||||||
"""
|
|
||||||
tags = InlineTags(CONTENT)
|
|
||||||
assert tags.rename("tag1", "") is False
|
|
||||||
assert "tag1" in tags.list
|
|
||||||
209 tests/metadata_test.py Normal file
@@ -0,0 +1,209 @@
# type: ignore
"""Test the InlineField class."""

import pytest

from obsidian_metadata.models.enums import MetadataType, Wrapping
from obsidian_metadata.models.metadata import InlineField, dict_to_yaml


def test_dict_to_yaml_1():
    """Test dict_to_yaml() function.

    GIVEN a dictionary
    WHEN values contain lists
    THEN confirm the output is not sorted
    """
    test_dict = {"k2": ["v1", "v2"], "k1": ["v1", "v2"]}
    assert dict_to_yaml(test_dict) == "k2:\n - v1\n - v2\nk1:\n - v1\n - v2\n"


def test_dict_to_yaml_2():
    """Test dict_to_yaml() function.

    GIVEN a dictionary
    WHEN values contain lists and sort_keys is True
    THEN confirm the output is sorted
    """
    test_dict = {"k2": ["v1", "v2"], "k1": ["v1", "v2"]}
    assert dict_to_yaml(test_dict, sort_keys=True) == "k1:\n - v1\n - v2\nk2:\n - v1\n - v2\n"


def test_dict_to_yaml_3():
    """Test dict_to_yaml() function.

    GIVEN a dictionary
    WHEN values contain a list with a single value
    THEN confirm single-value lists are converted to strings
    """
    test_dict = {"k2": ["v1"], "k1": ["v1", "v2"]}
    assert dict_to_yaml(test_dict, sort_keys=True) == "k1:\n - v1\n - v2\nk2: v1\n"


def test_init_1():
    """Test creating an InlineField object.

    GIVEN an inline tag
    WHEN an InlineField object is created
    THEN confirm the object's attributes match the expected values
    """
    obj = InlineField(
        meta_type=MetadataType.TAGS,
        key=None,
        value="tag1",
    )
    assert obj.meta_type == MetadataType.TAGS
    assert obj.key is None
    assert obj.value == "tag1"
    assert obj.normalized_value == "tag1"
    assert obj.wrapping == Wrapping.NONE
    assert obj.clean_key is None
    assert obj.normalized_key is None
    assert not obj.key_open
    assert not obj.key_close
    assert obj.is_changed is False


def test_init_2():
    """Test creating an InlineField object.

    GIVEN an inline key/value pair
    WHEN an InlineField object is created
    THEN confirm the object's attributes match the expected values
    """
    obj = InlineField(meta_type=MetadataType.INLINE, key="key", value="value")
    assert obj.meta_type == MetadataType.INLINE
    assert obj.key == "key"
    assert obj.value == "value"
    assert obj.normalized_value == "value"
    assert obj.wrapping == Wrapping.NONE
    assert obj.clean_key == "key"
    assert obj.normalized_key == "key"
    assert not obj.key_open
    assert not obj.key_close
    assert obj.is_changed is False

    obj = InlineField(
        meta_type=MetadataType.INLINE,
        key="key",
        value="value",
        wrapping=Wrapping.PARENS,
    )
    assert obj.meta_type == MetadataType.INLINE
    assert obj.key == "key"
    assert obj.value == "value"
    assert obj.normalized_value == "value"
    assert obj.wrapping == Wrapping.PARENS
    assert obj.clean_key == "key"
    assert obj.normalized_key == "key"
    assert not obj.key_open
    assert not obj.key_close
    assert obj.is_changed is False

    obj = InlineField(
        meta_type=MetadataType.INLINE,
        key="**key**",
        value="value",
        wrapping=Wrapping.BRACKETS,
    )
    assert obj.meta_type == MetadataType.INLINE
    assert obj.key == "**key**"
    assert obj.value == "value"
    assert obj.normalized_value == "value"
    assert obj.wrapping == Wrapping.BRACKETS
    assert obj.clean_key == "key"
    assert obj.normalized_key == "key"
    assert obj.key_open == "**"
    assert obj.key_close == "**"
    assert obj.is_changed is False


@pytest.mark.parametrize(
    (
        "original",
        "cleaned",
        "normalized",
        "key_open",
        "key_close",
    ),
    [
        ("foo", "foo", "foo", "", ""),
        ("🌱/🌿", "🌱/🌿", "🌱/🌿", "", ""),
        ("FOO 1", "FOO 1", "foo-1", "", ""),
        ("**key foo**", "key foo", "key-foo", "**", "**"),
        ("## KEY", "KEY", "key", "## ", ""),
    ],
)
def test_init_3(original, cleaned, normalized, key_open, key_close):
    """Test creating an InlineField object.

    GIVEN an InlineField object is created
    WHEN the key needs to be normalized
    THEN confirm clean_key() returns the expected value
    """
    obj = InlineField(meta_type=MetadataType.INLINE, key=original, value="value")
    assert obj.clean_key == cleaned
    assert obj.normalized_key == normalized
    assert obj.key_open == key_open
    assert obj.key_close == key_close


@pytest.mark.parametrize(
    ("original", "normalized"),
    [("foo", "foo"), ("🌱/🌿", "🌱/🌿"), (" value ", "value"), (" ", "-"), ("", "-")],
)
def test_init_4(original, normalized):
    """Test creating an InlineField object.

    GIVEN an InlineField object is created
    WHEN the value needs to be normalized
    THEN create the normalized_value attribute
    """
    obj = InlineField(meta_type=MetadataType.INLINE, key="key", value=original)
    assert obj.value == original
    assert obj.normalized_value == normalized


def test_inline_field_init_5():
    """Test updating the is_changed attribute.

    GIVEN creating an object
    WHEN is_changed set to True at init
    THEN confirm is_changed is True
    """
    obj = InlineField(meta_type=MetadataType.TAGS, key="key", value="tag1", is_changed=True)
    assert obj.is_changed is True


def test_inline_field_init_6():
    """Test updating the is_changed attribute.

    GIVEN creating an object
    WHEN is_changed set to True after init
    THEN confirm is_changed is True
    """
    obj = InlineField(meta_type=MetadataType.TAGS, key="key", value="tag1", is_changed=False)
    assert obj.is_changed is False
    obj.is_changed = True
    assert obj.is_changed is True


def test_inline_field_init_4():
    """Test updating the is_changed attribute.

    GIVEN creating an object
    WHEN key_open and key_close are set after init
    THEN confirm they are set correctly
    """
    obj = InlineField(
        meta_type=MetadataType.INLINE,
        key="_key_",
        value="value",
        is_changed=False,
    )
    assert obj.key_open == "_"
    assert obj.key_close == "_"
    obj.key_open = "**"
    obj.key_close = "**"
    assert obj.key_open == "**"
    assert obj.key_close == "**"
@@ -1,814 +0,0 @@
# type: ignore
"""Test VaultMetadata object from metadata.py."""

import pytest

from obsidian_metadata.models.enums import MetadataType
from obsidian_metadata.models.metadata import (
    VaultMetadata,
)
from tests.helpers import Regex, remove_ansi


def test_vault_metadata__init_1() -> None:
    """Test VaultMetadata class."""
    vm = VaultMetadata()
    assert vm.dict == {}
    assert vm.frontmatter == {}
    assert vm.inline_metadata == {}
    assert vm.tags == []


def test_index_metadata_1():
    """Test index_metadata() method.

    GIVEN a dictionary to add
    WHEN the target area is FRONTMATTER and the old dictionary is empty
    THEN the new dictionary is added to the target area
    """
    vm = VaultMetadata()
    new_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}
    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=new_dict)
    assert vm.dict == new_dict
    assert vm.frontmatter == new_dict


def test_index_metadata_2():
    """Test index_metadata() method.

    GIVEN a dictionary to add
    WHEN the target area is FRONTMATTER and the old dictionary is not empty
    THEN the new dictionary is merged with the old dictionary
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"], "other_key": ["value1"]}
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}

    new_dict = {"key1": ["value1"], "key2": ["value1", "value3"], "key3": ["value1"]}

    vm.index_metadata(area=MetadataType.FRONTMATTER, metadata=new_dict)
    assert vm.dict == {
        "key1": ["value1"],
        "key2": ["value1", "value2", "value3"],
        "key3": ["value1"],
        "other_key": ["value1"],
    }
    assert vm.frontmatter == {
        "key1": ["value1"],
        "key2": ["value1", "value2", "value3"],
        "key3": ["value1"],
    }


def test_index_metadata_3():
    """Test index_metadata() method.

    GIVEN a dictionary to add
    WHEN the target area is INLINE and the old dictionary is empty
    THEN the new dictionary is added to the target area
    """
    vm = VaultMetadata()
    new_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}
    vm.index_metadata(area=MetadataType.INLINE, metadata=new_dict)
    assert vm.dict == new_dict
    assert vm.inline_metadata == new_dict


def test_index_metadata_4():
    """Test index_metadata() method.

    GIVEN a dictionary to add
    WHEN the target area is INLINE and the old dictionary is not empty
    THEN the new dictionary is merged with the old dictionary
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"], "other_key": ["value1"]}
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}

    new_dict = {"key1": ["value1"], "key2": ["value1", "value3"], "key3": ["value1"]}

    vm.index_metadata(area=MetadataType.INLINE, metadata=new_dict)
    assert vm.dict == {
        "key1": ["value1"],
        "key2": ["value1", "value2", "value3"],
        "key3": ["value1"],
        "other_key": ["value1"],
    }
    assert vm.inline_metadata == {
        "key1": ["value1"],
        "key2": ["value1", "value2", "value3"],
        "key3": ["value1"],
    }


def test_index_metadata_5():
    """Test index_metadata() method.

    GIVEN a dictionary to add
    WHEN the target area is TAGS and the old list is empty
    THEN the new list is added to the target area
    """
    vm = VaultMetadata()
    new_list = ["tag1", "tag2", "tag3"]
    vm.index_metadata(area=MetadataType.TAGS, metadata=new_list)
    assert vm.dict == {}
    assert vm.tags == new_list


def test_index_metadata_6():
    """Test index_metadata() method.

    GIVEN a dictionary to add
    WHEN the target area is TAGS and the old list is not empty
    THEN the new list is merged with the old list
    """
    vm = VaultMetadata()
    vm.tags = ["tag1", "tag2", "tag3"]
    new_list = ["tag1", "tag2", "tag4", "tag5"]

    vm.index_metadata(area=MetadataType.TAGS, metadata=new_list)
    assert vm.dict == {}
    assert vm.tags == ["tag1", "tag2", "tag3", "tag4", "tag5"]


def test_contains_1():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key that exists
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.FRONTMATTER, key="key1") is True


def test_contains_2():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key that does not exist
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.FRONTMATTER, key="key3") is False


def test_contains_3():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key and value that exists
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.FRONTMATTER, key="key2", value="value1") is True


def test_contains_4():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key and value that does not exist
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.FRONTMATTER, key="key2", value="value3") is False


def test_contains_5():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key that exists with regex
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.FRONTMATTER, key=r"\w+\d", is_regex=True) is True


def test_contains_6():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key that does not exist with regex
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.FRONTMATTER, key=r"^\d", is_regex=True) is False


def test_contains_7():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key and value that exists with regex
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert (
        vm.contains(area=MetadataType.FRONTMATTER, key="key2", value=r"\w\d", is_regex=True) is True
    )


def test_contains_8():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked for a key and value that does not exist with regex
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert (
        vm.contains(area=MetadataType.FRONTMATTER, key="key2", value=r"^\d", is_regex=True) is False
    )


def test_contains_9():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN FRONTMATTER is checked with a key is None
    THEN raise a ValueError
    """
    vm = VaultMetadata()
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    with pytest.raises(ValueError, match="Key must be provided"):
        vm.contains(area=MetadataType.FRONTMATTER, value="value1")


def test_contains_10():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key that exists
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key="key1") is True


def test_contains_11():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key that does not exist
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key="key3") is False


def test_contains_12():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key and value that exists
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key="key2", value="value1") is True


def test_contains_13():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key and value that does not exist
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key="key2", value="value3") is False


def test_contains_14():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key that exists with regex
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key=r"\w+\d", is_regex=True) is True


def test_contains_15():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key that does not exist with regex
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key=r"^\d", is_regex=True) is False


def test_contains_16():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key and value that exists with regex
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key="key2", value=r"\w\d", is_regex=True) is True


def test_contains_17():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked for a key and value that does not exist with regex
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.INLINE, key="key2", value=r"^\d", is_regex=True) is False


def test_contains_18():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN INLINE is checked with a key is None
    THEN raise a ValueError
    """
    vm = VaultMetadata()
    vm.inline_metadata = {"key1": ["value1"], "key2": ["value1", "value2"]}
    with pytest.raises(ValueError, match="Key must be provided"):
        vm.contains(area=MetadataType.INLINE, value="value1")


def test_contains_19():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a key but not a value
    THEN raise a ValueError
    """
    vm = VaultMetadata()
    vm.tags = ["tag1", "tag2", "tag3"]
    with pytest.raises(ValueError, match="Value must be provided"):
        vm.contains(area=MetadataType.TAGS, key="key1")


def test_contains_20():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a value that exists
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.tags = ["tag1", "tag2", "tag3"]
    assert vm.contains(area=MetadataType.TAGS, value="tag1") is True


def test_contains_21():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a value that does not exist
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.tags = ["tag1", "tag2", "tag3"]
    assert vm.contains(area=MetadataType.TAGS, value="value1") is False


def test_contains_22():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a key regex but no value
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.tags = ["tag1", "tag2", "tag3"]
    with pytest.raises(ValueError, match="Value must be provided"):
        vm.contains(area=MetadataType.TAGS, key=r"\w", is_regex=True)


def test_contains_23():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a value that does not exist with regex
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.tags = ["tag1", "tag2", "tag3"]
    assert vm.contains(area=MetadataType.TAGS, value=r"^\d", is_regex=True) is False


def test_contains_24():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN TAGS is checked for a value that exists with regex
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.tags = ["tag1", "tag2", "tag3"]
    assert vm.contains(area=MetadataType.TAGS, value=r"^tag\d", is_regex=True) is True


def test_contains_25():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key that exists
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key="key1") is True


def test_contains_26():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key that does not exist
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key="key3") is False


def test_contains_27():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key and value that exists
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key="key2", value="value1") is True


def test_contains_28():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key and value that does not exist
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key="key2", value="value3") is False


def test_contains_29():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key that exists with regex
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key=r"\w+\d", is_regex=True) is True


def test_contains_30():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key that does not exist with regex
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key=r"^\d", is_regex=True) is False


def test_contains_31():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key and value that exists with regex
    THEN True is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key="key2", value=r"\w\d", is_regex=True) is True


def test_contains_32():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked for a key and value that does not exist with regex
    THEN False is returned
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.contains(area=MetadataType.ALL, key="key2", value=r"^\d", is_regex=True) is False


def test_contains_33():
    """Test contains() method.

    GIVEN a VaultMetadata object
    WHEN ALL is checked with a key is None
    THEN raise a ValueError
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    with pytest.raises(ValueError, match="Key must be provided"):
        vm.contains(area=MetadataType.ALL, value="value1")


def test_delete_1():
    """Test delete() method.

    GIVEN a VaultMetadata object
    WHEN a key is deleted
    THEN return True and the key is removed
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.delete(key="key1") is True
    assert vm.dict == {"key2": ["value1", "value2"]}


def test_delete_2():
    """Test delete() method.

    GIVEN a VaultMetadata object
    WHEN a key is deleted that does not exist
    THEN return False and the key is not removed
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.delete(key="key3") is False
    assert vm.dict == {"key1": ["value1"], "key2": ["value1", "value2"]}


def test_delete_3():
    """Test delete() method.

    GIVEN a VaultMetadata object
    WHEN a key and value are specified
    THEN return True and remove the value
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.delete(key="key2", value_to_delete="value1") is True
    assert vm.dict == {"key1": ["value1"], "key2": ["value2"]}


def test_delete_4():
    """Test delete() method.

    GIVEN a VaultMetadata object
    WHEN a key and nonexistent value are specified
    THEN return False
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.delete(key="key2", value_to_delete="value11") is False
    assert vm.dict == {"key1": ["value1"], "key2": ["value1", "value2"]}


def test_rename_1():
    """Test VaultMetadata rename() method.

    GIVEN a VaultMetadata object
    WHEN the rename() method is called with a key
    THEN return False if the key is not found
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.rename("no key", "new key") is False


def test_rename_2():
    """Test VaultMetadata rename() method.

    GIVEN a VaultMetadata object
    WHEN the rename() method is called with an existing key and non-existing value
    THEN return False
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.rename("key1", "no value", "new value") is False


def test_rename_3():
    """Test VaultMetadata rename() method.

    GIVEN a VaultMetadata object
    WHEN the rename() method is called with an existing key
    THEN return True and rename the key
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.rename("key1", "new key") is True
    assert vm.dict == {"key2": ["value1", "value2"], "new key": ["value1"]}


def test_rename_4():
    """Test VaultMetadata rename() method.

    GIVEN a VaultMetadata object
    WHEN the rename() method is called with an existing key and value
    THEN return True and rename the value
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.rename("key1", "value1", "new value") is True
    assert vm.dict == {"key1": ["new value"], "key2": ["value1", "value2"]}


def test_rename_5():
    """Test VaultMetadata rename() method.

    GIVEN a VaultMetadata object
    WHEN the rename() method is called with an existing key and value and the new value already exists
    THEN return True and remove the old value leaving one instance of the new value
    """
    vm = VaultMetadata()
    vm.dict = {"key1": ["value1"], "key2": ["value1", "value2"]}
    assert vm.rename("key2", "value1", "value2") is True
    assert vm.dict == {"key1": ["value1"], "key2": ["value2"]}


def test_print_metadata_1(capsys):
    """Test print_metadata() method.

    GIVEN calling print_metadata() with a VaultMetadata object
    WHEN ALL is specified
    THEN print all the metadata
    """
    vm = VaultMetadata()
    vm.dict = {
        "key1": ["value1", "value2"],
        "key2": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    vm.inline_metadata = {
        "key1": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.tags = ["tag1", "tag2", "tag3"]

    vm.print_metadata(area=MetadataType.ALL)
    captured = remove_ansi(capsys.readouterr().out)
    assert "All metadata" in captured
    assert captured == Regex("┃ Keys +┃ Values +┃")
    assert captured == Regex("│ key1 +│ value1 +│")
    assert captured == Regex("│ key2 +│ value1 +│")
    assert captured == Regex("│ key4 +│ value1 +│")
    assert "All inline tags" in captured
    assert captured == Regex("#tag1 +#tag2")


def test_print_metadata_2(capsys):
    """Test print_metadata() method.

    GIVEN calling print_metadata() with a VaultMetadata object
    WHEN FRONTMATTER is specified
    THEN print all the metadata
    """
    vm = VaultMetadata()
    vm.dict = {
        "key1": ["value1", "value2"],
        "key2": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    vm.inline_metadata = {
        "key1": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.tags = ["tag1", "tag2", "tag3"]

    vm.print_metadata(area=MetadataType.FRONTMATTER)
    captured = remove_ansi(capsys.readouterr().out)
    assert "All frontmatter" in captured
    assert captured == Regex("┃ Keys +┃ Values +┃")
    assert captured == Regex("│ key1 +│ value1 +│")
    assert captured == Regex("│ key2 +│ value1 +│")
    assert captured != Regex("│ key4 +│ value1 +│")
    assert "All inline tags" not in captured
    assert captured != Regex("#tag1 +#tag2")


def test_print_metadata_3(capsys):
    """Test print_metadata() method.

    GIVEN calling print_metadata() with a VaultMetadata object
    WHEN INLINE is specified
    THEN print all the metadata
    """
    vm = VaultMetadata()
    vm.dict = {
        "key1": ["value1", "value2"],
        "key2": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    vm.inline_metadata = {
        "key1": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.tags = ["tag1", "tag2", "tag3"]

    vm.print_metadata(area=MetadataType.INLINE)
    captured = remove_ansi(capsys.readouterr().out)
    assert "All inline" in captured
    assert captured == Regex("┃ Keys +┃ Values +┃")
    assert captured == Regex("│ key1 +│ value1 +│")
    assert captured != Regex("│ key2 +│ value1 +│")
    assert captured == Regex("│ key4 +│ value1 +│")
    assert "All inline tags" not in captured
    assert captured != Regex("#tag1 +#tag2")


def test_print_metadata_4(capsys):
    """Test print_metadata() method.

    GIVEN calling print_metadata() with a VaultMetadata object
    WHEN TAGS is specified
    THEN print all the tags
    """
    vm = VaultMetadata()
    vm.dict = {
        "key1": ["value1", "value2"],
        "key2": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    vm.inline_metadata = {
        "key1": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.tags = ["tag1", "tag2", "tag3"]

    vm.print_metadata(area=MetadataType.TAGS)
    captured = remove_ansi(capsys.readouterr().out)
    assert "All inline tags" in captured
    assert captured != Regex("┃ Keys +┃ Values +┃")
    assert captured != Regex("│ key1 +│ value1 +│")
    assert captured != Regex("│ key2 +│ value1 +│")
    assert captured != Regex("│ key4 +│ value1 +│")
    assert captured == Regex("#tag1 +#tag2 +#tag3")


def test_print_metadata_5(capsys):
    """Test print_metadata() method.

    GIVEN calling print_metadata() with a VaultMetadata object
    WHEN KEYS is specified
    THEN print all the tags
    """
    vm = VaultMetadata()
    vm.dict = {
        "key1": ["value1", "value2"],
        "key2": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.frontmatter = {"key1": ["value1"], "key2": ["value1", "value2"]}
    vm.inline_metadata = {
        "key1": ["value1", "value2"],
        "key3": ["value1"],
        "key4": ["value1", "value2"],
    }
    vm.tags = ["tag1", "tag2", "tag3"]

    vm.print_metadata(area=MetadataType.KEYS)
    captured = remove_ansi(capsys.readouterr().out)
    assert "All Keys" in captured
    assert captured != Regex("┃ Keys +┃ Values +┃")
    assert captured != Regex("│ key1 +│ value1 +│")
    assert captured != Regex("│ key2 +│ value1 +│")
    assert captured != Regex("│ key4 +│ value1 +│")
    assert captured != Regex("#tag1 +#tag2 +#tag3")
    assert captured == Regex("key1 +key2 +key3 +key4")
230 tests/notes/note_init_test.py Normal file
@@ -0,0 +1,230 @@
# type: ignore
"""Test notes.py."""

from pathlib import Path

import pytest
import typer

from obsidian_metadata.models.enums import MetadataType
from obsidian_metadata.models.exceptions import FrontmatterError
from obsidian_metadata.models.metadata import InlineField
from obsidian_metadata.models.notes import Note


def test_note_not_exists() -> None:
    """Test target not found.

    GIVEN a path to a non-existent file
    WHEN a Note object is created pointing to that file
    THEN a typer.Exit exception is raised
    """
    with pytest.raises(typer.Exit):
        Note(note_path="nonexistent_file.md")


def test_create_note_1(sample_note):
    """Test creating a note object.

    GIVEN a path to a markdown file
    WHEN a Note object is created pointing to that file
    THEN the Note object is created
    """
    note = Note(note_path=sample_note, dry_run=True)
    assert note.note_path == Path(sample_note)
    assert note.dry_run is True
    assert note.encoding == "utf_8"
    assert len(note.metadata) == 20

    with sample_note.open():
        content = sample_note.read_text()

    assert note.file_content == content
    assert note.original_file_content == content


def test_create_note_2(tmp_path) -> None:
    """Test creating a note object.

    GIVEN a text file with invalid frontmatter
    WHEN the note is initialized
    THEN a typer exit is raised
    """
    note_path = Path(tmp_path) / "broken_frontmatter.md"
    note_path.touch()
    note_path.write_text(
        """---
tags:
invalid = = "content"
---
"""
    )
    with pytest.raises(typer.Exit):
        Note(note_path=note_path)


def test_create_note_3(tmp_path) -> None:
    """Test creating a note object.

    GIVEN a text file with invalid frontmatter
    WHEN the note is initialized
    THEN a typer exit is raised
    """
    note_path = Path(tmp_path) / "broken_frontmatter.md"
    note_path.touch()
    note_path.write_text(
        """---
nested1:
  nested2: "content"
  nested3:
    - "content"
    - "content"
---
"""
    )
    with pytest.raises(typer.Exit):
        Note(note_path=note_path)


def test_create_note_6(tmp_path):
    """Test creating a note object.

    GIVEN a text file
    WHEN there is no content in the file
    THEN a note is returned with no metadata or content
    """
    note_path = Path(tmp_path) / "empty_file.md"
    note_path.touch()
    note = Note(note_path=note_path)
    assert note.note_path == note_path
    assert not note.file_content
    assert not note.original_file_content
    assert note.metadata == []


def test__grab_metadata_1(tmp_path):
    """Test the _grab_metadata method.

    GIVEN a text file
    WHEN there is frontmatter
    THEN the frontmatter is returned in the metadata list
    """
    note_path = Path(tmp_path) / "test_file.md"
    note_path.touch()
    note_path.write_text(
        """
---
key1: value1
key2: 2022-12-22
key3:
  - value3
  - value4
key4:
key5: "value5"
---
"""
    )
    note = Note(note_path=note_path)
    assert sorted(note.metadata, key=lambda x: (x.key, x.value)) == [
        InlineField(meta_type=MetadataType.FRONTMATTER, key="key1", value="value1"),
        InlineField(meta_type=MetadataType.FRONTMATTER, key="key2", value="2022-12-22"),
        InlineField(meta_type=MetadataType.FRONTMATTER, key="key3", value="value3"),
        InlineField(meta_type=MetadataType.FRONTMATTER, key="key3", value="value4"),
        InlineField(meta_type=MetadataType.FRONTMATTER, key="key4", value="None"),
        InlineField(meta_type=MetadataType.FRONTMATTER, key="key5", value="value5"),
    ]


def test__grab_metadata_2(tmp_path):
    """Test the _grab_metadata method.

    GIVEN a text file
    WHEN there is inline metadata
    THEN the inline metadata is returned in the metadata list
    """
    note_path = Path(tmp_path) / "test_file.md"
    note_path.touch()
    note_path.write_text(
        """

key1::value1
key2::2022-12-22
foo [key3::value3] bar
key4::value4
foo (key4::value) bar
key5::value5

"""
    )
    note = Note(note_path=note_path)
    assert sorted(note.metadata, key=lambda x: (x.key, x.value)) == [
        InlineField(meta_type=MetadataType.INLINE, key="key1", value="value1"),
        InlineField(meta_type=MetadataType.INLINE, key="key2", value="2022-12-22"),
        InlineField(meta_type=MetadataType.INLINE, key="key3", value="value3"),
        InlineField(meta_type=MetadataType.INLINE, key="key4", value="value"),
        InlineField(meta_type=MetadataType.INLINE, key="key4", value="value4"),
        InlineField(meta_type=MetadataType.INLINE, key="key5", value="value5"),
    ]


def test__grab_metadata_3(tmp_path):
    """Test the _grab_metadata method.

    GIVEN a text file
    WHEN there are tags
    THEN the tags are returned in the metadata list
    """
    note_path = Path(tmp_path) / "test_file.md"
    note_path.touch()
    note_path.write_text("#tag1\n#tag2")
    note = Note(note_path=note_path)
    assert sorted(note.metadata, key=lambda x: x.value) == [
        InlineField(meta_type=MetadataType.TAGS, key=None, value="tag1"),
        InlineField(meta_type=MetadataType.TAGS, key=None, value="tag2"),
    ]


def test__grab_metadata_4(tmp_path):
    """Test the _grab_metadata method.

    GIVEN a text file
    WHEN there are tags, frontmatter, and inline metadata
    THEN all metadata is returned
    """
    note_path = Path(tmp_path) / "test_file.md"
    note_path.touch()
    note_path.write_text(
        """\
---
key1: value1
---
key2::value2
#tag1\n#tag2"""
    )
    note = Note(note_path=note_path)
    assert sorted(note.metadata, key=lambda x: x.value) == [
        InlineField(meta_type=MetadataType.TAGS, key=None, value="tag1"),
        InlineField(meta_type=MetadataType.TAGS, key=None, value="tag2"),
        InlineField(meta_type=MetadataType.FRONTMATTER, key="key1", value="value1"),
        InlineField(meta_type=MetadataType.INLINE, key="key2", value="value2"),
    ]


def test__grab_metadata_5(tmp_path):
    """Test the _grab_metadata method.

    GIVEN a text file
    WHEN invalid metadata is present
    THEN raise a FrontmatterError
    """
    note_path = Path(tmp_path) / "broken_frontmatter.md"
    note_path.touch()
    note_path.write_text(
        """---
tags:
invalid = = "content"
---
"""
    )
    with pytest.raises(typer.Exit):
        Note(note_path=note_path)
1095 tests/notes/note_methods_test.py Normal file
File diff suppressed because it is too large

1233 tests/notes_test.py
File diff suppressed because it is too large
364 tests/parsers_test.py Normal file
@@ -0,0 +1,364 @@
|
|||||||
|
# type: ignore
|
||||||
|
"""Test the parsers module."""
|
||||||
|
|
||||||
|
import re
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from obsidian_metadata.models.enums import Wrapping
|
||||||
|
from obsidian_metadata.models.parsers import Parser
|
||||||
|
|
||||||
|
P = Parser()
|
||||||
|
|
||||||
|
|
||||||
|
def test_identify_internal_link_1():
|
||||||
|
"""Test the internal_link attribute.
|
||||||
|
|
||||||
|
GIVEN a string with an external link
|
||||||
|
WHEN the internal_link attribute is called within a regex
|
||||||
|
THEN the external link is not found
|
||||||
|
"""
|
||||||
|
assert re.findall(P.internal_link, "[link](https://example.com/somepage.html)") == []
|
||||||
|
|
||||||
|
|
||||||
|
def test_identify_internal_link_2():
|
||||||
|
"""Test the internal_link attribute.
|
||||||
|
|
||||||
|
GIVEN a string with out any links
|
||||||
|
WHEN the internal_link attribute is called within a regex
|
||||||
|
THEN no links are found
|
||||||
|
"""
|
||||||
|
assert re.findall(P.internal_link, "foo bar baz") == []
|
||||||
|
|
||||||
|
|
||||||
|
def test_identify_internal_link_3():
|
||||||
|
"""Test the internal_link attribute.
|
||||||
|
|
||||||
|
GIVEN a string with an internal link
|
||||||
|
WHEN the internal_link attribute is called within a regex
|
||||||
|
THEN the internal link is found
|
||||||
|
"""
|
||||||
|
assert re.findall(P.internal_link, "[[internal_link]]") == ["[[internal_link]]"]
|
||||||
|
assert re.findall(P.internal_link, "[[internal_link|text]]") == ["[[internal_link|text]]"]
|
||||||
|
assert re.findall(P.internal_link, "[[test/Main.md]]") == ["[[test/Main.md]]"]
|
||||||
|
assert re.findall(P.internal_link, "[[%Man &Machine + Mind%]]") == ["[[%Man &Machine + Mind%]]"]
|
||||||
|
assert re.findall(P.internal_link, "[[Hello \\| There]]") == ["[[Hello \\| There]]"]
|
||||||
|
assert re.findall(P.internal_link, "[[\\||Yes]]") == ["[[\\||Yes]]"]
|
||||||
|
assert re.findall(P.internal_link, "[[test/Main|Yes]]") == ["[[test/Main|Yes]]"]
|
||||||
|
assert re.findall(P.internal_link, "[[2020#^14df]]") == ["[[2020#^14df]]"]
|
||||||
|
assert re.findall(P.internal_link, "!foo[[bar]]baz") == ["[[bar]]"]
|
||||||
|
assert re.findall(P.internal_link, "[[]]") == ["[[]]"]
|
||||||
|
|
||||||
|
|
||||||
|
def test_return_frontmatter_1():
|
||||||
|
"""Test the return_frontmatter method.
|
||||||
|
|
||||||
|
GIVEN a string with frontmatter
|
||||||
|
WHEN the return_frontmatter method is called
|
||||||
|
THEN the frontmatter is returned
|
||||||
|
"""
|
||||||
|
content = """
|
||||||
|
---
|
||||||
|
key: value
|
||||||
|
---
|
||||||
|
# Hello World
|
||||||
|
"""
|
||||||
|
assert P.return_frontmatter(content) == "---\nkey: value\n---"
|
||||||
|
|
||||||
|
|
||||||
|
def test_return_frontmatter_2():
|
||||||
|
"""Test the return_frontmatter method.
|
||||||
|
|
||||||
|
GIVEN a string without frontmatter
|
||||||
|
WHEN the return_frontmatter method is called
|
||||||
|
THEN None is returned
|
||||||
|
"""
|
||||||
|
content = """
|
||||||
|
# Hello World
|
||||||
|
---
|
||||||
|
key: value
|
||||||
|
---
|
||||||
|
"""
|
||||||
|
assert P.return_frontmatter(content) is None
|
||||||
|
|
||||||
|
|
||||||
|
def test_return_frontmatter_3():
|
||||||
|
"""Test the return_frontmatter method.
|
||||||
|
|
||||||
|
GIVEN a string with frontmatter
|
||||||
|
WHEN the return_frontmatter method is called with data_only=True
|
||||||
|
THEN the frontmatter is returned
|
||||||
|
"""
|
||||||
|
content = """
|
||||||
|
---
|
||||||
|
key: value
|
||||||
|
key2: value2
|
||||||
|
---
|
||||||
|
# Hello World
|
||||||
|
"""
|
||||||
|
assert P.return_frontmatter(content, data_only=True) == "key: value\nkey2: value2"
|
||||||
|
|
||||||
|
|
||||||
|
def test_return_frontmatter_4():
|
||||||
|
"""Test the return_frontmatter method.
|
||||||
|
|
||||||
|
GIVEN a string without frontmatter
|
||||||
|
WHEN the return_frontmatter method is called with data_only=True
|
||||||
|
THEN None is returned
|
||||||
|
"""
|
||||||
|
content = """
|
||||||
|
# Hello World
|
||||||
|
---
|
||||||
|
key: value
|
||||||
|
---
|
||||||
|
"""
|
||||||
|
assert P.return_frontmatter(content, data_only=True) is None
|
||||||
|
|
||||||
|
|
||||||
|
def test_return_inline_metadata_1():
|
||||||
|
"""Test the return_inline_metadata method.
|
||||||
|
|
||||||
|
GIVEN a string with no inline metadata
|
||||||
|
WHEN the return_inline_metadata method is called
|
||||||
|
THEN return None
|
||||||
|
"""
|
||||||
|
assert P.return_inline_metadata("foo bar baz") is None
|
||||||
|
assert P.return_inline_metadata("foo:bar baz") is None
|
||||||
|
assert P.return_inline_metadata("foo:::bar baz") is None
|
||||||
|
assert P.return_inline_metadata("[foo:::bar] baz") is None
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
("string", "returned"),
|
||||||
|
[
|
||||||
|
("[k1:: v1]", [("k1", " v1", Wrapping.BRACKETS)]),
|
||||||
|
("(k/1:: v/1)", [("k/1", " v/1", Wrapping.PARENS)]),
|
||||||
|
(
|
||||||
|
"[k1::v1] and (k2:: v2)",
|
||||||
|
[("k1", "v1", Wrapping.BRACKETS), ("k2", " v2", Wrapping.PARENS)],
|
||||||
|
),
|
||||||
|
("(début::début)", [("début", "début", Wrapping.PARENS)]),
|
||||||
|
("[😉::🚀]", [("😉", "🚀", Wrapping.BRACKETS)]),
|
||||||
|
(
|
||||||
|
"(🛸rocket🚀ship:: a 🎅 [console] game)",
|
||||||
|
[("🛸rocket🚀ship", " a 🎅 [console] game", Wrapping.PARENS)],
|
||||||
|
),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_return_inline_metadata_2(string, returned):
|
||||||
|
"""Test the return_inline_metadata method.
|
||||||
|
|
||||||
|
GIVEN a string with inline metadata within a wrapping
|
||||||
|
WHEN the return_inline_metadata method is called
|
||||||
|
THEN return the wrapped inline metadata
|
||||||
|
"""
|
||||||
|
assert P.return_inline_metadata(string) == returned
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
("string", "returned"),
|
||||||
|
[
|
||||||
|
("k1::v1", [("k1", "v1", Wrapping.NONE)]),
|
||||||
|
("😉::🚀", [("😉", "🚀", Wrapping.NONE)]),
|
||||||
|
("k1:: w/ !@#$| ", [("k1", " w/ !@#$| ", Wrapping.NONE)]),
|
||||||
|
("クリスマス:: 家庭用ゲーム機", [("クリスマス", " 家庭用ゲ\u30fcム機", Wrapping.NONE)]),
|
||||||
|
("Noël:: Un jeu de console", [("Noël", " Un jeu de console", Wrapping.NONE)]),
|
||||||
|
("🎅:: a console game", [("🎅", " a console game", Wrapping.NONE)]),
|
||||||
|
("🛸rocket🚀ship:: a 🎅 console game", [("🛸rocket🚀ship", " a 🎅 console game", Wrapping.NONE)]),
|
||||||
|
(">flag::irish flag 🇮🇪", [("flag", "irish flag 🇮🇪", Wrapping.NONE)]),
|
||||||
|
("foo::[bar] baz", [("foo", "[bar] baz", Wrapping.NONE)]),
|
||||||
|
("foo::bar) baz", [("foo", "bar) baz", Wrapping.NONE)]),
|
||||||
|
("[foo::bar baz", [("foo", "bar baz", Wrapping.NONE)]),
|
||||||
|
("_foo_::bar baz", [("_foo_", "bar baz", Wrapping.NONE)]),
|
||||||
|
("**foo**::bar_baz", [("**foo**", "bar_baz", Wrapping.NONE)]),
|
||||||
|
("`foo`::`bar baz`", [("`foo`", "`bar baz`", Wrapping.NONE)]),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_return_inline_metadata_3(string, returned):
|
||||||
|
"""Test the return_inline_metadata method.
|
||||||
|
|
||||||
|
GIVEN a string with inline metadata without a wrapping
|
||||||
|
WHEN the return_inline_metadata method is called
|
||||||
|
THEN return the wrapped inline metadata
|
||||||
|
"""
|
||||||
|
assert P.return_inline_metadata(string) == returned
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
    ("string", "returned"),
    [
        ("#foo", ["#foo"]),
        ("#tag1 #tag2 #tag3", ["#tag1", "#tag2", "#tag3"]),
        ("#foo.bar", ["#foo"]),
        ("#foo-bar_baz#", ["#foo-bar_baz"]),
        ("#daily/2021/20/08", ["#daily/2021/20/08"]),
        ("#🌱/🌿", ["#🌱/🌿"]),
        ("#début", ["#début"]),
        ("#/some/🚀/tag", ["#/some/🚀/tag"]),
        (r"\\#foo", ["#foo"]),
        ("#f#oo", ["#f", "#oo"]),
        ("#foo#bar#baz", ["#foo", "#bar", "#baz"]),
    ],
)
def test_return_tags_1(string, returned):
    """Test the return_tags method.

    GIVEN a string with tags
    WHEN the return_tags method is called
    THEN the valid tags are returned
    """
    assert P.return_tags(string) == returned


@pytest.mark.parametrize(
    ("string"),
    [
        ("##foo# ##bar # baz ##"),
        ("##foo"),
        ("foo##bar"),
        ("#1123"),
        ("foo bar"),
        ("aa#foo"),
        ("$#foo"),
    ],
)
def test_return_tags_2(string):
    """Test the return_tags method.

    GIVEN a string without valid tags
    WHEN the return_tags method is called
    THEN an empty list is returned
    """
    assert P.return_tags(string) == []

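# A minimal sketch of the tag capture exercised above, assuming a tag is a "#"
# not preceded by "#" or a word character, followed by characters that are not
# whitespace, punctuation, or another "#", and not purely numeric. TAG_SKETCH
# and return_tags_sketch are illustrative names only; the project's real
# pattern (P.return_tags) is more thorough.
import re

TAG_SKETCH = re.compile(r"(?<![\w#])#(?![0-9]+\b)(?P<tag>[^\s#.,;:*()\[\]\\|$&]+)")


def return_tags_sketch(text: str) -> list[str]:
    """Return every valid-looking #tag found in the text."""
    return [f"#{match.group('tag')}" for match in TAG_SKETCH.finditer(text)]


assert return_tags_sketch("#tag1 #tag2 #tag3") == ["#tag1", "#tag2", "#tag3"]
assert return_tags_sketch("#1123") == []
assert return_tags_sketch("aa#foo") == []
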
def test_return_top_with_header_1():
    """Test the return_top_with_header method.

    GIVEN a string with frontmatter above a first markdown header
    WHEN return_top_with_header is called
    THEN return the content up to the end of the first header
    """
    content = """
---
key: value
---
# Hello World

foo bar baz
"""
    assert P.return_top_with_header(content) == "---\nkey: value\n---\n# Hello World\n"


def test_return_top_with_header_2():
    """Test the return_top_with_header method.

    GIVEN a string with blank lines above the first markdown header
    WHEN return_top_with_header is called
    THEN return the content up to the end of the first header
    """
    content = "\n\n### Hello World\nfoo bar\nfoo bar"
    assert P.return_top_with_header(content) == "### Hello World\n"


def test_return_top_with_header_3():
    """Test the return_top_with_header method.

    GIVEN a string with no markdown headers
    WHEN return_top_with_header is called
    THEN return None
    """
    content = "Hello World\nfoo bar\nfoo bar"
    assert not P.return_top_with_header(content)


def test_return_top_with_header_4():
    """Test the return_top_with_header method.

    GIVEN a string with content above the first markdown header
    WHEN return_top_with_header is called
    THEN return the content up to the end of the first header
    """
    content = "qux bar baz\nbaz\nfoo\n### bar\n# baz foo bar"
    assert P.return_top_with_header(content) == "qux bar baz\nbaz\nfoo\n### bar\n"

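# A minimal sketch of the behaviour the four tests above describe for
# P.return_top_with_header(): capture everything from the start of the note
# through the end of the first ATX header line, or nothing when the note has
# no header. TOP_WITH_HEADER_SKETCH and return_top_with_header_sketch are an
# illustrative name and a simplified pattern, not the project's regex.
import re

TOP_WITH_HEADER_SKETCH = re.compile(r"\A(?P<top>.*?#+ [^\n]*\n?)", re.DOTALL)


def return_top_with_header_sketch(text: str) -> str:
    """Return the top of a note through its first markdown header, or ""."""
    match = TOP_WITH_HEADER_SKETCH.search(text)
    return match.group("top").lstrip("\n") if match else ""


assert return_top_with_header_sketch("Hello World\nfoo bar\nfoo bar") == ""
assert return_top_with_header_sketch("\n\n### Hello World\nfoo bar") == "### Hello World\n"
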
def test_strip_frontmatter_1():
    """Test the strip_frontmatter method.

    GIVEN a string with frontmatter
    WHEN the strip_frontmatter method is called
    THEN the frontmatter is removed
    """
    content = """
---
key: value
---
# Hello World
"""
    assert P.strip_frontmatter(content).strip() == "# Hello World"


def test_strip_frontmatter_2():
    """Test the strip_frontmatter method.

    GIVEN a string without frontmatter
    WHEN the strip_frontmatter method is called
    THEN nothing is removed
    """
    content = """
# Hello World
---
key: value
---
"""
    assert P.strip_frontmatter(content) == content


def test_strip_frontmatter_3():
    """Test the strip_frontmatter method.

    GIVEN a string with frontmatter
    WHEN the strip_frontmatter method is called with data_only=True
    THEN the frontmatter data is removed but the separators are kept
    """
    content = """
---
key: value
---
# Hello World
"""
    assert P.strip_frontmatter(content, data_only=True).strip() == "---\n---\n# Hello World"


def test_strip_frontmatter_4():
    """Test the strip_frontmatter method.

    GIVEN a string without frontmatter
    WHEN the strip_frontmatter method is called with data_only=True
    THEN nothing is removed
    """
    content = """
# Hello World
---
key: value
---
"""
    assert P.strip_frontmatter(content, data_only=True) == content

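# A minimal sketch of the two strip_frontmatter behaviours tested above,
# assuming frontmatter only counts when the note begins (optionally after
# blank lines) with a `---` fence: by default the whole block is removed,
# while data_only=True removes only the lines between the fences. The names
# and patterns below are illustrative, not the project's implementation.
import re

_FRONTMATTER_BLOCK = re.compile(r"\A\s*---\n.*?\n---\n", re.DOTALL)
_FRONTMATTER_DATA = re.compile(r"\A(\s*---\n).*?\n(---\n)", re.DOTALL)


def strip_frontmatter_sketch(text: str, data_only: bool = False) -> str:
    """Strip a leading frontmatter block, or only its data when data_only=True."""
    if data_only:
        return _FRONTMATTER_DATA.sub(r"\1\2", text, count=1)
    return _FRONTMATTER_BLOCK.sub("", text, count=1)


note = "\n---\nkey: value\n---\n# Hello World\n"
assert strip_frontmatter_sketch(note).strip() == "# Hello World"
assert strip_frontmatter_sketch(note, data_only=True).strip() == "---\n---\n# Hello World"
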
def test_strip_inline_code_1():
    """Test the strip_inline_code method.

    GIVEN a string with inline code
    WHEN the strip_inline_code method is called
    THEN the inline code is removed
    """
    assert P.strip_inline_code("Foo `bar` baz `Qux` ```bar\n```") == "Foo baz ```bar\n```"


def test_validators():
    """Test validators."""
    assert P.validate_tag_text.search("test_tag") is None
    assert P.validate_tag_text.search("#asdf").group(0) == "#"

@@ -1,225 +0,0 @@
|
|||||||
# type: ignore
|
|
||||||
"""Tests for the regex module."""
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
from obsidian_metadata.models.patterns import Patterns
|
|
||||||
|
|
||||||
TAG_CONTENT: str = "#1 #2 **#3** [[#4]] [[#5|test]] #6#notag #7_8 #9/10 #11-12 #13; #14, #15. #16: #17* #18(#19) #20[#21] #22\\ #23& #24# #25 **#26** #📅/tag [link](#no_tag) https://example.com/somepage.html_#no_url_tags"
|
|
||||||
|
|
||||||
FRONTMATTER_CONTENT: str = """
|
|
||||||
---
|
|
||||||
tags:
|
|
||||||
- tag_1
|
|
||||||
- tag_2
|
|
||||||
-
|
|
||||||
- 📅/tag_3
|
|
||||||
frontmatter_Key1: "frontmatter_Key1_value"
|
|
||||||
frontmatter_Key2: ["note", "article"]
|
|
||||||
shared_key1: 'shared_key1_value'
|
|
||||||
---
|
|
||||||
more content
|
|
||||||
|
|
||||||
---
|
|
||||||
horizontal: rule
|
|
||||||
---
|
|
||||||
"""
|
|
||||||
CORRECT_FRONTMATTER_WITH_SEPARATORS: str = """---
|
|
||||||
tags:
|
|
||||||
- tag_1
|
|
||||||
- tag_2
|
|
||||||
-
|
|
||||||
- 📅/tag_3
|
|
||||||
frontmatter_Key1: "frontmatter_Key1_value"
|
|
||||||
frontmatter_Key2: ["note", "article"]
|
|
||||||
shared_key1: 'shared_key1_value'
|
|
||||||
---"""
|
|
||||||
CORRECT_FRONTMATTER_NO_SEPARATORS: str = """
|
|
||||||
tags:
|
|
||||||
- tag_1
|
|
||||||
- tag_2
|
|
||||||
-
|
|
||||||
- 📅/tag_3
|
|
||||||
frontmatter_Key1: "frontmatter_Key1_value"
|
|
||||||
frontmatter_Key2: ["note", "article"]
|
|
||||||
shared_key1: 'shared_key1_value'
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
def test_top_with_header():
|
|
||||||
"""Test identifying the top of a note."""
|
|
||||||
pattern = Patterns()
|
|
||||||
|
|
||||||
no_fm_or_header = """
|
|
||||||
|
|
||||||
|
|
||||||
Lorem ipsum dolor sit amet.
|
|
||||||
|
|
||||||
# header 1
|
|
||||||
---
|
|
||||||
horizontal: rule
|
|
||||||
---
|
|
||||||
Lorem ipsum dolor sit amet.
|
|
||||||
"""
|
|
||||||
fm_and_header: str = """
|
|
||||||
---
|
|
||||||
tags:
|
|
||||||
- tag_1
|
|
||||||
- tag_2
|
|
||||||
-
|
|
||||||
- 📅/tag_3
|
|
||||||
frontmatter_Key1: "frontmatter_Key1_value"
|
|
||||||
frontmatter_Key2: ["note", "article"]
|
|
||||||
shared_key1: 'shared_key1_value'
|
|
||||||
---
|
|
||||||
|
|
||||||
# Header 1
|
|
||||||
more content
|
|
||||||
|
|
||||||
---
|
|
||||||
horizontal: rule
|
|
||||||
---
|
|
||||||
"""
|
|
||||||
fm_and_header_result = """---
|
|
||||||
tags:
|
|
||||||
- tag_1
|
|
||||||
- tag_2
|
|
||||||
-
|
|
||||||
- 📅/tag_3
|
|
||||||
frontmatter_Key1: "frontmatter_Key1_value"
|
|
||||||
frontmatter_Key2: ["note", "article"]
|
|
||||||
shared_key1: 'shared_key1_value'
|
|
||||||
---
|
|
||||||
|
|
||||||
# Header 1"""
|
|
||||||
no_fm = """
|
|
||||||
|
|
||||||
### Header's number 3 [📅] "+$2.00" 🤷
|
|
||||||
---
|
|
||||||
horizontal: rule
|
|
||||||
---
|
|
||||||
"""
|
|
||||||
no_fm_result = '### Header\'s number 3 [📅] "+$2.00" 🤷'
|
|
||||||
|
|
||||||
assert not pattern.top_with_header.search(no_fm_or_header).group("top")
|
|
||||||
assert pattern.top_with_header.search(fm_and_header).group("top") == fm_and_header_result
|
|
||||||
assert pattern.top_with_header.search(no_fm).group("top") == no_fm_result
|
|
||||||
|
|
||||||
|
|
||||||
def test_find_inline_tags():
|
|
||||||
"""Test find_inline_tags regex."""
|
|
||||||
pattern = Patterns()
|
|
||||||
assert pattern.find_inline_tags.findall(TAG_CONTENT) == [
|
|
||||||
"1",
|
|
||||||
"2",
|
|
||||||
"3",
|
|
||||||
"4",
|
|
||||||
"5",
|
|
||||||
"6",
|
|
||||||
"7_8",
|
|
||||||
"9/10",
|
|
||||||
"11-12",
|
|
||||||
"13",
|
|
||||||
"14",
|
|
||||||
"15",
|
|
||||||
"16",
|
|
||||||
"17",
|
|
||||||
"18",
|
|
||||||
"19",
|
|
||||||
"20",
|
|
||||||
"21",
|
|
||||||
"22",
|
|
||||||
"23",
|
|
||||||
"24",
|
|
||||||
"25",
|
|
||||||
"26",
|
|
||||||
"📅/tag",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def test_find_inline_metadata():
|
|
||||||
"""Test find_inline_metadata regex."""
|
|
||||||
pattern = Patterns()
|
|
||||||
content = """
|
|
||||||
**1:: 1**
|
|
||||||
2_2:: [[2_2]] | 2
|
|
||||||
asdfasdf [3:: 3] asdfasdf [7::7] asdf
|
|
||||||
[4:: 4] [5:: 5]
|
|
||||||
> 6:: 6
|
|
||||||
**8**:: **8**
|
|
||||||
10::
|
|
||||||
📅11:: 11/📅/11
|
|
||||||
emoji_📅_key::emoji_📅_key_value
|
|
||||||
key1:: value1
|
|
||||||
key1:: value2
|
|
||||||
key1:: value3
|
|
||||||
indented_key:: value1
|
|
||||||
Paragraph of text with an [inline_key:: value1] and [inline_key:: value2] and [inline_key:: value3] which should do it.
|
|
||||||
> blockquote_key:: value1
|
|
||||||
> blockquote_key:: value2
|
|
||||||
|
|
||||||
- list_key:: value1
|
|
||||||
- list_key:: [[value2]]
|
|
||||||
|
|
||||||
1. list_key:: value1
|
|
||||||
2. list_key:: value2
|
|
||||||
|
|
||||||
| table_key:: value1 | table_key:: value2 |
|
|
||||||
---
|
|
||||||
frontmatter_key1: frontmatter_key1_value
|
|
||||||
---
|
|
||||||
not_a_key: not_a_value
|
|
||||||
paragraph metadata:: key in text
|
|
||||||
"""
|
|
||||||
|
|
||||||
result = pattern.find_inline_metadata.findall(content)
|
|
||||||
assert result == [
|
|
||||||
("", "", "1", "1**"),
|
|
||||||
("", "", "2_2", "[[2_2]] | 2"),
|
|
||||||
("3", "3", "", ""),
|
|
||||||
("7", "7", "", ""),
|
|
||||||
("", "", "4", "4] [5:: 5]"),
|
|
||||||
("", "", "6", "6"),
|
|
||||||
("", "", "8**", "**8**"),
|
|
||||||
("", "", "11", "11/📅/11"),
|
|
||||||
("", "", "emoji_📅_key", "emoji_📅_key_value"),
|
|
||||||
("", "", "key1", "value1"),
|
|
||||||
("", "", "key1", "value2"),
|
|
||||||
("", "", "key1", "value3"),
|
|
||||||
("", "", "indented_key", "value1"),
|
|
||||||
("inline_key", "value1", "", ""),
|
|
||||||
("inline_key", "value2", "", ""),
|
|
||||||
("inline_key", "value3", "", ""),
|
|
||||||
("", "", "blockquote_key", "value1"),
|
|
||||||
("", "", "blockquote_key", "value2"),
|
|
||||||
("", "", "list_key", "value1"),
|
|
||||||
("", "", "list_key", "[[value2]]"),
|
|
||||||
("", "", "list_key", "value1"),
|
|
||||||
("", "", "list_key", "value2"),
|
|
||||||
("", "", "table_key", "value1 | table_key:: value2 |"),
|
|
||||||
("", "", "metadata", "key in text"),
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def test_find_frontmatter():
|
|
||||||
"""Test regexes."""
|
|
||||||
pattern = Patterns()
|
|
||||||
found = pattern.frontmatter_block.search(FRONTMATTER_CONTENT).group("frontmatter")
|
|
||||||
assert found == CORRECT_FRONTMATTER_WITH_SEPARATORS
|
|
||||||
|
|
||||||
found = pattern.frontmatt_block_strip_separators.search(FRONTMATTER_CONTENT).group(
|
|
||||||
"frontmatter"
|
|
||||||
)
|
|
||||||
assert found == CORRECT_FRONTMATTER_NO_SEPARATORS
|
|
||||||
|
|
||||||
with pytest.raises(AttributeError):
|
|
||||||
pattern.frontmatt_block_strip_separators.search(TAG_CONTENT).group("frontmatter")
|
|
||||||
|
|
||||||
|
|
||||||
def test_validators():
|
|
||||||
"""Test validators."""
|
|
||||||
pattern = Patterns()
|
|
||||||
|
|
||||||
assert pattern.validate_tag_text.search("test_tag") is None
|
|
||||||
assert pattern.validate_tag_text.search("#asdf").group(0) == "#"
|
|
||||||
assert pattern.validate_tag_text.search("#asdf").group(0) == "#"
|
|
||||||
@@ -34,7 +34,7 @@ def test_validate_key_exists() -> None:
     questions = Questions(vault=VAULT)
     assert "'test' does not exist" in questions._validate_key_exists("test")
     assert "Key cannot be empty" in questions._validate_key_exists("")
-    assert questions._validate_key_exists("frontmatter_Key1") is True
+    assert questions._validate_key_exists("frontmatter1") is True


 def test_validate_new_key() -> None:
@@ -82,7 +82,7 @@ def test_validate_key_exists_regex() -> None:
     assert "'test' does not exist" in questions._validate_key_exists_regex("test")
     assert "Key cannot be empty" in questions._validate_key_exists_regex("")
     assert "Invalid regex" in questions._validate_key_exists_regex("[")
-    assert questions._validate_key_exists_regex(r"\w+_Key\d") is True
+    assert questions._validate_key_exists_regex(r"f\w+\d") is True


 def test_validate_value() -> None:
@@ -90,29 +90,26 @@ def test_validate_value() -> None:
     questions = Questions(vault=VAULT)

     assert questions._validate_value("test") is True
-    questions2 = Questions(vault=VAULT, key="frontmatter_Key1")
-    assert questions2._validate_value("test") == "frontmatter_Key1:test does not exist"
-    assert questions2._validate_value("author name") is True
+    questions2 = Questions(vault=VAULT, key="frontmatter1")
+    assert questions2._validate_value("test") == "frontmatter1:test does not exist"
+    assert questions2._validate_value("foo") is True


 def test_validate_value_exists_regex() -> None:
     """Test value exists regex validation."""
-    questions2 = Questions(vault=VAULT, key="frontmatter_Key1")
+    questions2 = Questions(vault=VAULT, key="frontmatter1")
     assert "Invalid regex" in questions2._validate_value_exists_regex("[")
     assert "Regex cannot be empty" in questions2._validate_value_exists_regex("")
     assert (
         questions2._validate_value_exists_regex(r"\d\d\d\w\d")
-        == r"No values in frontmatter_Key1 match regex: \d\d\d\w\d"
+        == r"No values in frontmatter1 match regex: \d\d\d\w\d"
     )
-    assert questions2._validate_value_exists_regex(r"^author \w+") is True
+    assert questions2._validate_value_exists_regex(r"^f\w{2}$") is True


 def test_validate_new_value() -> None:
     """Test new value validation."""
-    questions = Questions(vault=VAULT, key="frontmatter_Key1")
+    questions = Questions(vault=VAULT, key="frontmatter1")
     assert questions._validate_new_value("not_exists") is True
     assert "Value cannot be empty" in questions._validate_new_value("")
-    assert (
-        questions._validate_new_value("author name")
-        == "frontmatter_Key1:author name already exists"
-    )
+    assert questions._validate_new_value("foo") == "frontmatter1:foo already exists"
@@ -6,13 +6,9 @@ import typer
|
|||||||
|
|
||||||
from obsidian_metadata._utils import (
|
from obsidian_metadata._utils import (
|
||||||
clean_dictionary,
|
clean_dictionary,
|
||||||
delete_from_dict,
|
|
||||||
dict_contains,
|
dict_contains,
|
||||||
dict_keys_to_lower,
|
dict_keys_to_lower,
|
||||||
dict_values_to_lists_strings,
|
|
||||||
inline_metadata_from_string,
|
|
||||||
merge_dictionaries,
|
merge_dictionaries,
|
||||||
remove_markdown_sections,
|
|
||||||
rename_in_dict,
|
rename_in_dict,
|
||||||
validate_csv_bulk_imports,
|
validate_csv_bulk_imports,
|
||||||
)
|
)
|
||||||
@@ -84,163 +80,6 @@ def test_clean_dictionary_6():
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def test_delete_from_dict_1():
|
|
||||||
"""Test delete_from_dict() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary with values
|
|
||||||
WHEN the delete_from_dict() function is called with a key that exists
|
|
||||||
THEN the key should be deleted from the dictionary and the original dictionary should not be modified
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
|
||||||
|
|
||||||
assert delete_from_dict(dictionary=test_dict, key="key1") == {
|
|
||||||
"key2": ["value2", "value3"],
|
|
||||||
"key3": "value4",
|
|
||||||
}
|
|
||||||
assert test_dict == {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_from_dict_2():
|
|
||||||
"""Test delete_from_dict() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary with values
|
|
||||||
WHEN the delete_from_dict() function is called with a key that does not exist
|
|
||||||
THEN the dictionary should not be modified
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
|
||||||
|
|
||||||
assert delete_from_dict(dictionary=test_dict, key="key5") == test_dict
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_from_dict_3():
|
|
||||||
"""Test delete_from_dict() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary with values in a list
|
|
||||||
WHEN the delete_from_dict() function is called with a key and value that exists
|
|
||||||
THEN the value should be deleted from the specified key in dictionary
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
|
||||||
|
|
||||||
assert delete_from_dict(dictionary=test_dict, key="key2", value="value3") == {
|
|
||||||
"key1": ["value1"],
|
|
||||||
"key2": ["value2"],
|
|
||||||
"key3": "value4",
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_from_dict_4():
|
|
||||||
"""Test delete_from_dict() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary with values as strings
|
|
||||||
WHEN the delete_from_dict() function is called with a key and value that exists
|
|
||||||
THEN the value and key should be deleted from the dictionary
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
|
||||||
|
|
||||||
assert delete_from_dict(dictionary=test_dict, key="key3", value="value4") == {
|
|
||||||
"key1": ["value1"],
|
|
||||||
"key2": ["value2", "value3"],
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_from_dict_5():
|
|
||||||
"""Test delete_from_dict() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary with values as strings
|
|
||||||
WHEN the delete_from_dict() function is called with a key and value that does not exist
|
|
||||||
THEN the dictionary should not be modified
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
|
||||||
|
|
||||||
assert delete_from_dict(dictionary=test_dict, key="key3", value="value5") == test_dict
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_from_dict_6():
|
|
||||||
"""Test delete_from_dict() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary with values as strings
|
|
||||||
WHEN the delete_from_dict() function is called with a key regex that matches
|
|
||||||
THEN the matching keys should be deleted from the dictionary
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
|
||||||
|
|
||||||
assert delete_from_dict(dictionary=test_dict, key="key[23]", is_regex=True) == {
|
|
||||||
"key1": ["value1"]
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_from_dict_7():
|
|
||||||
"""Test delete_from_dict() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary with values as strings
|
|
||||||
WHEN the delete_from_dict() function is called with a key regex that does not match
|
|
||||||
THEN no keys should be deleted from the dictionary
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
|
||||||
|
|
||||||
assert delete_from_dict(dictionary=test_dict, key=r"key\d\d", is_regex=True) == test_dict
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_from_dict_8():
|
|
||||||
"""Test delete_from_dict() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary with values as strings
|
|
||||||
WHEN the delete_from_dict() function is called with a key and value regex that matches
|
|
||||||
THEN the matching keys should be deleted from the dictionary
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
|
||||||
|
|
||||||
assert delete_from_dict(dictionary=test_dict, key="key2", value=r"\w+", is_regex=True) == {
|
|
||||||
"key1": ["value1"],
|
|
||||||
"key2": [],
|
|
||||||
"key3": "value4",
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_from_dict_9():
|
|
||||||
"""Test delete_from_dict() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary with values as strings
|
|
||||||
WHEN the delete_from_dict() function is called with a key and value regex that does not match
|
|
||||||
THEN no keys should be deleted from the dictionary
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
|
||||||
|
|
||||||
assert (
|
|
||||||
delete_from_dict(dictionary=test_dict, key=r"key2", value=r"^\d", is_regex=True)
|
|
||||||
== test_dict
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_from_dict_10():
|
|
||||||
"""Test delete_from_dict() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary with values as strings
|
|
||||||
WHEN the delete_from_dict() function is called with a key and value regex that matches
|
|
||||||
THEN the matching keys should be deleted from the dictionary
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
|
||||||
|
|
||||||
assert delete_from_dict(dictionary=test_dict, key="key3", value=r"\w+", is_regex=True) == {
|
|
||||||
"key1": ["value1"],
|
|
||||||
"key2": ["value2", "value3"],
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_from_dict_11():
|
|
||||||
"""Test delete_from_dict() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary with values as strings
|
|
||||||
WHEN the delete_from_dict() function is called with a key regex that matches multiple and values that match
|
|
||||||
THEN the values matching the associated keys should be deleted from the dictionary
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"], "key3": "value4"}
|
|
||||||
|
|
||||||
assert delete_from_dict(
|
|
||||||
dictionary=test_dict, key=r"key[23]", value=r"\w+[34]$", is_regex=True
|
|
||||||
) == {"key1": ["value1"], "key2": ["value2"]}
|
|
||||||
|
|
||||||
|
|
||||||
def test_dict_contains_1():
|
def test_dict_contains_1():
|
||||||
"""Test dict_contains() function.
|
"""Test dict_contains() function.
|
||||||
|
|
||||||
@@ -342,140 +181,6 @@ def test_dict_keys_to_lower() -> None:
|
|||||||
assert dict_keys_to_lower(test_dict) == {"key1": "Value1", "key2": "Value2", "key3": "Value3"}
|
assert dict_keys_to_lower(test_dict) == {"key1": "Value1", "key2": "Value2", "key3": "Value3"}
|
||||||
|
|
||||||
|
|
||||||
def test_dict_values_to_lists_strings_1():
|
|
||||||
"""Test the dict_values_to_lists_strings() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary passed to the dict_values_to_lists_strings() function
|
|
||||||
WHEN the dictionary is empty
|
|
||||||
THEN the function should return an empty dictionary
|
|
||||||
"""
|
|
||||||
assert dict_values_to_lists_strings({}) == {}
|
|
||||||
assert dict_values_to_lists_strings({}, strip_null_values=True) == {}
|
|
||||||
|
|
||||||
|
|
||||||
def test_dict_values_to_lists_strings_2():
|
|
||||||
"""Test the dict_values_to_lists_strings() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary passed to the dict_values_to_lists_strings() function
|
|
||||||
WHEN the dictionary values are already lists of strings
|
|
||||||
THEN the function should return the dictionary
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": ["value1"], "key2": ["value2", "value3"]}
|
|
||||||
assert dict_values_to_lists_strings(test_dict) == {
|
|
||||||
"key1": ["value1"],
|
|
||||||
"key2": ["value2", "value3"],
|
|
||||||
}
|
|
||||||
assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
|
|
||||||
"key1": ["value1"],
|
|
||||||
"key2": ["value2", "value3"],
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_dict_values_to_lists_strings_3():
|
|
||||||
"""Test the dict_values_to_lists_strings() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary passed to the dict_values_to_lists_strings() function
|
|
||||||
WHEN the a value is None and strip_null_values is False
|
|
||||||
THEN then convert None to an empty string
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": None, "key2": ["value", None]}
|
|
||||||
assert dict_values_to_lists_strings(test_dict) == {"key1": [""], "key2": ["", "value"]}
|
|
||||||
|
|
||||||
|
|
||||||
def test_dict_values_to_lists_strings_4():
|
|
||||||
"""Test the dict_values_to_lists_strings() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary passed to the dict_values_to_lists_strings() function
|
|
||||||
WHEN the a value is None and strip_null_values is True
|
|
||||||
THEN remove null values
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": None, "key2": ["value", None]}
|
|
||||||
assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
|
|
||||||
"key1": [],
|
|
||||||
"key2": ["value"],
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_dict_values_to_lists_strings_5():
|
|
||||||
"""Test the dict_values_to_lists_strings() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary passed to the dict_values_to_lists_strings() function
|
|
||||||
WHEN the a value is a string "None" and strip_null_values is True or False
|
|
||||||
THEN ensure the value is not removed
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": "None", "key2": [None, "None"]}
|
|
||||||
assert dict_values_to_lists_strings(test_dict) == {"key1": ["None"], "key2": ["", "None"]}
|
|
||||||
assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
|
|
||||||
"key1": [],
|
|
||||||
"key2": ["None"],
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_dict_values_to_lists_strings_6():
|
|
||||||
"""Test the dict_values_to_lists_strings() function.
|
|
||||||
|
|
||||||
GIVEN a dictionary passed to the dict_values_to_lists_strings() function
|
|
||||||
WHEN the a value is another dictionary
|
|
||||||
THEN ensure the values in the inner dictionary are converted to lists of strings
|
|
||||||
"""
|
|
||||||
test_dict = {"key1": {"key2": "value2", "key3": ["value3", None]}}
|
|
||||||
assert dict_values_to_lists_strings(test_dict) == {
|
|
||||||
"key1": {"key2": ["value2"], "key3": ["", "value3"]}
|
|
||||||
}
|
|
||||||
assert dict_values_to_lists_strings(test_dict, strip_null_values=True) == {
|
|
||||||
"key1": {"key2": ["value2"], "key3": ["value3"]}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_inline_metadata_from_string_1():
|
|
||||||
"""Test inline_metadata_from_string() function.
|
|
||||||
|
|
||||||
GIVEN a string
|
|
||||||
WHEN the string is empty
|
|
||||||
THEN the function should return an empty list.
|
|
||||||
"""
|
|
||||||
assert inline_metadata_from_string("") == []
|
|
||||||
|
|
||||||
|
|
||||||
def test_inline_metadata_from_string_2():
|
|
||||||
"""Test inline_metadata_from_string() function.
|
|
||||||
|
|
||||||
GIVEN a string
|
|
||||||
WHEN the string contains nothing matching the inline metadata regex
|
|
||||||
THEN the function should return an empty list.
|
|
||||||
"""
|
|
||||||
assert inline_metadata_from_string("this is content that has no inline metadata") == []
|
|
||||||
|
|
||||||
|
|
||||||
def test_inline_metadata_from_string_3():
|
|
||||||
"""Test inline_metadata_from_string() function.
|
|
||||||
|
|
||||||
GIVEN a string
|
|
||||||
WHEN the string contains inline metadata
|
|
||||||
THEN the function should return the key value pair as a tuple within a list.
|
|
||||||
"""
|
|
||||||
assert inline_metadata_from_string("test::test") == [("test", "test")]
|
|
||||||
|
|
||||||
|
|
||||||
def test_inline_metadata_from_string_4():
|
|
||||||
"""Test inline_metadata_from_string() function.
|
|
||||||
|
|
||||||
GIVEN a string
|
|
||||||
WHEN the string contains multiple matches of inline metadata
|
|
||||||
THEN the function should return the key value pairs as a tuple within a list.
|
|
||||||
"""
|
|
||||||
content = """
|
|
||||||
test::test
|
|
||||||
paragraph [key::value] paragraph
|
|
||||||
> test2::test2
|
|
||||||
"""
|
|
||||||
assert inline_metadata_from_string(content) == [
|
|
||||||
("test", "test"),
|
|
||||||
("key", "value"),
|
|
||||||
("test2", "test2"),
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def test_merge_dictionaries_1():
|
def test_merge_dictionaries_1():
|
||||||
"""Test merge_dictionaries() function.
|
"""Test merge_dictionaries() function.
|
||||||
|
|
||||||
@@ -661,199 +366,6 @@ def test_rename_in_dict_5():
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
def test_remove_markdown_sections_1():
|
|
||||||
"""Test remove_markdown_sections() function.
|
|
||||||
|
|
||||||
GIVEN a string with markdown sections
|
|
||||||
WHEN the remove_markdown_sections() function is called with the default arguments
|
|
||||||
THEN return the string without removing any markdown sections
|
|
||||||
"""
|
|
||||||
text: str = """
|
|
||||||
---
|
|
||||||
key: value
|
|
||||||
---
|
|
||||||
|
|
||||||
# heading
|
|
||||||
|
|
||||||
```bash
|
|
||||||
echo "Hello world"
|
|
||||||
```
|
|
||||||
|
|
||||||
Lorem ipsum `inline_code` lorem ipsum.
|
|
||||||
```
|
|
||||||
echo "foo bar"
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
dd
|
|
||||||
---
|
|
||||||
"""
|
|
||||||
|
|
||||||
assert remove_markdown_sections(text) == text
|
|
||||||
|
|
||||||
|
|
||||||
def test_remove_markdown_sections_2():
|
|
||||||
"""Test remove_markdown_sections() function.
|
|
||||||
|
|
||||||
GIVEN a string with markdown sections
|
|
||||||
WHEN the remove_markdown_sections() function is called with strip_codeblocks set to True
|
|
||||||
THEN return the string without the codeblocks
|
|
||||||
"""
|
|
||||||
text: str = """
|
|
||||||
---
|
|
||||||
key: value
|
|
||||||
---
|
|
||||||
|
|
||||||
# heading
|
|
||||||
|
|
||||||
```bash
|
|
||||||
echo "Hello world"
|
|
||||||
```
|
|
||||||
|
|
||||||
Lorem ipsum `inline_code` lorem ipsum.
|
|
||||||
```
|
|
||||||
echo "foo bar"
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
dd
|
|
||||||
---
|
|
||||||
"""
|
|
||||||
result = remove_markdown_sections(text, strip_codeblocks=True)
|
|
||||||
assert "inline_code" in result
|
|
||||||
assert "```bash" not in result
|
|
||||||
assert "```" not in result
|
|
||||||
assert "foo" not in result
|
|
||||||
assert "world" not in result
|
|
||||||
assert "key: value" in result
|
|
||||||
assert "heading" in result
|
|
||||||
assert "Lorem ipsum" in result
|
|
||||||
assert "---\n" in result
|
|
||||||
assert "dd" in result
|
|
||||||
|
|
||||||
|
|
||||||
def test_remove_markdown_sections_3():
|
|
||||||
"""Test remove_markdown_sections() function.
|
|
||||||
|
|
||||||
GIVEN a string with markdown sections
|
|
||||||
WHEN the remove_markdown_sections() function is called with strip_inlinecode set to True
|
|
||||||
THEN return the string without the inline code
|
|
||||||
"""
|
|
||||||
text: str = """
|
|
||||||
---
|
|
||||||
key: value
|
|
||||||
---
|
|
||||||
|
|
||||||
# heading
|
|
||||||
|
|
||||||
```bash
|
|
||||||
echo "Hello world"
|
|
||||||
```
|
|
||||||
|
|
||||||
Lorem ipsum `inline_code` lorem ipsum.
|
|
||||||
```
|
|
||||||
echo "foo bar"
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
dd
|
|
||||||
---
|
|
||||||
"""
|
|
||||||
result = remove_markdown_sections(text, strip_inlinecode=True)
|
|
||||||
assert "`inline_code`" not in result
|
|
||||||
assert "```bash" in result
|
|
||||||
assert "```" in result
|
|
||||||
assert "foo" in result
|
|
||||||
assert "world" in result
|
|
||||||
assert "key: value" in result
|
|
||||||
assert "heading" in result
|
|
||||||
assert "Lorem ipsum" in result
|
|
||||||
assert "---\n" in result
|
|
||||||
assert "dd" in result
|
|
||||||
|
|
||||||
|
|
||||||
def test_remove_markdown_sections_4():
|
|
||||||
"""Test remove_markdown_sections() function.
|
|
||||||
|
|
||||||
GIVEN a string with markdown sections
|
|
||||||
WHEN the remove_markdown_sections() function is called with strip_frontmatter set to True
|
|
||||||
THEN return the string without the frontmatter
|
|
||||||
"""
|
|
||||||
text: str = """
|
|
||||||
---
|
|
||||||
key: value
|
|
||||||
---
|
|
||||||
|
|
||||||
# heading
|
|
||||||
|
|
||||||
```bash
|
|
||||||
echo "Hello world"
|
|
||||||
```
|
|
||||||
|
|
||||||
Lorem ipsum `inline_code` lorem ipsum.
|
|
||||||
```
|
|
||||||
echo "foo bar"
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
dd
|
|
||||||
---
|
|
||||||
"""
|
|
||||||
result = remove_markdown_sections(text, strip_frontmatter=True)
|
|
||||||
assert "`inline_code`" in result
|
|
||||||
assert "```bash" in result
|
|
||||||
assert "```" in result
|
|
||||||
assert "foo" in result
|
|
||||||
assert "world" in result
|
|
||||||
assert "key: value" not in result
|
|
||||||
assert "heading" in result
|
|
||||||
assert "Lorem ipsum" in result
|
|
||||||
assert "---\n" in result
|
|
||||||
assert "dd" in result
|
|
||||||
|
|
||||||
|
|
||||||
def test_remove_markdown_sections_5():
|
|
||||||
"""Test remove_markdown_sections() function.
|
|
||||||
|
|
||||||
GIVEN a string with markdown sections
|
|
||||||
WHEN the remove_markdown_sections() function is called with all arguments set to True
|
|
||||||
THEN return the string without the frontmatter, inline code, and codeblocks
|
|
||||||
"""
|
|
||||||
text: str = """
|
|
||||||
---
|
|
||||||
key: value
|
|
||||||
---
|
|
||||||
|
|
||||||
# heading
|
|
||||||
|
|
||||||
```bash
|
|
||||||
echo "Hello world"
|
|
||||||
```
|
|
||||||
|
|
||||||
Lorem ipsum `inline_code` lorem ipsum.
|
|
||||||
```
|
|
||||||
echo "foo bar"
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
dd
|
|
||||||
---
|
|
||||||
"""
|
|
||||||
result = remove_markdown_sections(
|
|
||||||
text, strip_frontmatter=True, strip_inlinecode=True, strip_codeblocks=True
|
|
||||||
)
|
|
||||||
assert "`inline_code`" not in result
|
|
||||||
assert "bash" not in result
|
|
||||||
assert "```" not in result
|
|
||||||
assert "foo" not in result
|
|
||||||
assert "world" not in result
|
|
||||||
assert "key: value" not in result
|
|
||||||
assert "heading" in result
|
|
||||||
assert "Lorem ipsum" in result
|
|
||||||
assert "---\n" in result
|
|
||||||
assert "dd" in result
|
|
||||||
|
|
||||||
|
|
||||||
def test_validate_csv_bulk_imports_1(tmp_path):
|
def test_validate_csv_bulk_imports_1(tmp_path):
|
||||||
"""Test the validate_csv_bulk_imports function.
|
"""Test the validate_csv_bulk_imports function.
|
||||||
|
|
||||||
|
|||||||
@@ -1,16 +1,17 @@
|
|||||||
# type: ignore
|
# type: ignore
|
||||||
"""Tests for the Vault module."""
|
"""Tests for the Vault module."""
|
||||||
|
|
||||||
|
import re
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
import typer
|
import typer
|
||||||
from rich import print
|
|
||||||
|
|
||||||
from obsidian_metadata._config import Config
|
from obsidian_metadata._config import Config
|
||||||
|
from obsidian_metadata._utils.console import console
|
||||||
from obsidian_metadata.models import Vault, VaultFilter
|
from obsidian_metadata.models import Vault, VaultFilter
|
||||||
from obsidian_metadata.models.enums import InsertLocation, MetadataType
|
from obsidian_metadata.models.enums import InsertLocation, MetadataType
|
||||||
from tests.helpers import Regex
|
from tests.helpers import Regex, strip_ansi
|
||||||
|
|
||||||
|
|
||||||
def test_vault_creation(test_vault, tmp_path):
|
def test_vault_creation(test_vault, tmp_path):
|
||||||
@@ -28,65 +29,33 @@ def test_vault_creation(test_vault, tmp_path):
|
|||||||
assert vault.dry_run is False
|
assert vault.dry_run is False
|
||||||
assert str(vault.exclude_paths[0]) == Regex(r".*\.git")
|
assert str(vault.exclude_paths[0]) == Regex(r".*\.git")
|
||||||
assert len(vault.all_notes) == 2
|
assert len(vault.all_notes) == 2
|
||||||
|
assert vault.frontmatter == {
|
||||||
assert vault.metadata.dict == {
|
|
||||||
"bottom_key1": ["bottom_key1_value"],
|
|
||||||
"bottom_key2": ["bottom_key2_value"],
|
|
||||||
"date_created": ["2022-12-22"],
|
"date_created": ["2022-12-22"],
|
||||||
"frontmatter_Key1": ["author name"],
|
"frontmatter1": ["foo"],
|
||||||
"frontmatter_Key2": ["article", "note"],
|
"frontmatter2": ["bar", "baz", "qux"],
|
||||||
"intext_key": ["intext_value"],
|
"tags": ["bar", "foo"],
|
||||||
"key📅": ["📅_key_value"],
|
"🌱": ["🌿"],
|
||||||
"shared_key1": [
|
|
||||||
"shared_key1_value",
|
|
||||||
"shared_key1_value2",
|
|
||||||
"shared_key1_value3",
|
|
||||||
],
|
|
||||||
"shared_key2": ["shared_key2_value1", "shared_key2_value2"],
|
|
||||||
"tags": [
|
|
||||||
"frontmatter_tag1",
|
|
||||||
"frontmatter_tag2",
|
|
||||||
"shared_tag",
|
|
||||||
"📅/frontmatter_tag3",
|
|
||||||
],
|
|
||||||
"top_key1": ["top_key1_value"],
|
|
||||||
"top_key2": ["top_key2_value"],
|
|
||||||
"top_key3": ["top_key3_value_as_link"],
|
|
||||||
}
|
}
|
||||||
|
assert vault.inline_meta == {
|
||||||
assert vault.metadata.tags == [
|
"inline1": ["bar baz", "foo"],
|
||||||
"inline_tag_bottom1",
|
"inline2": ["[[foo]]"],
|
||||||
"inline_tag_bottom2",
|
"inline3": ["value"],
|
||||||
"inline_tag_top1",
|
"inline4": ["foo"],
|
||||||
"inline_tag_top2",
|
"inline5": [],
|
||||||
"intext_tag1",
|
"intext1": ["foo"],
|
||||||
"intext_tag2",
|
"intext2": ["foo"],
|
||||||
"shared_tag",
|
"key with space": ["foo"],
|
||||||
|
"🌱": ["🌿"],
|
||||||
|
}
|
||||||
|
assert vault.tags == ["tag1", "tag2"]
|
||||||
|
assert vault.exclude_paths == [
|
||||||
|
tmp_path / "vault" / ".git",
|
||||||
|
tmp_path / "vault" / ".obsidian",
|
||||||
|
tmp_path / "vault" / "ignore_folder",
|
||||||
]
|
]
|
||||||
assert vault.metadata.inline_metadata == {
|
assert vault.filters == []
|
||||||
"bottom_key1": ["bottom_key1_value"],
|
assert len(vault.all_note_paths) == 2
|
||||||
"bottom_key2": ["bottom_key2_value"],
|
assert len(vault.notes_in_scope) == 2
|
||||||
"intext_key": ["intext_value"],
|
|
||||||
"key📅": ["📅_key_value"],
|
|
||||||
"shared_key1": ["shared_key1_value", "shared_key1_value2"],
|
|
||||||
"shared_key2": ["shared_key2_value2"],
|
|
||||||
"top_key1": ["top_key1_value"],
|
|
||||||
"top_key2": ["top_key2_value"],
|
|
||||||
"top_key3": ["top_key3_value_as_link"],
|
|
||||||
}
|
|
||||||
assert vault.metadata.frontmatter == {
|
|
||||||
"date_created": ["2022-12-22"],
|
|
||||||
"frontmatter_Key1": ["author name"],
|
|
||||||
"frontmatter_Key2": ["article", "note"],
|
|
||||||
"shared_key1": ["shared_key1_value", "shared_key1_value3"],
|
|
||||||
"shared_key2": ["shared_key2_value1"],
|
|
||||||
"tags": [
|
|
||||||
"frontmatter_tag1",
|
|
||||||
"frontmatter_tag2",
|
|
||||||
"shared_tag",
|
|
||||||
"📅/frontmatter_tag3",
|
|
||||||
],
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def set_insert_location(test_vault):
|
def set_insert_location(test_vault):
|
||||||
@@ -104,139 +73,36 @@ def set_insert_location(test_vault):
|
|||||||
assert vault.insert_location == InsertLocation.BOTTOM
|
assert vault.insert_location == InsertLocation.BOTTOM
|
||||||
|
|
||||||
|
|
||||||
def test_add_metadata_1(test_vault) -> None:
|
@pytest.mark.parametrize(
|
||||||
"""Test adding metadata to the vault.
|
("meta_type", "key", "value", "expected"),
|
||||||
|
[
|
||||||
|
(MetadataType.FRONTMATTER, "new_key", "new_value", 2),
|
||||||
|
(MetadataType.FRONTMATTER, "frontmatter1", "new_value", 2),
|
||||||
|
(MetadataType.INLINE, "new_key", "new_value", 2),
|
||||||
|
(MetadataType.INLINE, "inline5", "new_value", 2),
|
||||||
|
(MetadataType.INLINE, "inline1", "foo", 1),
|
||||||
|
(MetadataType.TAGS, None, "new_value", 2),
|
||||||
|
(MetadataType.TAGS, None, "tag1", 1),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_add_metadata(test_vault, meta_type, key, value, expected):
|
||||||
|
"""Test add_metadata method.
|
||||||
|
|
||||||
GIVEN a vault object
|
GIVEN a vault object
|
||||||
WHEN a new metadata key is added
|
WHEN metadata is added
|
||||||
THEN the metadata is added to the vault
|
THEN add the metadata and return the number of notes updated
|
||||||
"""
|
"""
|
||||||
vault = Vault(config=test_vault)
|
vault = Vault(config=test_vault)
|
||||||
|
assert vault.add_metadata(meta_type, key, value) == expected
|
||||||
|
|
||||||
assert vault.add_metadata(MetadataType.FRONTMATTER, "new_key") == 2
|
if meta_type == MetadataType.FRONTMATTER:
|
||||||
assert vault.metadata.dict == {
|
assert value in vault.frontmatter[key]
|
||||||
"bottom_key1": ["bottom_key1_value"],
|
|
||||||
"bottom_key2": ["bottom_key2_value"],
|
|
||||||
"date_created": ["2022-12-22"],
|
|
||||||
"frontmatter_Key1": ["author name"],
|
|
||||||
"frontmatter_Key2": ["article", "note"],
|
|
||||||
"intext_key": ["intext_value"],
|
|
||||||
"key📅": ["📅_key_value"],
|
|
||||||
"new_key": [],
|
|
||||||
"shared_key1": [
|
|
||||||
"shared_key1_value",
|
|
||||||
"shared_key1_value2",
|
|
||||||
"shared_key1_value3",
|
|
||||||
],
|
|
||||||
"shared_key2": ["shared_key2_value1", "shared_key2_value2"],
|
|
||||||
"tags": [
|
|
||||||
"frontmatter_tag1",
|
|
||||||
"frontmatter_tag2",
|
|
||||||
"shared_tag",
|
|
||||||
"📅/frontmatter_tag3",
|
|
||||||
],
|
|
||||||
"top_key1": ["top_key1_value"],
|
|
||||||
"top_key2": ["top_key2_value"],
|
|
||||||
"top_key3": ["top_key3_value_as_link"],
|
|
||||||
}
|
|
||||||
assert vault.metadata.frontmatter == {
|
|
||||||
"date_created": ["2022-12-22"],
|
|
||||||
"frontmatter_Key1": ["author name"],
|
|
||||||
"frontmatter_Key2": ["article", "note"],
|
|
||||||
"new_key": [],
|
|
||||||
"shared_key1": ["shared_key1_value", "shared_key1_value3"],
|
|
||||||
"shared_key2": ["shared_key2_value1"],
|
|
||||||
"tags": [
|
|
||||||
"frontmatter_tag1",
|
|
||||||
"frontmatter_tag2",
|
|
||||||
"shared_tag",
|
|
||||||
"📅/frontmatter_tag3",
|
|
||||||
],
|
|
||||||
}
|
|
||||||
|
|
||||||
|
if meta_type == MetadataType.INLINE:
|
||||||
|
assert value in vault.inline_meta[key]
|
||||||
|
|
||||||
def test_add_metadata_2(test_vault) -> None:
|
if meta_type == MetadataType.TAGS:
|
||||||
"""Test adding metadata to the vault.
|
assert value in vault.tags
|
||||||
|
|
||||||
GIVEN a vault object
|
|
||||||
WHEN a new metadata key and value is added
|
|
||||||
THEN the metadata is added to the vault
|
|
||||||
"""
|
|
||||||
vault = Vault(config=test_vault)
|
|
||||||
assert vault.add_metadata(MetadataType.FRONTMATTER, "new_key2", "new_key2_value") == 2
|
|
||||||
assert vault.metadata.dict == {
|
|
||||||
"bottom_key1": ["bottom_key1_value"],
|
|
||||||
"bottom_key2": ["bottom_key2_value"],
|
|
||||||
"date_created": ["2022-12-22"],
|
|
||||||
"frontmatter_Key1": ["author name"],
|
|
||||||
"frontmatter_Key2": ["article", "note"],
|
|
||||||
"intext_key": ["intext_value"],
|
|
||||||
"key📅": ["📅_key_value"],
|
|
||||||
"new_key2": ["new_key2_value"],
|
|
||||||
"shared_key1": [
|
|
||||||
"shared_key1_value",
|
|
||||||
"shared_key1_value2",
|
|
||||||
"shared_key1_value3",
|
|
||||||
],
|
|
||||||
"shared_key2": ["shared_key2_value1", "shared_key2_value2"],
|
|
||||||
"tags": [
|
|
||||||
"frontmatter_tag1",
|
|
||||||
"frontmatter_tag2",
|
|
||||||
"shared_tag",
|
|
||||||
"📅/frontmatter_tag3",
|
|
||||||
],
|
|
||||||
"top_key1": ["top_key1_value"],
|
|
||||||
"top_key2": ["top_key2_value"],
|
|
||||||
"top_key3": ["top_key3_value_as_link"],
|
|
||||||
}
|
|
||||||
assert vault.metadata.frontmatter == {
|
|
||||||
"date_created": ["2022-12-22"],
|
|
||||||
"frontmatter_Key1": ["author name"],
|
|
||||||
"frontmatter_Key2": ["article", "note"],
|
|
||||||
"new_key2": ["new_key2_value"],
|
|
||||||
"shared_key1": ["shared_key1_value", "shared_key1_value3"],
|
|
||||||
"shared_key2": ["shared_key2_value1"],
|
|
||||||
"tags": [
|
|
||||||
"frontmatter_tag1",
|
|
||||||
"frontmatter_tag2",
|
|
||||||
"shared_tag",
|
|
||||||
"📅/frontmatter_tag3",
|
|
||||||
],
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def test_commit_changes_1(test_vault, tmp_path):
|
|
||||||
"""Test committing changes to content in the vault.
|
|
||||||
|
|
||||||
GIVEN a vault object
|
|
||||||
WHEN the commit_changes method is called
|
|
||||||
THEN the changes are committed to the vault
|
|
||||||
"""
|
|
||||||
vault = Vault(config=test_vault)
|
|
||||||
|
|
||||||
content = Path(f"{tmp_path}/vault/test1.md").read_text()
|
|
||||||
assert "new_key: new_key_value" not in content
|
|
||||||
vault.add_metadata(MetadataType.FRONTMATTER, "new_key", "new_key_value")
|
|
||||||
vault.commit_changes()
|
|
||||||
committed_content = Path(f"{tmp_path}/vault/test1.md").read_text()
|
|
||||||
assert "new_key: new_key_value" in committed_content
|
|
||||||
|
|
||||||
|
|
||||||
def test_commit_changes_2(test_vault, tmp_path):
|
|
||||||
"""Test committing changes to content in the vault in dry run mode.
|
|
||||||
|
|
||||||
GIVEN a vault object
|
|
||||||
WHEN dry_run is set to True
|
|
||||||
THEN no changes are committed to the vault
|
|
||||||
"""
|
|
||||||
vault = Vault(config=test_vault, dry_run=True)
|
|
||||||
content = Path(f"{tmp_path}/vault/test1.md").read_text()
|
|
||||||
assert "new_key: new_key_value" not in content
|
|
||||||
|
|
||||||
vault.add_metadata(MetadataType.FRONTMATTER, "new_key", "new_key_value")
|
|
||||||
vault.commit_changes()
|
|
||||||
committed_content = Path(f"{tmp_path}/vault/test1.md").read_text()
|
|
||||||
assert "new_key: new_key_value" not in committed_content
|
|
||||||
|
|
||||||
|
|
||||||
def test_backup_1(test_vault, capsys):
|
def test_backup_1(test_vault, capsys):
|
||||||
@@ -276,6 +142,92 @@ def test_backup_2(test_vault, capsys):
|
|||||||
assert captured.out == Regex(r"DRYRUN +| Backup up vault to")
|
assert captured.out == Regex(r"DRYRUN +| Backup up vault to")
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
("meta_type", "key", "value", "is_regex", "expected"),
|
||||||
|
[
|
||||||
|
(MetadataType.FRONTMATTER, "frontmatter1", None, False, True),
|
||||||
|
(MetadataType.FRONTMATTER, "frontmatter1", "foo", False, True),
|
||||||
|
(MetadataType.FRONTMATTER, "no_key", None, False, False),
|
||||||
|
(MetadataType.FRONTMATTER, "frontmatter1", "no_value", False, False),
|
||||||
|
(MetadataType.FRONTMATTER, r"f\w+\d", None, True, True),
|
||||||
|
(MetadataType.FRONTMATTER, r"f\w+\d", r"\w+", True, True),
|
||||||
|
(MetadataType.FRONTMATTER, r"^\d+", None, True, False),
|
||||||
|
(MetadataType.FRONTMATTER, r"frontmatter1", r"^\d+", True, False),
|
||||||
|
(MetadataType.INLINE, "intext1", None, False, True),
|
||||||
|
(MetadataType.INLINE, "intext1", "foo", False, True),
|
||||||
|
(MetadataType.INLINE, "no_key", None, False, False),
|
||||||
|
(MetadataType.INLINE, "intext1", "no_value", False, False),
|
||||||
|
(MetadataType.INLINE, r"i\w+\d", None, True, True),
|
||||||
|
(MetadataType.INLINE, r"i\w+\d", r"\w+", True, True),
|
||||||
|
(MetadataType.INLINE, r"^\d+", None, True, False),
|
||||||
|
(MetadataType.INLINE, r"intext1", r"^\d+", True, False),
|
||||||
|
(MetadataType.TAGS, None, "tag1", False, True),
|
||||||
|
(MetadataType.TAGS, None, "no tag", False, False),
|
||||||
|
(MetadataType.TAGS, None, r"^\w+\d", True, True),
|
||||||
|
(MetadataType.TAGS, None, r"^\d", True, False),
|
||||||
|
# MetadataType.META and MetadataType.ALL
|
||||||
|
(MetadataType.META, "frontmatter1", None, False, True),
|
||||||
|
(MetadataType.META, "frontmatter1", "foo", False, True),
|
||||||
|
(MetadataType.META, "no_key", None, False, False),
|
||||||
|
(MetadataType.META, "frontmatter1", "no_value", False, False),
|
||||||
|
(MetadataType.META, r"f\w+\d", None, True, True),
|
||||||
|
(MetadataType.META, r"f\w+\d", r"\w+", True, True),
|
||||||
|
(MetadataType.META, r"^\d+", None, True, False),
|
||||||
|
(MetadataType.META, r"frontmatter1", r"^\d+", True, False),
|
||||||
|
(MetadataType.META, r"i\w+\d", None, True, True),
|
||||||
|
(MetadataType.ALL, None, "tag1", False, True),
|
||||||
|
(MetadataType.ALL, None, "no tag", False, False),
|
||||||
|
(MetadataType.ALL, None, r"^\w+\d", True, True),
|
||||||
|
(MetadataType.ALL, None, r"^\d", True, False),
|
||||||
|
(MetadataType.ALL, "frontmatter1", "foo", False, True),
|
||||||
|
(MetadataType.ALL, r"i\w+\d", None, True, True),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_contains_metadata(test_vault, meta_type, key, value, is_regex, expected):
|
||||||
|
"""Test the contains_metadata method.
|
||||||
|
|
||||||
|
GIVEN a vault object
|
||||||
|
WHEN the contains_metadata method is called
|
||||||
|
THEN the method returns True if the metadata is found
|
||||||
|
"""
|
||||||
|
vault = Vault(config=test_vault)
|
||||||
|
assert vault.contains_metadata(meta_type, key, value, is_regex) == expected
|
||||||
|
|
||||||
|
|
||||||
|
def test_commit_changes_1(test_vault, tmp_path):
|
||||||
|
"""Test committing changes to content in the vault.
|
||||||
|
|
||||||
|
GIVEN a vault object
|
||||||
|
WHEN the commit_changes method is called
|
||||||
|
THEN the changes are committed to the vault
|
||||||
|
"""
|
||||||
|
vault = Vault(config=test_vault)
|
||||||
|
|
||||||
|
content = Path(f"{tmp_path}/vault/sample_note.md").read_text()
|
||||||
|
assert "new_key: new_key_value" not in content
|
||||||
|
vault.add_metadata(MetadataType.FRONTMATTER, "new_key", "new_key_value")
|
||||||
|
vault.commit_changes()
|
||||||
|
committed_content = Path(f"{tmp_path}/vault/sample_note.md").read_text()
|
||||||
|
assert "new_key: new_key_value" in committed_content
|
||||||
|
|
||||||
|
|
||||||
|
def test_commit_changes_2(test_vault, tmp_path):
|
||||||
|
"""Test committing changes to content in the vault in dry run mode.
|
||||||
|
|
||||||
|
GIVEN a vault object
|
||||||
|
WHEN dry_run is set to True
|
||||||
|
THEN no changes are committed to the vault
|
||||||
|
"""
|
||||||
|
vault = Vault(config=test_vault, dry_run=True)
|
||||||
|
content = Path(f"{tmp_path}/vault/sample_note.md").read_text()
|
||||||
|
assert "new_key: new_key_value" not in content
|
||||||
|
|
||||||
|
vault.add_metadata(MetadataType.FRONTMATTER, "new_key", "new_key_value")
|
||||||
|
vault.commit_changes()
|
||||||
|
committed_content = Path(f"{tmp_path}/vault/sample_note.md").read_text()
|
||||||
|
assert "new_key: new_key_value" not in committed_content
|
||||||
|
|
||||||
|
|
||||||
def test_delete_backup_1(test_vault, capsys):
|
def test_delete_backup_1(test_vault, capsys):
|
||||||
"""Test deleting the vault backup.
|
"""Test deleting the vault backup.
|
||||||
|
|
||||||
@@ -315,75 +267,64 @@ def test_delete_backup_2(test_vault, capsys):
|
|||||||
assert vault.backup_path.exists() is True
|
assert vault.backup_path.exists() is True
|
||||||
|
|
||||||
|
|
||||||
def test_delete_tag_1(test_vault) -> None:
|
@pytest.mark.parametrize(
|
||||||
"""Test delete_tag() method.
|
("tag_to_delete", "expected"),
|
||||||
|
[
|
||||||
|
("tag1", 1),
|
||||||
|
("tag2", 1),
|
||||||
|
("tag3", 0),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_delete_tag(test_vault, tag_to_delete, expected):
|
||||||
|
"""Test delete_tag method.
|
||||||
|
|
||||||
GIVEN a vault object
|
GIVEN a vault object
|
||||||
WHEN the delete_tag method is called
|
WHEN the delete_tag method is called
|
||||||
THEN the inline tag is deleted
|
THEN delete tags if found and return the number of notes updated
|
||||||
"""
|
"""
|
||||||
vault = Vault(config=test_vault)
|
vault = Vault(config=test_vault)
|
||||||
|
|
||||||
assert vault.delete_tag("intext_tag2") == 1
|
assert vault.delete_tag(tag_to_delete) == expected
|
||||||
assert vault.metadata.tags == [
|
assert tag_to_delete not in vault.tags
|
||||||
"inline_tag_bottom1",
|
|
||||||
"inline_tag_bottom2",
|
|
||||||
"inline_tag_top1",
|
|
||||||
"inline_tag_top2",
|
|
||||||
"intext_tag1",
|
|
||||||
"shared_tag",
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def test_delete_tag_2(test_vault) -> None:
-    """Test delete_tag() method.
+@pytest.mark.parametrize(
+    ("meta_type", "key_to_delete", "value_to_delete", "expected"),
+    [
+        (MetadataType.FRONTMATTER, "frontmatter1", "foo", 1),
+        (MetadataType.FRONTMATTER, "frontmatter1", None, 1),
+        (MetadataType.FRONTMATTER, "frontmatter1", "bar", 0),
+        (MetadataType.FRONTMATTER, "frontmatter2", "bar", 1),
+        (MetadataType.META, "frontmatter1", "foo", 1),
+        (MetadataType.INLINE, "frontmatter1", "foo", 0),
+        (MetadataType.INLINE, "inline1", "foo", 1),
+        (MetadataType.INLINE, "inline1", None, 1),
+    ],
+)
+def test_delete_metadata(test_vault, meta_type, key_to_delete, value_to_delete, expected):
+    """Test delete_metadata method.
 
     GIVEN a vault object
-    WHEN the delete_tag method is called with a tag that does not exist
-    THEN no changes are made
+    WHEN the delete_metadata method is called
+    THEN delete metadata if found and return the number of notes updated
     """
     vault = Vault(config=test_vault)
+    assert (
+        vault.delete_metadata(meta_type=meta_type, key=key_to_delete, value=value_to_delete)
+        == expected
+    )
 
-    assert vault.delete_tag("no tag") == 0
-
-
-def test_delete_metadata_1(test_vault) -> None:
-    """Test deleting a metadata key/value.
-
-    GIVEN a vault object
-    WHEN the delete_metadata method is called with a key and value
-    THEN the specified metadata key/value is deleted
-    """
-    vault = Vault(config=test_vault)
-
-    assert vault.delete_metadata("top_key1", "top_key1_value") == 1
-    assert vault.metadata.dict["top_key1"] == []
-
-
-def test_delete_metadata_2(test_vault) -> None:
-    """Test deleting a metadata key/value.
-
-    GIVEN a vault object
-    WHEN the delete_metadata method is called with a key
-    THEN the specified metadata key is deleted
-    """
-    vault = Vault(config=test_vault)
-
-    assert vault.delete_metadata("top_key2") == 1
-    assert "top_key2" not in vault.metadata.dict
-
-
-def test_delete_metadata_3(test_vault) -> None:
-    """Test deleting a metadata key/value.
-
-    GIVEN a vault object
-    WHEN the delete_metadata method is called with a key and/or value that does not exist
-    THEN no changes are made
-    """
-    vault = Vault(config=test_vault)
-
-    assert vault.delete_metadata("no key") == 0
-    assert vault.delete_metadata("top_key1", "no_value") == 0
+    if meta_type == MetadataType.FRONTMATTER or meta_type == MetadataType.META:
+        if value_to_delete is None:
+            assert key_to_delete not in vault.frontmatter
+        elif key_to_delete in vault.frontmatter:
+            assert value_to_delete not in vault.frontmatter[key_to_delete]
+
+    if meta_type == MetadataType.INLINE or meta_type == MetadataType.META:
+        if value_to_delete is None:
+            assert key_to_delete not in vault.inline_meta
+        elif key_to_delete in vault.inline_meta:
+            assert value_to_delete not in vault.inline_meta[key_to_delete]
 
 
 def test_export_csv_1(tmp_path, test_vault):
@@ -394,11 +335,16 @@ def test_export_csv_1(tmp_path, test_vault):
     THEN the vault metadata is exported to a CSV file
     """
     vault = Vault(config=test_vault)
-    export_file = Path(f"{tmp_path}/export.csv")
+    export_file = tmp_path / "export.csv"
 
     vault.export_metadata(path=export_file, export_format="csv")
     assert export_file.exists() is True
-    assert "frontmatter,date_created,2022-12-22" in export_file.read_text()
+    result = export_file.read_text()
+    assert "Metadata Type,Key,Value" in result
+    assert "frontmatter,date_created,2022-12-22" in result
+    assert "inline_metadata,🌱,🌿" in result
+    assert "inline_metadata,inline5,\n" in result
+    assert "tags,,tag1" in result
 
 
 def test_export_csv_2(tmp_path, test_vault):
@@ -409,7 +355,7 @@ def test_export_csv_2(tmp_path, test_vault):
     THEN an error is raised
     """
     vault = Vault(config=test_vault)
-    export_file = Path(f"{tmp_path}/does_not_exist/export.csv")
+    export_file = tmp_path / "does_not_exist" / "export.csv"
 
     with pytest.raises(typer.Exit):
         vault.export_metadata(path=export_file, export_format="csv")
@@ -424,11 +370,14 @@ def test_export_json(tmp_path, test_vault):
     THEN the vault metadata is exported to a JSON file
     """
     vault = Vault(config=test_vault)
-    export_file = Path(f"{tmp_path}/export.json")
+    export_file = tmp_path / "export.json"
 
     vault.export_metadata(path=export_file, export_format="json")
     assert export_file.exists() is True
-    assert '"frontmatter": {' in export_file.read_text()
+    result = export_file.read_text()
+    assert '"frontmatter": {' in result
+    assert '"inline_metadata": {' in result
+    assert '"tags": [' in result
 
 
 def test_export_notes_to_csv_1(tmp_path, test_vault):
@@ -439,15 +388,17 @@ def test_export_notes_to_csv_1(tmp_path, test_vault):
     THEN the notes are exported to a CSV file
     """
     vault = Vault(config=test_vault)
-    export_file = Path(f"{tmp_path}/export.csv")
+    export_file = tmp_path / "export.csv"
     vault.export_notes_to_csv(path=export_file)
     assert export_file.exists() is True
-    assert "path,type,key,value" in export_file.read_text()
-    assert "test1.md,frontmatter,shared_key1,shared_key1_value" in export_file.read_text()
-    assert "test1.md,inline_metadata,shared_key1,shared_key1_value" in export_file.read_text()
-    assert "test1.md,tag,,shared_tag" in export_file.read_text()
-    assert "test1.md,frontmatter,tags,📅/frontmatter_tag3" in export_file.read_text()
-    assert "test1.md,inline_metadata,key📅,📅_key_value" in export_file.read_text()
+    result = export_file.read_text()
+    assert "path,type,key,value" in result
+    assert "sample_note.md,FRONTMATTER,date_created,2022-12-22" in result
+    assert "sample_note.md,FRONTMATTER,🌱,🌿" in result
+    assert "sample_note.md,INLINE,inline2,[[foo]]" in result
+    assert "sample_note.md,INLINE,inline1,bar baz" in result
+    assert "sample_note.md,TAGS,,tag1" in result
+    assert "sample_note.md,INLINE,inline5,\n" in result
 
 
 def test_export_notes_to_csv_2(test_vault):
@@ -531,7 +482,7 @@ def test_get_filtered_notes_4(sample_vault) -> None:
     filters = [VaultFilter(tag_filter="brunch")]
     vault = Vault(config=vault_config, filters=filters)
     assert len(vault.all_notes) == 13
-    assert len(vault.notes_in_scope) == 1
+    assert len(vault.notes_in_scope) == 0
 
 
 def test_get_filtered_notes_5(sample_vault) -> None:
@@ -550,6 +501,21 @@ def test_get_filtered_notes_5(sample_vault) -> None:
     assert len(vault.notes_in_scope) == 0
 
 
+def test_get_changed_notes(test_vault, tmp_path):
+    """Test get_changed_notes() method.
+
+    GIVEN a vault object
+    WHEN the get_changed_notes method is called
+    THEN the changed notes are returned
+    """
+    vault = Vault(config=test_vault)
+    assert vault.get_changed_notes() == []
+    vault.delete_metadata(key="frontmatter1", meta_type=MetadataType.FRONTMATTER)
+    changed_notes = vault.get_changed_notes()
+    assert len(changed_notes) == 1
+    assert changed_notes[0].note_path == tmp_path / "vault" / "sample_note.md"
+
+
 def test_info(test_vault, capsys):
     """Test info() method.
 
@@ -561,10 +527,10 @@ def test_info(test_vault, capsys):
 
     vault.info()
 
-    captured = capsys.readouterr()
-    assert captured.out == Regex(r"Vault +\│ /[\d\w]+")
-    assert captured.out == Regex(r"Notes in scope +\│ \d+")
-    assert captured.out == Regex(r"Backup +\│ None")
+    captured = strip_ansi(capsys.readouterr().out)
+    assert captured == Regex(r"Vault +\│ /[\d\w]+")
+    assert captured == Regex(r"Notes in scope +\│ \d+")
+    assert captured == Regex(r"Backup +\│ None")
 
 
 def test_list_editable_notes(test_vault, capsys) -> None:
@@ -579,7 +545,7 @@ def test_list_editable_notes(test_vault, capsys) -> None:
     vault.list_editable_notes()
     captured = capsys.readouterr()
     assert captured.out == Regex("Notes in current scope")
-    assert captured.out == Regex(r"\d +test1\.md")
+    assert captured.out == Regex(r"\d +sample_note\.md")
 
 
 def test_move_inline_metadata_1(test_vault) -> None:
@@ -594,6 +560,40 @@ def test_move_inline_metadata_1(test_vault) -> None:
     assert vault.move_inline_metadata(location=InsertLocation.TOP) == 1
 
 
+@pytest.mark.parametrize(
+    ("meta_type", "expected_regex"),
+    [
+        (
+            MetadataType.ALL,
+            r"All metadata.*Keys +┃ Values +┃.*frontmatter1 +│ foo.*inline1 +│ bar baz.*tags +│ bar.*All inline tags.*#tag1.*#tag2",
+        ),
+        (
+            MetadataType.FRONTMATTER,
+            r"All frontmatter.*Keys +┃ Values +┃.*frontmatter1 +│ foo.*tags +│ bar",
+        ),
+        (
+            MetadataType.INLINE,
+            r"All inline metadata.*Keys +┃ Values +┃.*inline2 +│ \[\[foo\]\]",
+        ),
+        (
+            MetadataType.TAGS,
+            r"All inline tags.*#tag1.*#tag2",
+        ),
+    ],
+)
+def test_print_metadata(test_vault, capsys, meta_type, expected_regex) -> None:
+    """Test print_metadata() method.
+
+    GIVEN a vault object
+    WHEN the print_metadata() method is called
+    THEN the metadata is printed
+    """
+    vault = Vault(config=test_vault)
+    vault.print_metadata(meta_type=meta_type)
+    captured = strip_ansi(capsys.readouterr().out)
+    assert captured == Regex(expected_regex, re.DOTALL)
+
+
 def test_rename_tag_1(test_vault) -> None:
     """Test rename_tag() method.
 
@@ -603,16 +603,9 @@ def test_rename_tag_1(test_vault) -> None:
     """
     vault = Vault(config=test_vault)
 
-    assert vault.rename_tag("intext_tag2", "new_tag") == 1
-    assert vault.metadata.tags == [
-        "inline_tag_bottom1",
-        "inline_tag_bottom2",
-        "inline_tag_top1",
-        "inline_tag_top2",
-        "intext_tag1",
-        "new_tag",
-        "shared_tag",
-    ]
+    assert vault.rename_tag("tag1", "new_tag") == 1
+    assert "tag1" not in vault.tags
+    assert "new_tag" in vault.tags
 
 
 def test_rename_tag_2(test_vault) -> None:
@@ -625,9 +618,21 @@ def test_rename_tag_2(test_vault) -> None:
     vault = Vault(config=test_vault)
 
     assert vault.rename_tag("no tag", "new_tag") == 0
+    assert "new_tag" not in vault.tags
 
 
-def test_rename_metadata_1(test_vault) -> None:
+@pytest.mark.parametrize(
+    ("key", "value1", "value2", "expected"),
+    [
+        ("no key", "new_value", None, 0),
+        ("frontmatter1", "no_value", "new_value", 0),
+        ("frontmatter1", "foo", "new_value", 1),
+        ("inline1", "foo", "new_value", 1),
+        ("frontmatter1", "new_key", None, 1),
+        ("inline1", "new_key", None, 1),
+    ],
+)
+def test_rename_metadata(test_vault, key, value1, value2, expected) -> None:
     """Test rename_metadata() method.
 
     GIVEN a vault object
@@ -636,90 +641,63 @@ def test_rename_metadata_1(test_vault) -> None:
     """
     vault = Vault(config=test_vault)
 
-    assert vault.rename_metadata("no key", "new_key") == 0
-    assert vault.rename_metadata("tags", "nonexistent_value", "new_vaule") == 0
-
-
-def test_rename_metadata_2(test_vault) -> None:
-    """Test rename_metadata() method.
-
-    GIVEN a vault object
-    WHEN the rename_metadata() method with a key and no value
-    THEN the metadata key is renamed
-    """
-    vault = Vault(config=test_vault)
-
-    assert vault.rename_metadata("tags", "new_key") == 1
-    assert "tags" not in vault.metadata.dict
-    assert vault.metadata.dict["new_key"] == [
-        "frontmatter_tag1",
-        "frontmatter_tag2",
-        "shared_tag",
-        "📅/frontmatter_tag3",
-    ]
-
-
-def test_rename_metadata_3(test_vault) -> None:
-    """Test rename_metadata() method.
-
-    GIVEN a vault object
-    WHEN the rename_metadata() method is called with a key and value
-    THEN the metadata key/value is renamed
-    """
-    vault = Vault(config=test_vault)
-
-    assert vault.rename_metadata("tags", "frontmatter_tag1", "new_vaule") == 1
-    assert vault.metadata.dict["tags"] == [
-        "frontmatter_tag2",
-        "new_vaule",
-        "shared_tag",
-        "📅/frontmatter_tag3",
-    ]
-
-
-def test_transpose_metadata(test_vault) -> None:
+    assert vault.rename_metadata(key, value1, value2) == expected
+
+    if expected > 0 and value2 is None:
+        assert key not in vault.frontmatter
+        assert key not in vault.inline_meta
+
+    if expected > 0 and value2:
+        if key in vault.frontmatter:
+            assert value1 not in vault.frontmatter[key]
+            assert value2 in vault.frontmatter[key]
+        if key in vault.inline_meta:
+            assert value1 not in vault.inline_meta[key]
+            assert value2 in vault.inline_meta[key]
+
+
+@pytest.mark.parametrize(
+    ("begin", "end", "key", "value", "expected"),
+    [
+        # no matches
+        (MetadataType.INLINE, MetadataType.FRONTMATTER, "no key", None, 0),
+        (MetadataType.INLINE, MetadataType.FRONTMATTER, "no key", "new_value", 0),
+        (MetadataType.INLINE, MetadataType.FRONTMATTER, "inline1", "new_value", 0),
+        (MetadataType.FRONTMATTER, MetadataType.INLINE, "no key", None, 0),
+        (MetadataType.FRONTMATTER, MetadataType.INLINE, "no key", "new_value", 0),
+        (MetadataType.FRONTMATTER, MetadataType.INLINE, "frontmatter1", "new_value", 0),
+        # entire keys
+        (MetadataType.FRONTMATTER, MetadataType.INLINE, "frontmatter1", None, 1),
+        (MetadataType.FRONTMATTER, MetadataType.INLINE, "frontmatter2", None, 1),
+        (MetadataType.INLINE, MetadataType.FRONTMATTER, "inline1", None, 1),
+        # specific values
+        (MetadataType.FRONTMATTER, MetadataType.INLINE, "frontmatter1", "foo", 1),
+        (MetadataType.INLINE, MetadataType.FRONTMATTER, "inline1", "bar baz", 1),
+        (MetadataType.INLINE, MetadataType.FRONTMATTER, "inline2", "[[foo]]", 1),
+    ],
+)
+def test_transpose_metadata_1(test_vault, begin, end, key, value, expected) -> None:
     """Test transpose_metadata() method.
 
     GIVEN a vault object
     WHEN the transpose_metadata() method is called
-    THEN the metadata is transposed
+    THEN the number of notes with transposed metadata is returned and the vault metadata is updated
     """
     vault = Vault(config=test_vault)
 
-    assert vault.transpose_metadata(begin=MetadataType.INLINE, end=MetadataType.FRONTMATTER) == 1
-
-    assert vault.metadata.inline_metadata == {}
-    assert vault.metadata.frontmatter == {
-        "bottom_key1": ["bottom_key1_value"],
-        "bottom_key2": ["bottom_key2_value"],
-        "date_created": ["2022-12-22"],
-        "frontmatter_Key1": ["author name"],
-        "frontmatter_Key2": ["article", "note"],
-        "intext_key": ["intext_value"],
-        "key📅": ["📅_key_value"],
-        "shared_key1": [
-            "shared_key1_value",
-            "shared_key1_value2",
-            "shared_key1_value3",
-        ],
-        "shared_key2": ["shared_key2_value1", "shared_key2_value2"],
-        "tags": [
-            "frontmatter_tag1",
-            "frontmatter_tag2",
-            "shared_tag",
-            "📅/frontmatter_tag3",
-        ],
-        "top_key1": ["top_key1_value"],
-        "top_key2": ["top_key2_value"],
-        "top_key3": ["top_key3_value_as_link"],
-    }
-
-    assert (
-        vault.transpose_metadata(
-            begin=MetadataType.INLINE, end=MetadataType.FRONTMATTER, location=InsertLocation.TOP
-        )
-        == 0
-    )
+    assert vault.transpose_metadata(begin=begin, end=end, key=key, value=value) == expected
+
+    if expected > 0:
+        if begin == MetadataType.INLINE and value is None:
+            assert key not in vault.inline_meta
+            assert key in vault.frontmatter
+        elif begin == MetadataType.FRONTMATTER and value is None:
+            assert key not in vault.frontmatter
+            assert key in vault.inline_meta
+        elif begin == MetadataType.INLINE and value:
+            assert value in vault.frontmatter[key]
+        elif begin == MetadataType.FRONTMATTER and value:
+            assert value in vault.inline_meta[key]
 
 
 def test_update_from_dict_1(test_vault):
@@ -729,11 +707,11 @@ def test_update_from_dict_1(test_vault):
     WHEN no dictionary keys match paths in the vault
     THEN no notes are updated and 0 is returned
     """
-    vault = Vault(config=test_vault)
     update_dict = {
         "path1": {"type": "frontmatter", "key": "new_key", "value": "new_value"},
         "path2": {"type": "frontmatter", "key": "new_key", "value": "new_value"},
     }
+    vault = Vault(config=test_vault)
 
     assert vault.update_from_dict(update_dict) == 0
     assert vault.get_changed_notes() == []
@@ -763,17 +741,18 @@ def test_update_from_dict_3(test_vault):
     vault = Vault(config=test_vault)
 
     update_dict = {
-        "test1.md": [
+        "sample_note.md": [
             {"type": "frontmatter", "key": "new_key", "value": "new_value"},
             {"type": "inline_metadata", "key": "new_key2", "value": "new_value"},
             {"type": "tag", "key": "", "value": "new_tag"},
         ]
     }
     assert vault.update_from_dict(update_dict) == 1
-    assert vault.get_changed_notes()[0].note_path.name == "test1.md"
-    assert vault.get_changed_notes()[0].frontmatter.dict == {"new_key": ["new_value"]}
-    assert vault.get_changed_notes()[0].inline_metadata.dict == {"new_key2": ["new_value"]}
-    assert vault.get_changed_notes()[0].tags.list == ["new_tag"]
-    assert vault.metadata.frontmatter == {"new_key": ["new_value"]}
-    assert vault.metadata.inline_metadata == {"new_key2": ["new_value"]}
-    assert vault.metadata.tags == ["new_tag"]
+    note = vault.get_changed_notes()[0]
+    assert note.note_path.name == "sample_note.md"
+    assert len(note.metadata) == 3
+    assert vault.frontmatter == {"new_key": ["new_value"]}
+    assert vault.inline_meta == {"new_key2": ["new_value"]}
+    assert vault.tags == ["new_tag"]