Mirror of https://github.com/natelandau/obsidian-metadata.git (synced 2025-11-17 09:23:40 -05:00)

feat: add new inline metadata (#15)

* feat: add new inline metadata to notes
* fix: prepend note content after frontmatter
* refactor: cleanup search patterns
* feat(regex): find top of note
* test: add headers
* fix: insert to specified location
* test: improve test coverage
* docs: add inline metadata

Committed by Nathaniel Landau
Parent: 13513b2a14
Commit: 17985615b3
@@ -61,7 +61,7 @@ repos:
         entry: yamllint --strict --config-file .yamllint.yml

   - repo: "https://github.com/charliermarsh/ruff-pre-commit"
-    rev: "v0.0.239"
+    rev: "v0.0.240"
    hooks:
      - id: ruff
        args: ["--extend-ignore", "I001,D301,D401,PLR2004,PLR0913"]
README.md (11 lines changed)

@@ -63,7 +63,7 @@ Once installed, run `obsidian-metadata` in your terminal to enter an interactive
 **Add Metadata**: Add new metadata to your vault.

 - Add metadata to the frontmatter
-- Add to inline metadata (Not yet implemented)
+- Add to inline metadata - Set `insert_location` in the config to control where the new metadata is inserted. (Default: Bottom)
 - Add to inline tag (Not yet implemented)

 **Rename Metadata**: Rename either a key and all associated values, a specific value within a key. or an in-text tag.
@@ -103,9 +103,16 @@ Below is an example with two vaults.
 # Folders within the vault to ignore when indexing metadata
 exclude_paths = [".git", ".obsidian"]

+# Location to add metadata. One of:
+# TOP: Directly after frontmatter.
+# AFTER_TITLE: After a header following frontmatter.
+# BOTTOM: The bottom of the note
+insert_location = "BOTTOM"
+
 ["Vault Two"]
 path = "/path/to/second_vault"
-exclude_paths = [".git", ".obsidian"]
+exclude_paths = [".git", ".obsidian", "daily_notes"]
+insert_location = "AFTER_TITLE"
 ```

 To bypass the configuration file and specify a vault to use at runtime use the `--vault-path` option.
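For context on the three `insert_location` values documented above, here is a standalone sketch of where a new `key:: value` line lands in a note for each option. The sample note and the `insert_at` helper are invented for illustration; they are not the project's implementation.

```python
# Illustration only: where a new "key:: value" line lands for each insert_location.
NOTE = """---
tags: [demo]
---
# My Title

Body text.
"""


def insert_at(note: str, line: str, location: str) -> str:
    """Return `note` with `line` inserted at TOP, AFTER_TITLE, or BOTTOM."""
    if location == "BOTTOM":
        return note + line + "\n"
    lines = note.splitlines(keepends=True)
    if location == "TOP":
        # directly after the closing "---" of the frontmatter
        idx = lines.index("---\n", 1) + 1
    else:  # AFTER_TITLE: after the first markdown header following the frontmatter
        idx = next(i for i, text in enumerate(lines) if text.startswith("# ")) + 1
    return "".join(lines[:idx]) + line + "\n" + "".join(lines[idx:])


if __name__ == "__main__":
    for loc in ("TOP", "AFTER_TITLE", "BOTTOM"):
        print(f"--- {loc} ---")
        print(insert_at(NOTE, "new_key:: new_value", loc))
```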
poetry.lock (generated, 10 lines changed)

@@ -577,14 +577,14 @@ reports = ["lxml"]

 [[package]]
 name = "mypy-extensions"
-version = "0.4.3"
-description = "Experimental type system extensions for programs checked with the mypy typechecker."
+version = "1.0.0"
+description = "Type system extensions for programs checked with the mypy type checker."
 category = "dev"
 optional = false
-python-versions = "*"
+python-versions = ">=3.5"
 files = [
-    {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"},
-    {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"},
+    {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"},
+    {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"},
 ]

 [[package]]
@@ -75,7 +75,12 @@
 ]
 ignore-init-module-imports = true
 line-length = 100
-per-file-ignores = { "cli.py" = ["PLR0913"] }
+per-file-ignores = { "cli.py" = [
+    "PLR0913",
+], "tests/*.py" = [
+    "E999",
+    "PLR2004",
+] }
 select = [
     "A",
     "B",
@@ -17,6 +17,21 @@ from obsidian_metadata._utils.alerts import logger as log
 class ConfigQuestions:
     """Questions to ask the user when creating a configuration file."""

+    @staticmethod
+    def _validate_valid_dir(path: str) -> bool | str:
+        """Validate a valid directory.
+
+        Returns:
+            bool | str: True if the path is valid, otherwise a string with the error message.
+        """
+        path_to_validate: Path = Path(path).expanduser().resolve()
+        if not path_to_validate.exists():
+            return f"Path does not exist: {path_to_validate}"
+        if not path_to_validate.is_dir():
+            return f"Path is not a directory: {path_to_validate}"
+
+        return True
+
     @staticmethod
     def ask_for_vault_path() -> Path:  # pragma: no cover
         """Ask the user for the path to their vault.
@@ -34,21 +49,6 @@ class ConfigQuestions:

         return Path(vault_path).expanduser().resolve()

-    @staticmethod
-    def _validate_valid_dir(path: str) -> bool | str:
-        """Validate a valid directory.
-
-        Returns:
-            bool | str: True if the path is valid, otherwise a string with the error message.
-        """
-        path_to_validate: Path = Path(path).expanduser().resolve()
-        if not path_to_validate.exists():
-            return f"Path does not exist: {path_to_validate}"
-        if not path_to_validate.is_dir():
-            return f"Path is not a directory: {path_to_validate}"
-
-        return True
-

 @rich.repr.auto
 class Config:
@@ -65,7 +65,11 @@ class Config:
         else:
             self.config_path = None
             self.config = {
-                "command_line_vault": {"path": vault_path, "exclude_paths": [".git", ".obsidian"]}
+                "command_line_vault": {
+                    "path": vault_path,
+                    "exclude_paths": [".git", ".obsidian"],
+                    "insert_location": "BOTTOM",
+                }
             }

         try:
@@ -84,6 +88,15 @@ class Config:
         yield "config_path", self.config_path
         yield "vaults", self.vaults

+    def _load_config(self) -> dict[str, Any]:
+        """Load the configuration file."""
+        try:
+            with open(self.config_path, encoding="utf-8") as fp:
+                return tomlkit.load(fp)
+        except tomlkit.exceptions.TOMLKitError as e:
+            alerts.error(f"Could not parse '{self.config_path}'")
+            raise typer.Exit(code=1) from e
+
     def _validate_config_path(self, config_path: Path | None) -> Path:
         """Load the configuration path."""
         if config_path is None:
@@ -95,15 +108,6 @@ class Config:

         return config_path.expanduser().resolve()

-    def _load_config(self) -> dict[str, Any]:
-        """Load the configuration file."""
-        try:
-            with open(self.config_path, encoding="utf-8") as fp:
-                return tomlkit.load(fp)
-        except tomlkit.exceptions.TOMLKitError as e:
-            alerts.error(f"Could not parse '{self.config_path}'")
-            raise typer.Exit(code=1) from e
-
     def _write_default_config(self, path_to_config: Path) -> None:
         """Write the default configuration file when no config file is found."""
         vault_path = ConfigQuestions.ask_for_vault_path()
@@ -116,7 +120,14 @@ class Config:
     path = "{vault_path}"

     # Folders within the vault to ignore when indexing metadata
-    exclude_paths = [".git", ".obsidian"]"""
+    exclude_paths = [".git", ".obsidian"]
+
+    # Location to add metadata. One of:
+    # TOP: Directly after frontmatter.
+    # AFTER_TITLE: After a header following frontmatter.
+    # BOTTOM: The bottom of the note
+    insert_location = "BOTTOM"
+    """

         path_to_config.write_text(dedent(config_text))

@@ -140,7 +151,12 @@ class VaultConfig:
         try:
             self.exclude_paths = self.config["exclude_paths"]
         except KeyError:
-            self.exclude_paths = []
+            self.exclude_paths = [".git", ".obsidian"]
+
+        try:
+            self.insert_location = self.config["insert_location"]
+        except KeyError:
+            self.insert_location = "BOTTOM"

     def __rich_repr__(self) -> rich.repr.Result:  # pragma: no cover
         """Define rich representation of a vault config."""
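The `VaultConfig` hunk above falls back to defaults when a vault section omits `exclude_paths` or `insert_location`. A rough standalone sketch of that behaviour, using the standard-library `tomllib` (Python 3.11+) instead of the project's `tomlkit`, and an invented config snippet:

```python
import tomllib  # the project itself parses its config with tomlkit

CONFIG_TEXT = """
["Vault One"]
path = "/path/to/vault"
insert_location = "AFTER_TITLE"

["Vault Two"]
path = "/path/to/second_vault"
"""

config = tomllib.loads(CONFIG_TEXT)

for name, vault in config.items():
    exclude_paths = vault.get("exclude_paths", [".git", ".obsidian"])  # default added in this commit
    insert_location = vault.get("insert_location", "BOTTOM")           # default added in this commit
    print(f"{name}: exclude_paths={exclude_paths} insert_location={insert_location}")
```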
@@ -118,7 +118,8 @@ def main(
     [bold underline]Add Metadata[/]
     Add new metadata to your vault.
         • Add metadata to the frontmatter
-        • [dim]Add to inline metadata (Not yet implemented)[/]
+        • Add to inline metadata - Set `insert_location` in the config to
+          control where the new metadata is inserted. (Default: Bottom)
         • [dim]Add to inline tag (Not yet implemented)[/]

     [bold underline]Rename Metadata[/]
@@ -1,5 +1,9 @@
 """Shared models."""
-from obsidian_metadata.models.enums import MetadataType  # isort: skip
+from obsidian_metadata.models.enums import (
+    InsertLocation,
+    MetadataType,
+)
+
 from obsidian_metadata.models.patterns import Patterns  # isort: skip
 from obsidian_metadata.models.metadata import (
     Frontmatter,
@@ -17,11 +21,12 @@ __all__ = [
     "Frontmatter",
     "InlineMetadata",
     "InlineTags",
+    "InsertLocation",
     "LoggerManager",
     "MetadataType",
     "Note",
     "Patterns",
     "Vault",
-    "VaultMetadata",
     "VaultFilter",
+    "VaultMetadata",
 ]
@@ -32,6 +32,19 @@ class Application:
         self.questions = Questions()
         self.filters: list[VaultFilter] = []

+    def _load_vault(self) -> None:
+        """Load the vault."""
+
+        if len(self.filters) == 0:
+            self.vault: Vault = Vault(config=self.config, dry_run=self.dry_run)
+        else:
+            self.vault = Vault(config=self.config, dry_run=self.dry_run, filters=self.filters)
+
+        alerts.success(
+            f"Loaded {len(self.vault.notes_in_scope)} notes from {len(self.vault.all_notes)} total notes"
+        )
+        self.questions = Questions(vault=self.vault)
+
     def application_main(self) -> None:
         """Questions for the main application."""
         self._load_vault()
@@ -70,31 +83,29 @@ class Application:

         area = self.questions.ask_area()
         match area:
-            case MetadataType.FRONTMATTER:
+            case MetadataType.FRONTMATTER | MetadataType.INLINE:
                 key = self.questions.ask_new_key(question="Enter the key for the new metadata")
-                if key is None:
+                if key is None:  # pragma: no cover
                     return

                 value = self.questions.ask_new_value(
                     question="Enter the value for the new metadata"
                 )
-                if value is None:
+                if value is None:  # pragma: no cover
                     return

-                num_changed = self.vault.add_metadata(area, key, value)
-                if num_changed == 0:
+                num_changed = self.vault.add_metadata(
+                    area=area, key=key, value=value, location=self.vault.insert_location
+                )
+                if num_changed == 0:  # pragma: no cover
                     alerts.warning(f"No notes were changed")
                     return

                 alerts.success(f"Added metadata to {num_changed} notes")

-            case MetadataType.INLINE:
-                alerts.warning(f"Adding metadata to {area} is not supported yet")
-
             case MetadataType.TAGS:
                 alerts.warning(f"Adding metadata to {area} is not supported yet")
-            case _:
+            case _:  # pragma: no cover
                 return

     def application_filter(self) -> None:
@@ -114,7 +125,7 @@ class Application:
         match self.questions.ask_selection(choices=choices, question="Select an action"):
             case "apply_path_filter":
                 path = self.questions.ask_filter_path()
-                if path is None or path == "":
+                if path is None or path == "":  # pragma: no cover
                     return

                 self.filters.append(VaultFilter(path_filter=path))
@@ -122,14 +133,14 @@ class Application:

             case "apply_metadata_filter":
                 key = self.questions.ask_existing_key()
-                if key is None:
+                if key is None:  # pragma: no cover
                     return

                 questions2 = Questions(vault=self.vault, key=key)
                 value = questions2.ask_existing_value(
                     question="Enter the value for the metadata filter",
                 )
-                if value is None:
+                if value is None:  # pragma: no cover
                     return
                 if value == "":
                     self.filters.append(VaultFilter(key_filter=key))
@@ -302,7 +313,7 @@ class Application:
                 self.delete_value()
             case "delete_inline_tag":
                 self.delete_inline_tag()
-            case _:
+            case _:  # pragma: no cover
                 return

     def application_rename_metadata(self) -> None:
@@ -325,7 +336,7 @@ class Application:
                 self.rename_value()
             case "rename_inline_tag":
                 self.rename_inline_tag()
-            case _:
+            case _:  # pragma: no cover
                 return

     def commit_changes(self) -> bool:
@@ -373,7 +384,7 @@ class Application:
         key_to_delete = self.questions.ask_existing_keys_regex(
             question="Regex for the key(s) you'd like to delete?"
         )
-        if key_to_delete is None:
+        if key_to_delete is None:  # pragma: no cover
             return

         num_changed = self.vault.delete_metadata(key_to_delete)
@@ -390,12 +401,12 @@ class Application:
     def delete_value(self) -> None:
         """Delete a value from the vault."""
         key = self.questions.ask_existing_key(question="Which key contains the value to delete?")
-        if key is None:
+        if key is None:  # pragma: no cover
             return

         questions2 = Questions(vault=self.vault, key=key)
         value = questions2.ask_existing_value_regex(question="Regex for the value to delete")
-        if value is None:
+        if value is None:  # pragma: no cover
             return

         num_changed = self.vault.delete_metadata(key, value)
@@ -409,19 +420,6 @@ class Application:

             return

-    def _load_vault(self) -> None:
-        """Load the vault."""
-
-        if len(self.filters) == 0:
-            self.vault: Vault = Vault(config=self.config, dry_run=self.dry_run)
-        else:
-            self.vault = Vault(config=self.config, dry_run=self.dry_run, filters=self.filters)
-
-        alerts.success(
-            f"Loaded {len(self.vault.notes_in_scope)} notes from {len(self.vault.all_notes)} total notes"
-        )
-        self.questions = Questions(vault=self.vault)
-
     def noninteractive_export_csv(self, path: Path) -> None:
         """Export the vault metadata to CSV."""
         self._load_vault()
@@ -440,11 +438,11 @@ class Application:
         original_key = self.questions.ask_existing_key(
             question="Which key would you like to rename?"
         )
-        if original_key is None:
+        if original_key is None:  # pragma: no cover
             return

         new_key = self.questions.ask_new_key()
-        if new_key is None:
+        if new_key is None:  # pragma: no cover
             return

         num_changed = self.vault.rename_metadata(original_key, new_key)
@@ -460,11 +458,11 @@ class Application:
         """Rename an inline tag."""

         original_tag = self.questions.ask_existing_inline_tag(question="Which tag to rename?")
-        if original_tag is None:
+        if original_tag is None:  # pragma: no cover
             return

         new_tag = self.questions.ask_new_tag("New tag")
-        if new_tag is None:
+        if new_tag is None:  # pragma: no cover
             return

         num_changed = self.vault.rename_inline_tag(original_tag, new_tag)
@@ -480,16 +478,16 @@ class Application:
     def rename_value(self) -> None:
         """Rename a value in the vault."""
         key = self.questions.ask_existing_key(question="Which key contains the value to rename?")
-        if key is None:
+        if key is None:  # pragma: no cover
             return

         question_key = Questions(vault=self.vault, key=key)
         value = question_key.ask_existing_value(question="Which value would you like to rename?")
-        if value is None:
+        if value is None:  # pragma: no cover
             return

         new_value = question_key.ask_new_value()
-        if new_value is None:
+        if new_value is None:  # pragma: no cover
             return

         num_changes = self.vault.rename_metadata(key, value, new_value)
@@ -511,7 +509,7 @@ class Application:
         answer = self.questions.ask_confirm(
             question="View diffs of individual files?", default=False
         )
-        if not answer:
+        if not answer:  # pragma: no cover
             return

         choices: list[dict[str, Any] | questionary.Separator] = [questionary.Separator()]
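The add-metadata flow above now routes `FRONTMATTER` and `INLINE` through a single branch with an or-pattern in the `match` statement. A minimal, self-contained sketch of that dispatch style — the enum and handler here are simplified stand-ins, not the project's classes:

```python
from enum import Enum


class MetadataType(Enum):
    FRONTMATTER = "frontmatter"
    INLINE = "inline metadata"
    TAGS = "inline tags"


def add_metadata(area: MetadataType) -> str:
    match area:
        case MetadataType.FRONTMATTER | MetadataType.INLINE:
            # both areas share the key/value prompts and the vault call
            return f"add key/value to {area.value}"
        case MetadataType.TAGS:
            return "not supported yet"
        case _:
            return "no-op"


for area in MetadataType:
    print(area.name, "->", add_metadata(area))
```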
@@ -11,3 +11,17 @@ class MetadataType(Enum):
     TAGS = "Inline Tags"
     KEYS = "Metadata Keys Only"
     ALL = "All Metadata"
+
+
+class InsertLocation(Enum):
+    """Location to add metadata to notes.
+
+    TOP: Directly after frontmatter.
+    AFTER_TITLE: After a header following frontmatter.
+    BOTTOM: The bottom of the note
+
+    """
+
+    TOP = "Top"
+    AFTER_TITLE = "Header"
+    BOTTOM = "Bottom"
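Because `InsertLocation` is a plain `Enum`, iterating it yields the members in definition order, which is how the interactive prompt added later in this diff builds its choice list. A small illustrative sketch (not the project's code):

```python
from enum import Enum


class InsertLocation(Enum):
    TOP = "Top"
    AFTER_TITLE = "Header"
    BOTTOM = "Bottom"


# Members iterate in definition order, so the prompt's choice list is stable.
choices = [{"name": location.value, "value": location} for location in InsertLocation]
print(choices)
```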
@@ -9,6 +9,8 @@ from rich.console import Console
 from rich.table import Table
 from ruamel.yaml import YAML

+from obsidian_metadata._utils.alerts import logger as log
+from obsidian_metadata._utils import alerts
 from obsidian_metadata._utils import (
     clean_dictionary,
     dict_contains,
@@ -234,7 +236,7 @@ class Frontmatter:
             dict: Metadata from the note.
         """
         try:
-            frontmatter_block: str = PATTERNS.frontmatt_block_no_separators.search(
+            frontmatter_block: str = PATTERNS.frontmatt_block_strip_separators.search(
                 file_content
             ).group("frontmatter")
         except AttributeError:
@@ -388,7 +390,7 @@ class InlineMetadata:
     """Representation of inline metadata in the form of `key:: value`."""

     def __init__(self, file_content: str):
-        self.dict: dict[str, list[str]] = self._grab_inline_metadata(file_content)
+        self.dict: dict[str, list[str]] = self.grab_inline_metadata(file_content)
         self.dict_original: dict[str, list[str]] = self.dict.copy()

     def __repr__(self) -> str:  # pragma: no cover
@@ -399,32 +401,8 @@ class InlineMetadata:
         """
         return f"InlineMetadata(inline_metadata={self.dict})"

-    def _grab_inline_metadata(self, file_content: str) -> dict[str, list[str]]:
-        """Grab inline metadata from a note.
-
-        Returns:
-            dict[str, str]: Inline metadata from the note.
-        """
-        content = remove_markdown_sections(
-            file_content,
-            strip_codeblocks=True,
-            strip_inlinecode=True,
-            strip_frontmatter=True,
-        )
-        all_results = PATTERNS.find_inline_metadata.findall(content)
-        stripped_null_values = [tuple(filter(None, x)) for x in all_results]
-
-        inline_metadata: dict[str, list[str]] = {}
-        for k, v in stripped_null_values:
-            if k in inline_metadata:
-                inline_metadata[k].append(str(v))
-            else:
-                inline_metadata[k] = [str(v)]
-
-        return clean_dictionary(inline_metadata)
-
     def add(self, key: str, value: str | list[str] = None) -> bool:
-        """Add a key and value to the frontmatter.
+        """Add a key and value to the inline metadata.

         Args:
             key (str): Key to add.
@@ -433,8 +411,26 @@ class InlineMetadata:
         Returns:
             bool: True if the metadata was added
         """
-        # TODO: implement adding to inline metadata which requires knowing where in the note the metadata is to be added. In addition, unlike frontmatter, it is not possible to have multiple values for a key.
-        pass
+        if value is None:
+            if key not in self.dict:
+                self.dict[key] = []
+                return True
+            return False
+
+        if isinstance(value, list):
+            value = value[0]
+
+        if key not in self.dict:
+            self.dict[key] = [value]
+            return True
+
+        if key in self.dict and len(self.dict[key]) > 0:
+            if value in self.dict[key]:
+                return False
+            raise ValueError(f"'{key}' not empty")
+
+        self.dict[key].append(value)
+        return True

     def contains(self, key: str, value: str = None, is_regex: bool = False) -> bool:
         """Check if a key or value exists in the inline metadata.
@@ -477,6 +473,30 @@ class InlineMetadata:

         return False

+    def grab_inline_metadata(self, file_content: str) -> dict[str, list[str]]:
+        """Grab inline metadata from a note.
+
+        Returns:
+            dict[str, str]: Inline metadata from the note.
+        """
+        content = remove_markdown_sections(
+            file_content,
+            strip_codeblocks=True,
+            strip_inlinecode=True,
+            strip_frontmatter=True,
+        )
+        all_results = PATTERNS.find_inline_metadata.findall(content)
+        stripped_null_values = [tuple(filter(None, x)) for x in all_results]
+
+        inline_metadata: dict[str, list[str]] = {}
+        for k, v in stripped_null_values:
+            if k in inline_metadata:
+                inline_metadata[k].append(str(v))
+            else:
+                inline_metadata[k] = [str(v)]
+
+        return clean_dictionary(inline_metadata)
+
     def has_changes(self) -> bool:
         """Check if the metadata has changes.
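`grab_inline_metadata()` above strips frontmatter and code blocks, runs a `findall`, drops empty capture groups, and groups values by key. A much-simplified sketch of that idea with an invented note and a deliberately simpler regex than `PATTERNS.find_inline_metadata`:

```python
import re

# Deliberately simpler than the project's pattern: one key/value pair per line.
FIND_INLINE = re.compile(r"^([-_\w/]+)::[ ]?(.*)$", re.MULTILINE)

note = """\
area:: frontend
status:: in progress
status:: needs review
Plain prose without inline metadata.
"""

inline: dict[str, list[str]] = {}
for key, value in FIND_INLINE.findall(note):
    inline.setdefault(key, []).append(value)

print(inline)
# {'area': ['frontend'], 'status': ['in progress', 'needs review']}
```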
@@ -9,13 +9,13 @@ import rich.repr
 import typer
 from rich.console import Console
 from rich.table import Table

 from obsidian_metadata._utils import alerts
 from obsidian_metadata._utils.alerts import logger as log
 from obsidian_metadata.models import (
     Frontmatter,
     InlineMetadata,
     InlineTags,
+    InsertLocation,
     MetadataType,
     Patterns,
 )
@@ -117,24 +117,37 @@ class Note:
                 _v = re.escape(_v)
                 self.sub(f"{_k}:: ?{_v}", f"{_k}:: {value_2}", is_regex=True)

-    def add_metadata(self, area: MetadataType, key: str, value: str | list[str] = None) -> bool:
-        """Add metadata to the note.
+    def add_metadata(
+        self,
+        area: MetadataType,
+        key: str,
+        value: str | list[str] = None,
+        location: InsertLocation = None,
+    ) -> bool:
+        """Add metadata to the note if it does not already exist.

         Args:
             area (MetadataType): Area to add metadata to.
             key (str): Key to add.
+            location (InsertLocation, optional): Location to add inline metadata and tags.
             value (str, optional): Value to add.

         Returns:
             bool: Whether the metadata was added.
         """
         if area is MetadataType.FRONTMATTER and self.frontmatter.add(key, value):
-            self.replace_frontmatter()
+            self.update_frontmatter()
             return True

-        if area is MetadataType.INLINE:
-            # TODO: implement adding to inline metadata
-            pass
+        try:
+            if area is MetadataType.INLINE and self.inline_metadata.add(key, value):
+                line = f"{key}:: " if value is None else f"{key}:: {value}"
+                self.insert(new_string=line, location=location)
+                return True
+
+        except ValueError as e:
+            log.warning(f"Could not add metadata to {self.note_path}: {e}")
+            return False

         if area is MetadataType.TAGS:
             # TODO: implement adding to intext tags
@@ -142,24 +155,6 @@ class Note:

         return False

-    def append(self, string_to_append: str, allow_multiple: bool = False) -> None:
-        """Append a string to the end of a note.
-
-        Args:
-            string_to_append (str): String to append to the note.
-            allow_multiple (bool): Whether to allow appending the string if it already exists in the note.
-        """
-        if allow_multiple:
-            self.file_content += f"\n{string_to_append}"
-        else:
-            if len(re.findall(re.escape(string_to_append), self.file_content)) == 0:
-                self.file_content += f"\n{string_to_append}"
-
-    def commit_changes(self) -> None:
-        """Commit changes to the note to disk."""
-        # TODO: rewrite frontmatter if it has changed
-        pass
-
     def contains_inline_tag(self, tag: str, is_regex: bool = False) -> bool:
         """Check if a note contains the specified inline tag.

@@ -235,14 +230,14 @@ class Note:

         if value is None:
             if self.frontmatter.delete(key):
-                self.replace_frontmatter()
+                self.update_frontmatter()
                 changed_value = True
             if self.inline_metadata.delete(key):
                 self._delete_inline_metadata(key, value)
                 changed_value = True
         else:
             if self.frontmatter.delete(key, value):
-                self.replace_frontmatter()
+                self.update_frontmatter()
                 changed_value = True
             if self.inline_metadata.delete(key, value):
                 self._delete_inline_metadata(key, value)
@@ -272,6 +267,53 @@ class Note:

         return False

+    def insert(
+        self,
+        new_string: str,
+        location: InsertLocation,
+        allow_multiple: bool = False,
+    ) -> None:
+        """Insert a string at the top of a note.
+
+        Args:
+            new_string (str): String to insert at the top of the note.
+            allow_multiple (bool): Whether to allow inserting the string if it already exists in the note.
+            location (InsertLocation): Location to insert the string.
+        """
+        if not allow_multiple and len(re.findall(re.escape(new_string), self.file_content)) > 0:
+            return
+
+        match location:  # noqa: E999
+            case InsertLocation.BOTTOM:
+                self.file_content += f"\n{new_string}"
+            case InsertLocation.TOP:
+                try:
+                    top = PATTERNS.frontmatter_block.search(self.file_content).group("frontmatter")
+                except AttributeError:
+                    top = ""
+
+                if top == "":
+                    self.file_content = f"{new_string}\n{self.file_content}"
+                else:
+                    new_string = f"{top}\n{new_string}"
+                    top = re.escape(top)
+                    self.sub(top, new_string, is_regex=True)
+            case InsertLocation.AFTER_TITLE:
+                try:
+                    top = PATTERNS.top_with_header.search(self.file_content).group("top")
+                except AttributeError:
+                    top = ""
+
+                if top == "":
+                    self.file_content = f"{new_string}\n{self.file_content}"
+                else:
+                    new_string = f"{top}\n{new_string}"
+                    top = re.escape(top)
+                    self.sub(top, new_string, is_regex=True)
+            case _:
+                raise ValueError(f"Invalid location: {location}")
+                pass
+
     def print_note(self) -> None:
         """Print the note to the console."""
         print(self.file_content)
@@ -293,28 +335,6 @@ class Note:

         Console().print(table)

-    def replace_frontmatter(self, sort_keys: bool = False) -> None:
-        """Replace the frontmatter in the note with the current frontmatter object."""
-        try:
-            current_frontmatter = PATTERNS.frontmatt_block_with_separators.search(
-                self.file_content
-            ).group("frontmatter")
-        except AttributeError:
-            current_frontmatter = None
-
-        if current_frontmatter is None and self.frontmatter.dict == {}:
-            return
-
-        new_frontmatter = self.frontmatter.to_yaml(sort_keys=sort_keys)
-        new_frontmatter = f"---\n{new_frontmatter}---\n"
-
-        if current_frontmatter is None:
-            self.file_content = new_frontmatter + self.file_content
-            return
-
-        current_frontmatter = re.escape(current_frontmatter)
-        self.sub(current_frontmatter, new_frontmatter, is_regex=True)
-
     def rename_inline_tag(self, tag_1: str, tag_2: str) -> bool:
         """Rename an inline tag from the note ONLY if it's not in the metadata as well.

@@ -351,14 +371,14 @@ class Note:
         changed_value: bool = False
         if value_2 is None:
             if self.frontmatter.rename(key, value_1):
-                self.replace_frontmatter()
+                self.update_frontmatter()
                 changed_value = True
             if self.inline_metadata.rename(key, value_1):
                 self._rename_inline_metadata(key, value_1)
                 changed_value = True
         else:
             if self.frontmatter.rename(key, value_1, value_2):
-                self.replace_frontmatter()
+                self.update_frontmatter()
                 changed_value = True
             if self.inline_metadata.rename(key, value_1, value_2):
                 self._rename_inline_metadata(key, value_1, value_2)
@@ -382,6 +402,28 @@ class Note:

         self.file_content = re.sub(pattern, replacement, self.file_content, re.MULTILINE)

+    def update_frontmatter(self, sort_keys: bool = False) -> None:
+        """Replace the frontmatter in the note with the current frontmatter object."""
+        try:
+            current_frontmatter = PATTERNS.frontmatter_block.search(self.file_content).group(
+                "frontmatter"
+            )
+        except AttributeError:
+            current_frontmatter = None
+
+        if current_frontmatter is None and self.frontmatter.dict == {}:
+            return
+
+        new_frontmatter = self.frontmatter.to_yaml(sort_keys=sort_keys)
+        new_frontmatter = f"---\n{new_frontmatter}---\n"
+
+        if current_frontmatter is None:
+            self.file_content = new_frontmatter + self.file_content
+            return
+
+        current_frontmatter = re.escape(current_frontmatter)
+        self.sub(current_frontmatter, new_frontmatter, is_regex=True)
+
     def write(self, path: Path = None) -> None:
         """Write the note's content to disk.
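The `TOP` and `AFTER_TITLE` branches of `Note.insert()` above share one trick: capture the "top" of the note with a regex, then substitute it with itself plus the new line. A stripped-down sketch of that substitution — the sample note is invented, and the pattern mirrors the shape of `frontmatter_block` without being the project's object:

```python
import re

note = """---
tags: [demo]
---
# Title

Body.
"""

# Capture everything through the closing frontmatter fence, as the TOP branch does.
frontmatter_block = re.compile(r"^\s*(?P<frontmatter>---.*?---)", flags=re.DOTALL)

new_line = "new_key:: new_value"
match = frontmatter_block.search(note)
top = match.group("frontmatter") if match else ""

if top == "":
    note = f"{new_line}\n{note}"  # no frontmatter found: prepend to the whole note
else:
    # Replace the captured top with itself plus the inserted line, exactly once.
    note = re.sub(re.escape(top), f"{top}\n{new_line}", note, count=1)

print(note)
```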
@@ -22,23 +22,41 @@ class Patterns:
     find_inline_metadata: Pattern[str] = re.compile(
         r"""                                                    # First look for in-text key values
         (?:^\[| \[)                                             # Find key with starting bracket
-        ([-_\w\d\/\*\u263a-\U0001f645]+?)::[ ]?                 # Find key
+        ([-_\w\d\/\*\u263a-\U0001f999]+?)::[ ]?                 # Find key
         (.*?)\]                                                 # Find value until closing bracket
         |                                                       # Else look for key values at start of line
         (?:^|[^ \w\d]+| \[)                                     # Any non-word or non-digit character
-        ([-_\w\d\/\*\u263a-\U0001f645]+?)::(?!\n)(?:[ ](?!\n))? # Capture the key if not a new line
+        ([-_\w\d\/\*\u263a-\U0001f999]+?)::(?!\n)(?:[ ](?!\n))? # Capture the key if not a new line
         (.*?)$                                                  # Capture the value
         """,
         re.X | re.MULTILINE,
     )

-    frontmatt_block_with_separators: Pattern[str] = re.compile(
-        r"^\s*(?P<frontmatter>---.*?---)", flags=re.DOTALL
-    )
-    frontmatt_block_no_separators: Pattern[str] = re.compile(
+    frontmatter_block: Pattern[str] = re.compile(r"^\s*(?P<frontmatter>---.*?---)", flags=re.DOTALL)
+    frontmatt_block_strip_separators: Pattern[str] = re.compile(
         r"^\s*---(?P<frontmatter>.*?)---", flags=re.DOTALL
     )
     # This pattern will return a tuple of 4 values, two will be empty and will need to be stripped before processing further

-    validate_key_text: Pattern[str] = re.compile(r"[^-_\w\d\/\*\u263a-\U0001f645]")
+    top_with_header: Pattern[str] = re.compile(
+        r"""^\s*                                      # Start of note
+        (?P<top>                                      # Capture the top of the note
+            (---.*?---)?                              # Frontmatter, if it exists
+            \s*                                       # Any whitespace
+            (                                         # Full header, if it exists
+                \#+[ ]                                # Match start of any header level
+                (                                     # Text of header
+                    [\w\d]+                           # Word or digit
+                    |                                 # Or
+                    [\[\]\(\)\+\{\}\"'\-\.\/\*\$\| ]+ # Special characters
+                    |                                 # Or
+                    [\u263a-\U0001f999]+              # Emoji
+                )+                                    # End of header text
+            )?                                        # End of full header
+        )                                             # End capture group
+        """,
+        flags=re.DOTALL | re.X,
+    )
+
+    validate_key_text: Pattern[str] = re.compile(r"[^-_\w\d\/\*\u263a-\U0001f999]")
     validate_tag_text: Pattern[str] = re.compile(r"[ \|,;:\*\(\)\[\]\\\.\n#&]")
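A quick illustration of the difference between the renamed frontmatter patterns above: `frontmatter_block` keeps the `---` separators in its captured group, while `frontmatt_block_strip_separators` drops them. The sample note is invented:

```python
import re

with_separators = re.compile(r"^\s*(?P<frontmatter>---.*?---)", flags=re.DOTALL)
strip_separators = re.compile(r"^\s*---(?P<frontmatter>.*?)---", flags=re.DOTALL)

note = "---\ntags: [demo]\n---\n# Title\n"

print(repr(with_separators.search(note).group("frontmatter")))
# '---\ntags: [demo]\n---'
print(repr(strip_separators.search(note).group("frontmatter")))
# '\ntags: [demo]\n'
```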
@@ -12,7 +12,7 @@ from typing import Any
 import questionary
 import typer

-from obsidian_metadata.models.enums import MetadataType
+from obsidian_metadata.models.enums import InsertLocation, MetadataType
 from obsidian_metadata.models.patterns import Patterns
 from obsidian_metadata.models.vault import Vault

@@ -76,6 +76,7 @@ class Questions:
             ("qmark", "bold"),
             ("question", "bold"),
             ("separator", "fg:#808080"),
+            ("answer", "fg:#FF9D00 bold"),
             ("instruction", "fg:#808080"),
             ("highlighted", "bold underline"),
             ("text", ""),
@@ -405,6 +406,23 @@ class Questions:
             qmark="INPUT |",
         ).ask()

+    def ask_metadata_location(
+        self, question: str = "Where in a note should we add metadata"
+    ) -> InsertLocation:  # pragma: no cover
+        """Ask the user for the location within a note to place new metadata.
+
+        Returns:
+            InsertLocation: The location within a note to place new metadata.
+        """
+        choices = []
+        for metadata_location in InsertLocation:
+            choices.append({"name": metadata_location.value, "value": metadata_location})
+
+        return self.ask_selection(
+            choices=choices,
+            question="Select the location for the metadata",
+        )
+
     def ask_new_key(self, question: str = "New key name") -> str:  # pragma: no cover
         """Ask the user for a new metadata key.
@@ -13,10 +13,10 @@ from rich.progress import Progress, SpinnerColumn, TextColumn
 from rich.prompt import Confirm
 from rich.table import Table

-from obsidian_metadata._config import VaultConfig
+from obsidian_metadata._config.config import Config, VaultConfig
 from obsidian_metadata._utils import alerts
 from obsidian_metadata._utils.alerts import logger as log
-from obsidian_metadata.models import MetadataType, Note, VaultMetadata
+from obsidian_metadata.models import InsertLocation, MetadataType, Note, VaultMetadata


 @dataclass
@@ -46,8 +46,10 @@ class Vault:
         dry_run: bool = False,
         filters: list[VaultFilter] = [],
     ):
+        self.config = config.config
         self.vault_path: Path = config.path
         self.name = self.vault_path.name
+        self.insert_location: InsertLocation = self._find_insert_location()
         self.dry_run: bool = dry_run
         self.backup_path: Path = self.vault_path.parent / f"{self.vault_path.name}.bak"
         self.exclude_paths: list[Path] = []
@@ -110,6 +112,21 @@ class Vault:

         return notes_list

+    def _find_insert_location(self) -> InsertLocation:
+        """Find the insert location for a note.
+
+        Returns:
+            InsertLocation: Insert location for the note.
+        """
+        if self.config["insert_location"].upper() == "TOP":
+            return InsertLocation.TOP
+        elif self.config["insert_location"].upper() == "HEADER":
+            return InsertLocation.AFTER_TITLE
+        elif self.config["insert_location"].upper() == "BOTTOM":
+            return InsertLocation.BOTTOM
+        else:
+            return InsertLocation.BOTTOM
+
     def _find_markdown_notes(self) -> list[Path]:
         """Build list of all markdown files in the vault.

@@ -145,21 +162,31 @@ class Vault:
                 metadata=_note.inline_tags.list,
             )

-    def add_metadata(self, area: MetadataType, key: str, value: str | list[str] = None) -> int:
-        """Add metadata to all notes in the vault.
+    def add_metadata(
+        self,
+        area: MetadataType,
+        key: str,
+        value: str | list[str] = None,
+        location: InsertLocation = None,
+    ) -> int:
+        """Add metadata to all notes in the vault which do not already contain it.

         Args:
             area (MetadataType): Area of metadata to add to.
             key (str): Key to add.
             value (str|list, optional): Value to add.
+            location (InsertLocation, optional): Location to insert metadata. (Defaults to `vault.config.insert_location`)

         Returns:
             int: Number of notes updated.
         """
+        if location is None:
+            location = self.insert_location
+
         num_changed = 0

         for _note in self.notes_in_scope:
-            if _note.add_metadata(area, key, value):
+            if _note.add_metadata(area, key, value, location):
                 num_changed += 1

         if num_changed > 0:
@@ -258,91 +285,6 @@ class Vault:

         return num_changed

-    def get_changed_notes(self) -> list[Note]:
-        """Returns a list of notes that have changes.
-
-        Returns:
-            list[Note]: List of notes that have changes.
-        """
-        changed_notes = []
-        for _note in self.notes_in_scope:
-            if _note.has_changes():
-                changed_notes.append(_note)
-
-        changed_notes = sorted(changed_notes, key=lambda x: x.note_path)
-        return changed_notes
-
-    def info(self) -> None:
-        """Print information about the vault."""
-        table = Table(show_header=False)
-        table.add_row("Vault", str(self.vault_path))
-        if self.backup_path.exists():
-            table.add_row("Backup path", str(self.backup_path))
-        else:
-            table.add_row("Backup", "None")
-        table.add_row("Notes in scope", str(len(self.notes_in_scope)))
-        table.add_row("Notes excluded from scope", str(self.num_excluded_notes()))
-        table.add_row("Active filters", str(len(self.filters)))
-        table.add_row("Notes with changes", str(len(self.get_changed_notes())))
-
-        Console().print(table)
-
-    def list_editable_notes(self) -> None:
-        """Print a list of notes within the scope that are being edited."""
-        table = Table(title="Notes in current scope", show_header=False, box=box.HORIZONTALS)
-        for _n, _note in enumerate(self.notes_in_scope, start=1):
-            table.add_row(str(_n), str(_note.note_path.relative_to(self.vault_path)))
-        Console().print(table)
-
-    def num_excluded_notes(self) -> int:
-        """Count number of excluded notes."""
-        return len(self.all_notes) - len(self.notes_in_scope)
-
-    def rename_metadata(self, key: str, value_1: str, value_2: str = None) -> int:
-        """Renames a key or key-value pair in the note's metadata.
-
-        If no value is provided, will rename an entire key.
-
-        Args:
-            key (str): Key to rename.
-            value_1 (str): Value to rename or new name of key if no value_2 is provided.
-            value_2 (str, optional): New value.
-
-        Returns:
-            int: Number of notes that had metadata renamed.
-        """
-        num_changed = 0
-
-        for _note in self.notes_in_scope:
-            if _note.rename_metadata(key, value_1, value_2):
-                num_changed += 1
-
-        if num_changed > 0:
-            self._rebuild_vault_metadata()
-
-        return num_changed
-
-    def rename_inline_tag(self, old_tag: str, new_tag: str) -> int:
-        """Rename an inline tag in the vault.
-
-        Args:
-            old_tag (str): Old tag name.
-            new_tag (str): New tag name.
-
-        Returns:
-            int: Number of notes that had inline tags renamed.
-        """
-        num_changed = 0
-
-        for _note in self.notes_in_scope:
-            if _note.rename_inline_tag(old_tag, new_tag):
-                num_changed += 1
-
-        if num_changed > 0:
-            self._rebuild_vault_metadata()
-
-        return num_changed
-
     def export_metadata(self, path: str, format: str = "csv") -> None:
         """Write metadata to a csv file.

@@ -384,3 +326,88 @@ class Vault:

         with open(export_file, "w", encoding="UTF8") as f:
             json.dump(dict_to_dump, f, indent=4, ensure_ascii=False, sort_keys=True)
+
+    def get_changed_notes(self) -> list[Note]:
+        """Returns a list of notes that have changes.
+
+        Returns:
+            list[Note]: List of notes that have changes.
+        """
+        changed_notes = []
+        for _note in self.notes_in_scope:
+            if _note.has_changes():
+                changed_notes.append(_note)
+
+        changed_notes = sorted(changed_notes, key=lambda x: x.note_path)
+        return changed_notes
+
+    def info(self) -> None:
+        """Print information about the vault."""
+        table = Table(show_header=False)
+        table.add_row("Vault", str(self.vault_path))
+        if self.backup_path.exists():
+            table.add_row("Backup path", str(self.backup_path))
+        else:
+            table.add_row("Backup", "None")
+        table.add_row("Notes in scope", str(len(self.notes_in_scope)))
+        table.add_row("Notes excluded from scope", str(self.num_excluded_notes()))
+        table.add_row("Active filters", str(len(self.filters)))
+        table.add_row("Notes with changes", str(len(self.get_changed_notes())))
+
+        Console().print(table)
+
+    def list_editable_notes(self) -> None:
+        """Print a list of notes within the scope that are being edited."""
+        table = Table(title="Notes in current scope", show_header=False, box=box.HORIZONTALS)
+        for _n, _note in enumerate(self.notes_in_scope, start=1):
+            table.add_row(str(_n), str(_note.note_path.relative_to(self.vault_path)))
+        Console().print(table)
+
+    def num_excluded_notes(self) -> int:
+        """Count number of excluded notes."""
+        return len(self.all_notes) - len(self.notes_in_scope)
+
+    def rename_inline_tag(self, old_tag: str, new_tag: str) -> int:
+        """Rename an inline tag in the vault.
+
+        Args:
+            old_tag (str): Old tag name.
+            new_tag (str): New tag name.
+
+        Returns:
+            int: Number of notes that had inline tags renamed.
+        """
+        num_changed = 0
+
+        for _note in self.notes_in_scope:
+            if _note.rename_inline_tag(old_tag, new_tag):
+                num_changed += 1
+
+        if num_changed > 0:
+            self._rebuild_vault_metadata()
+
+        return num_changed
+
+    def rename_metadata(self, key: str, value_1: str, value_2: str = None) -> int:
+        """Renames a key or key-value pair in the note's metadata.
+
+        If no value is provided, will rename an entire key.
+
+        Args:
+            key (str): Key to rename.
+            value_1 (str): Value to rename or new name of key if no value_2 is provided.
+            value_2 (str, optional): New value.
+
+        Returns:
+            int: Number of notes that had metadata renamed.
+        """
+        num_changed = 0
+
+        for _note in self.notes_in_scope:
+            if _note.rename_metadata(key, value_1, value_2):
+                num_changed += 1
+
+        if num_changed > 0:
+            self._rebuild_vault_metadata()
+
+        return num_changed
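`_find_insert_location()` above accepts the strings `TOP`, `HEADER`, and `BOTTOM` and falls back to `BOTTOM` for anything else. A table-driven sketch of the same mapping, shown only for comparison with the if/elif chain (it is not the project's code):

```python
from enum import Enum


class InsertLocation(Enum):
    TOP = "Top"
    AFTER_TITLE = "Header"
    BOTTOM = "Bottom"


_LOCATIONS = {
    "TOP": InsertLocation.TOP,
    "HEADER": InsertLocation.AFTER_TITLE,
    "BOTTOM": InsertLocation.BOTTOM,
}


def find_insert_location(config: dict) -> InsertLocation:
    """Mirror of the fallback logic: unknown or missing values become BOTTOM."""
    return _LOCATIONS.get(str(config.get("insert_location", "")).upper(), InsertLocation.BOTTOM)


assert find_insert_location({"insert_location": "header"}) is InsertLocation.AFTER_TITLE
assert find_insert_location({}) is InsertLocation.BOTTOM
```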
@@ -69,6 +69,33 @@ def test_add_metadata_frontmatter_success(test_application, mocker, capsys) -> N
     assert captured.out == Regex(r"SUCCESS +\| Added metadata to.*\d+.*notes", re.DOTALL)


+def test_add_metadata_inline_success(test_application, mocker, capsys) -> None:
+    """Test adding new metadata to the vault."""
+    app = test_application
+    app._load_vault()
+    mocker.patch(
+        "obsidian_metadata.models.application.Questions.ask_application_main",
+        side_effect=["add_metadata", KeyError],
+    )
+    mocker.patch(
+        "obsidian_metadata.models.application.Questions.ask_area",
+        return_value=MetadataType.INLINE,
+    )
+    mocker.patch(
+        "obsidian_metadata.models.application.Questions.ask_new_key",
+        return_value="new_key",
+    )
+    mocker.patch(
+        "obsidian_metadata.models.application.Questions.ask_new_value",
+        return_value="new_key_value",
+    )
+
+    with pytest.raises(KeyError):
+        app.application_main()
+    captured = capsys.readouterr()
+    assert captured.out == Regex(r"SUCCESS +\| Added metadata to.*\d+.*notes", re.DOTALL)
+
+
 def test_delete_inline_tag(test_application, mocker, capsys) -> None:
     """Test renaming an inline tag."""
     app = test_application
@@ -49,6 +49,7 @@ def test_multiple_vaults_okay() -> None:
    assert config.config == {
        "Sample Vault": {
            "exclude_paths": [".git", ".obsidian", "ignore_folder"],
            "insert_location": "top",
            "path": "tests/fixtures/sample_vault",
        },
        "Test Vault": {
@@ -74,6 +75,7 @@ def test_single_vault() -> None:
        "Test Vault": {
            "exclude_paths": [".git", ".obsidian", "ignore_folder"],
            "path": "tests/fixtures/test_vault",
            "insert_location": "BOTTOM",
        }
    }
    assert len(config.vaults) == 1
@@ -104,7 +106,14 @@ def test_no_config_no_vault(tmp_path, mocker) -> None:
    path = "{str(fake_vault)}"

    # Folders within the vault to ignore when indexing metadata
    exclude_paths = [".git", ".obsidian"]"""
    exclude_paths = [".git", ".obsidian"]

    # Location to add metadata. One of:
    #    TOP:            Directly after frontmatter.
    #    AFTER_TITLE:    After a header following frontmatter.
    #    BOTTOM:         The bottom of the note
    insert_location = "BOTTOM\"
    """

    assert config_file.exists() is True
    assert content == dedent(sample_config)
@@ -114,5 +123,6 @@ def test_no_config_no_vault(tmp_path, mocker) -> None:
        "Vault 1": {
            "path": str(fake_vault),
            "exclude_paths": [".git", ".obsidian"],
            "insert_location": "BOTTOM",
        }
    }
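Note (not part of the commit): an illustrative sketch, using only the standard library, of how a config like the one this test expects could be read back and its insert_location mapped onto an enum. InsertLocationSketch below is a local stand-in for the library's InsertLocation; the real enum lives in obsidian_metadata.models.enums.

import tomllib  # Python 3.11+
from enum import Enum


class InsertLocationSketch(Enum):
    TOP = "TOP"                  # directly after frontmatter
    AFTER_TITLE = "AFTER_TITLE"  # after a header following frontmatter
    BOTTOM = "BOTTOM"            # bottom of the note


config_text = """
["Vault 1"]
path = "/path/to/vault"
exclude_paths = [".git", ".obsidian"]
insert_location = "BOTTOM"
"""

config = tomllib.loads(config_text)
for name, vault in config.items():
    # .upper() tolerates lowercase values like the "top" used in the fixture above.
    location = InsertLocationSketch[vault.get("insert_location", "BOTTOM").upper()]
    print(name, vault["path"], location)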
@@ -37,6 +37,27 @@ def sample_note(tmp_path) -> Path:
    dest_file.unlink()


@pytest.fixture()
def short_note(tmp_path) -> Path:
    """Fixture which creates a temporary short note file."""
    source_file1: Path = Path("tests/fixtures/short_textfile.md")
    source_file2: Path = Path("tests/fixtures/no_metadata.md")
    if not source_file1.exists():
        raise FileNotFoundError(f"Original file not found: {source_file1}")
    if not source_file2.exists():
        raise FileNotFoundError(f"Original file not found: {source_file2}")

    dest_file1: Path = Path(tmp_path / source_file1.name)
    dest_file2: Path = Path(tmp_path / source_file2.name)
    shutil.copy(source_file1, dest_file1)
    shutil.copy(source_file2, dest_file2)
    yield dest_file1, dest_file2

    # after test - remove fixtures
    dest_file1.unlink()
    dest_file2.unlink()


@pytest.fixture()
def sample_vault(tmp_path) -> Path:
    """Fixture which creates a sample vault."""
1 tests/fixtures/multiple_vaults.toml vendored
@@ -1,5 +1,6 @@
["Sample Vault"]
exclude_paths = [".git", ".obsidian", "ignore_folder"]
insert_location = "top"
path = "tests/fixtures/sample_vault"
["Test Vault"]
exclude_paths = [".git", ".obsidian", "ignore_folder"]
1 tests/fixtures/no_metadata.md vendored Normal file
@@ -0,0 +1 @@
Lorem ipsum dolor sit amet.
@@ -1,4 +1,3 @@

area:: frontmatter
date_created:: 2022-12-22
date_modified:: 2022-12-22
@@ -11,9 +10,12 @@ on_one_note:: one
#food/fruit/pear
#dinner #lunch #breakfast

# note header

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.

Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?

### header 3

At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat.
@@ -1,3 +1,4 @@
# Header 1

area:: frontmatter
date_created:: 2022-12-22
@@ -10,9 +11,12 @@ type:: article
#food/fruit/pear
#dinner #lunch #breakfast

## Header 2

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.

Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur?

### Header 3

At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat.
@@ -24,13 +24,16 @@ type:: [[article]]
tags:: from_inline_metadata
**bold_key**:: **bold** key value

# Note header


Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.

## Header 2

Sed ut perspiciatis unde omnis iste natus error sit voluptatem accusantium doloremque laudantium, totam rem aperiam, [in_text_key:: in-text value] eaque ipsa quae ab illo inventore veritatis et quasi architecto beatae vitae dicta sunt explicabo. Nemo enim ipsam voluptatem quia voluptas sit aspernatur aut odit aut fugit, sed quia consequuntur magni dolores eos qui ratione voluptatem sequi nesciunt. Neque porro quisquam est, qui dolorem ipsum quia dolor sit amet, consectetur, adipisci velit, sed quia non numquam eius modi tempora incidunt ut labore et dolore magnam aliquam quaerat voluptatem. Ut enim ad minima veniam, quis nostrum exercitationem ullam corporis suscipit laboriosam, nisi ut aliquid ex ea commodi consequatur? Quis autem vel eum iure reprehenderit qui in ea voluptate velit esse quam nihil molestiae consequatur, vel illum qui dolorem eum fugiat quo voluptas nulla pariatur? #inline_tag

### header 3

At vero eos et accusamus et iusto odio dignissimos ducimus qui blanditiis praesentium voluptatum deleniti atque corrupti quos dolores et quas molestias excepturi sint occaecati cupiditate non provident, similique sunt in culpa qui officia deserunt mollitia animi, id est laborum et dolorum fuga. Et harum quidem rerum facilis est et expedita distinctio. Nam libero tempore, #inline_tag2 cum soluta nobis est eligendi optio cumque nihil impedit quo minus id quod maxime placeat facere possimus, omnis voluptas assumenda est, omnis dolor repellendus. Temporibus autem quibusdam et aut officiis debitis aut rerum necessitatibus saepe eveniet ut et voluptates repudiandae sint et molestiae non recusandae. Itaque earum rerum hic tenetur a sapiente delectus, ut aut reiciendis voluptatibus maiores alias consequatur aut perferendis doloribus asperiores repellat.

#food/fruit/pear
7 tests/fixtures/short_textfile.md vendored Normal file
@@ -0,0 +1,7 @@
---
key: value
---

# header 1

Lorem ipsum dolor sit amet.
1 tests/fixtures/test_vault_config.toml vendored
@@ -1,3 +1,4 @@
["Test Vault"]
exclude_paths = [".git", ".obsidian", "ignore_folder"]
insert_location = "BOTTOM"
path = "tests/fixtures/test_vault"
@@ -517,6 +517,86 @@ def test_inline_contains() -> None:
    assert inline.contains("key", r"^\d_value", is_regex=True) is False


def test_inline_add() -> None:
    """Test inline add."""
    inline = InlineMetadata(INLINE_CONTENT)

    assert inline.add("bold_key1") is False
    assert inline.add("bold_key1", "bold_key1_value") is False
    assert inline.add("added_key") is True
    assert inline.dict == {
        "added_key": [],
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }

    assert inline.add("added_key1", "added_value") is True
    assert inline.dict == {
        "added_key": [],
        "added_key1": ["added_value"],
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }

    with pytest.raises(ValueError):
        assert inline.add("added_key1", "added_value_2") is True

    assert inline.dict == {
        "added_key": [],
        "added_key1": ["added_value"],
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }

    assert inline.add("added_key2", ["added_value_1", "added_value_2"]) is True
    assert inline.dict == {
        "added_key": [],
        "added_key1": ["added_value"],
        "added_key2": ["added_value_1"],
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }

    assert inline.add("added_key", "added_value")
    assert inline.dict == {
        "added_key": ["added_value"],
        "added_key1": ["added_value"],
        "added_key2": ["added_value_1"],
        "bold_key1": ["bold_key1_value"],
        "bold_key2": ["bold_key2_value"],
        "emoji_📅_key": ["emoji_📅_key_value"],
        "in_text_key1": ["in_text_key1_value"],
        "in_text_key2": ["in_text_key2_value"],
        "link_key": ["link_key_value"],
        "repeated_key": ["repeated_key_value1", "repeated_key_value2"],
        "tag_key": ["tag_key_value"],
    }


def test_inline_metadata_rename() -> None:
    """Test inline metadata rename."""
    inline = InlineMetadata(INLINE_CONTENT)
@@ -7,7 +7,7 @@ from pathlib import Path
import pytest
import typer

from obsidian_metadata.models.enums import MetadataType
from obsidian_metadata.models.enums import InsertLocation, MetadataType
from obsidian_metadata.models.notes import Note
from tests.helpers import Regex

@@ -74,36 +74,33 @@ def test_note_create(sample_note) -> None:
    assert note.original_file_content == content


def test_append(sample_note) -> None:
def test_add_metadata_inline(short_note) -> None:
    """Test appending to note."""
    """Test adding metadata."""
    note = Note(note_path=sample_note)
    path1, path2 = short_note
    assert note.dry_run is False
    note = Note(note_path=path1)

    string = "This is a test string."
    assert note.inline_metadata.dict == {}
    string2 = "Lorem ipsum dolor sit"
    assert (
        note.add_metadata(MetadataType.INLINE, location=InsertLocation.BOTTOM, key="new_key1")
        is True
    )
    assert note.inline_metadata.dict == {"new_key1": []}
    assert "new_key1::" in note.file_content.strip()

    note.append(string_to_append=string)
    assert (
    assert string in note.file_content
        note.add_metadata(MetadataType.INLINE, key="new_key1", location=InsertLocation.BOTTOM)
    assert len(re.findall(re.escape(string), note.file_content)) == 1
        is False
    )
    note.append(string_to_append=string)
    assert (
    assert string in note.file_content
        note.add_metadata(
    assert len(re.findall(re.escape(string), note.file_content)) == 1
            MetadataType.INLINE, key="new_key2", value="new_value1", location=InsertLocation.TOP
        )
    note.append(string_to_append=string, allow_multiple=True)
        is True
    assert string in note.file_content
    )
    assert len(re.findall(re.escape(string), note.file_content)) == 2
    assert "new_key2:: new_value1" in note.file_content

    note.append(string_to_append=string2)
    assert string2 in note.file_content
    assert len(re.findall(re.escape(string2), note.file_content)) == 1

    note.append(string_to_append=string2, allow_multiple=True)
    assert string2 in note.file_content
    assert len(re.findall(re.escape(string2), note.file_content)) == 2


def test_add_metadata(sample_note) -> None:
def test_add_metadata_frontmatter(sample_note) -> None:
    """Test adding metadata."""
    note = Note(note_path=sample_note)
    assert note.add_metadata(MetadataType.FRONTMATTER, "frontmatter_Key1") is False
@@ -240,7 +237,7 @@ def test_has_changes(sample_note) -> None:
    note = Note(note_path=sample_note)

    assert note.has_changes() is False
    note.append("This is a test string.")
    note.insert("This is a test string.", location=InsertLocation.BOTTOM)
    assert note.has_changes() is True

    note = Note(note_path=sample_note)
@@ -259,6 +256,146 @@ def test_has_changes(sample_note) -> None:
    assert note.has_changes() is True


def test_insert_bottom(short_note) -> None:
    """Test inserting metadata to bottom of note."""
    path1, path2 = short_note
    note = Note(note_path=str(path1))
    note2 = Note(note_path=str(path2))

    string1 = "This is a test string."
    string2 = "This is"

    correct_content = """
---
key: value
---

# header 1

Lorem ipsum dolor sit amet.

This is a test string.
"""
    correct_content2 = """
---
key: value
---

# header 1

Lorem ipsum dolor sit amet.

This is a test string.
This is
"""
    correct_content3 = """
Lorem ipsum dolor sit amet.

This is a test string.
"""
    note.insert(new_string=string1, location=InsertLocation.BOTTOM)
    assert note.file_content == correct_content.strip()

    note.insert(new_string=string2, location=InsertLocation.BOTTOM)
    assert note.file_content == correct_content.strip()

    note.insert(new_string=string2, allow_multiple=True, location=InsertLocation.BOTTOM)
    assert note.file_content == correct_content2.strip()

    note2.insert(new_string=string1, location=InsertLocation.BOTTOM)
    assert note2.file_content == correct_content3.strip()


def test_insert_after_frontmatter(short_note) -> None:
    """Test inserting metadata to bottom of note."""
    path1, path2 = short_note
    note = Note(note_path=path1)
    note2 = Note(note_path=path2)

    string1 = "This is a test string."
    string2 = "This is"
    correct_content = """
---
key: value
---
This is a test string.

# header 1

Lorem ipsum dolor sit amet.
"""

    correct_content2 = """
---
key: value
---
This is
This is a test string.

# header 1

Lorem ipsum dolor sit amet.
"""
    correct_content3 = """
This is a test string.
Lorem ipsum dolor sit amet.
"""

    note.insert(new_string=string1, location=InsertLocation.TOP)
    assert note.file_content.strip() == correct_content.strip()

    note.insert(new_string=string2, allow_multiple=True, location=InsertLocation.TOP)
    assert note.file_content.strip() == correct_content2.strip()

    note2.insert(new_string=string1, location=InsertLocation.TOP)
    assert note2.file_content.strip() == correct_content3.strip()


def test_insert_after_title(short_note) -> None:
    """Test inserting metadata to bottom of note."""
    path1, path2 = short_note
    note = Note(note_path=path1)
    note2 = Note(note_path=path2)

    string1 = "This is a test string."
    string2 = "This is"
    correct_content = """
---
key: value
---

# header 1
This is a test string.

Lorem ipsum dolor sit amet.
"""

    correct_content2 = """
---
key: value
---

# header 1
This is
This is a test string.

Lorem ipsum dolor sit amet.
"""
    correct_content3 = """
This is a test string.
Lorem ipsum dolor sit amet.
"""

    note.insert(new_string=string1, location=InsertLocation.AFTER_TITLE)
    assert note.file_content.strip() == correct_content.strip()

    note.insert(new_string=string2, allow_multiple=True, location=InsertLocation.AFTER_TITLE)
    assert note.file_content.strip() == correct_content2.strip()

    note2.insert(new_string=string1, location=InsertLocation.AFTER_TITLE)
    assert note2.file_content.strip() == correct_content3.strip()
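Note (not part of the commit): an illustrative stand-in, not the library's implementation, showing one way the AFTER_TITLE behaviour exercised above can work -- insert directly after the optional frontmatter block and the first markdown header, or at the very top when the note has neither.

import re


def insert_after_title_sketch(content: str, new_string: str) -> str:
    """Insert new_string after frontmatter plus the first header, else at the top."""
    match = re.search(
        r"\A(?:---\n.*?\n---\n)?.*?^#+ .*?$",  # optional frontmatter, then first header line
        content,
        re.DOTALL | re.MULTILINE,
    )
    if match is None:  # no header found -> prepend to the note
        return f"{new_string}\n{content}"
    end = match.end()
    return f"{content[:end]}\n{new_string}{content[end:]}"


# Mirrors the short_textfile.md fixture; the inserted line lands right after "# header 1".
short_note = "---\nkey: value\n---\n\n# header 1\n\nLorem ipsum dolor sit amet.\n"
print(insert_after_title_sketch(short_note, "This is a test string."))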
def test_print_note(sample_note, capsys) -> None:
    """Test printing note."""
    note = Note(note_path=sample_note)
@@ -273,7 +410,7 @@ def test_print_diff(sample_note, capsys) -> None:
    """Test printing diff."""
    note = Note(note_path=sample_note)

    note.append("This is a test string.")
    note.insert("This is a test string.", location=InsertLocation.BOTTOM)
    note.print_diff()
    captured = capsys.readouterr()
    assert "+ This is a test string." in captured.out
@@ -362,12 +499,12 @@ def test_rename_metadata(sample_note) -> None:
    assert note.file_content == Regex(r"new_key:: new_value")


def test_replace_frontmatter(sample_note) -> None:
def test_update_frontmatter(sample_note) -> None:
    """Test replacing frontmatter."""
    note = Note(note_path=sample_note)

    note.rename_metadata("frontmatter_Key1", "author name", "some_new_key_here")
    note.replace_frontmatter()
    note.update_frontmatter()
    new_frontmatter = """---
date_created: '2022-12-22'
tags:
@@ -387,9 +524,9 @@ shared_key2: shared_key2_value1
    assert "```python" in note.file_content

    note2 = Note(note_path="tests/fixtures/test_vault/no_metadata.md")
    note2.replace_frontmatter()
    note2.update_frontmatter()
    note2.frontmatter.dict = {"key1": "value1", "key2": "value2"}
    note2.replace_frontmatter()
    note2.update_frontmatter()
    new_frontmatter = """---
key1: value1
key2: value2
@@ -56,10 +56,69 @@ shared_key1: 'shared_key1_value'
"""


def test_regex():
def test_top_with_header():
    """Test regexes."""
    """Test identifying the top of a note."""
    pattern = Patterns()

    no_fm_or_header = """


Lorem ipsum dolor sit amet.

# header 1
---
horizontal: rule
---
Lorem ipsum dolor sit amet.
"""
    fm_and_header: str = """
---
tags:
- tag_1
- tag_2
-
- 📅/tag_3
frontmatter_Key1: "frontmatter_Key1_value"
frontmatter_Key2: ["note", "article"]
shared_key1: 'shared_key1_value'
---

# Header 1
more content

---
horizontal: rule
---
"""
    fm_and_header_result = """---
tags:
- tag_1
- tag_2
-
- 📅/tag_3
frontmatter_Key1: "frontmatter_Key1_value"
frontmatter_Key2: ["note", "article"]
shared_key1: 'shared_key1_value'
---

# Header 1"""
    no_fm = """

### Header's number 3 [📅] "+$2.00" 🤷
---
horizontal: rule
---
"""
    no_fm_result = '### Header\'s number 3 [📅] "+$2.00" 🤷'

    assert pattern.top_with_header.search(no_fm_or_header).group("top") == ""
    assert pattern.top_with_header.search(fm_and_header).group("top") == fm_and_header_result
    assert pattern.top_with_header.search(no_fm).group("top") == no_fm_result


def test_find_inline_tags():
    """Test find_inline_tags regex."""
    pattern = Patterns()
    assert pattern.find_inline_tags.findall(TAG_CONTENT) == [
        "1",
        "2",
@@ -87,6 +146,11 @@ def test_regex():
        "📅/tag",
    ]


def test_find_inline_metadata():
    """Test find_inline_metadata regex."""
    pattern = Patterns()

    result = pattern.find_inline_metadata.findall(INLINE_METADATA)
    assert result == [
        ("", "", "1", "1**"),
@@ -99,14 +163,26 @@ def test_regex():
        ("", "", "emoji_📅_key", "📅emoji_📅_key_value"),
    ]

    found = pattern.frontmatt_block_with_separators.search(FRONTMATTER_CONTENT).group("frontmatter")

def test_find_frontmatter():
    """Test regexes."""
    pattern = Patterns()
    found = pattern.frontmatter_block.search(FRONTMATTER_CONTENT).group("frontmatter")
    assert found == CORRECT_FRONTMATTER_WITH_SEPARATORS

    found = pattern.frontmatt_block_no_separators.search(FRONTMATTER_CONTENT).group("frontmatter")
    found = pattern.frontmatt_block_strip_separators.search(FRONTMATTER_CONTENT).group(
        "frontmatter"
    )
    assert found == CORRECT_FRONTMATTER_NO_SEPARATORS

    with pytest.raises(AttributeError):
        pattern.frontmatt_block_no_separators.search(TAG_CONTENT).group("frontmatter")
        pattern.frontmatt_block_strip_separators.search(TAG_CONTENT).group("frontmatter")


def test_validators():
    """Test validators."""
    pattern = Patterns()

    assert pattern.validate_tag_text.search("test_tag") is None
    assert pattern.validate_tag_text.search("#asdf").group(0) == "#"
    assert pattern.validate_tag_text.search("#asdf").group(0) == "#"
@@ -5,7 +5,7 @@ from pathlib import Path

from obsidian_metadata._config import Config
from obsidian_metadata.models import Vault, VaultFilter
from obsidian_metadata.models.enums import MetadataType
from obsidian_metadata.models.enums import InsertLocation, MetadataType
from tests.helpers import Regex


@@ -18,6 +18,7 @@ def test_vault_creation(test_vault):

    assert vault.name == "vault"
    assert vault.vault_path == vault_path
    assert vault.insert_location == InsertLocation.BOTTOM
    assert vault.backup_path == Path(f"{vault_path}.bak")
    assert vault.dry_run is False
    assert str(vault.exclude_paths[0]) == Regex(r".*\.git")
@@ -90,140 +91,6 @@ def test_vault_creation(test_vault):
    }


def test_get_filtered_notes(sample_vault) -> None:
    """Test filtering notes."""
    vault_path = sample_vault
    config = Config(config_path="tests/fixtures/sample_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]

    filters = [VaultFilter(path_filter="front")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 4

    filters = [VaultFilter(path_filter="mixed")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 1

    filters = [VaultFilter(key_filter="on_one_note")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 1

    filters = [VaultFilter(key_filter="type", value_filter="book")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 10

    filters = [VaultFilter(tag_filter="brunch")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 1

    filters = [VaultFilter(tag_filter="brunch"), VaultFilter(path_filter="inbox")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 0


def test_backup(test_vault, capsys):
    """Test backing up the vault."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config)

    vault.backup()

    captured = capsys.readouterr()
    assert Path(f"{vault_path}.bak").exists() is True
    assert captured.out == Regex(r"SUCCESS +| backed up to")

    vault.info()

    captured = capsys.readouterr()
    assert captured.out == Regex(r"Backup path +\│[\s ]+/[\d\w]+")


def test_backup_dryrun(test_vault, capsys):
    """Test backing up the vault."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config, dry_run=True)

    print(f"vault.dry_run: {vault.dry_run}")
    vault.backup()

    captured = capsys.readouterr()
    assert vault.backup_path.exists() is False
    assert captured.out == Regex(r"DRYRUN +| Backup up vault to")


def test_delete_backup(test_vault, capsys):
    """Test deleting the vault backup."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config)

    vault.backup()
    vault.delete_backup()

    captured = capsys.readouterr()
    assert captured.out == Regex(r"Backup deleted")
    assert vault.backup_path.exists() is False

    vault.info()

    captured = capsys.readouterr()
    assert captured.out == Regex(r"Backup +\│ None")


def test_delete_backup_dryrun(test_vault, capsys):
    """Test deleting the vault backup."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config, dry_run=True)

    Path.mkdir(vault.backup_path)
    vault.delete_backup()

    captured = capsys.readouterr()
    assert captured.out == Regex(r"DRYRUN +| Delete backup")
    assert vault.backup_path.exists() is True


def test_info(test_vault, capsys):
    """Test printing vault information."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config)

    vault.info()

    captured = capsys.readouterr()
    assert captured.out == Regex(r"Vault +\│ /[\d\w]+")
    assert captured.out == Regex(r"Notes in scope +\│ \d+")
    assert captured.out == Regex(r"Backup +\│ None")


def test_list_editable_notes(test_vault, capsys) -> None:
    """Test listing editable notes."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config)

    vault.list_editable_notes()
    captured = capsys.readouterr()
    assert captured.out == Regex("Notes in current scope")
    assert captured.out == Regex(r"\d +test1\.md")


def test_add_metadata(test_vault) -> None:
    """Test adding metadata to the vault."""
    vault_path = test_vault
@@ -327,6 +194,103 @@ def test_add_metadata(test_vault) -> None:
    }


def test_backup(test_vault, capsys):
    """Test backing up the vault."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config)

    vault.backup()

    captured = capsys.readouterr()
    assert Path(f"{vault_path}.bak").exists() is True
    assert captured.out == Regex(r"SUCCESS +| backed up to")

    vault.info()

    captured = capsys.readouterr()
    assert captured.out == Regex(r"Backup path +\│[\s ]+/[\d\w]+")


def test_commit(test_vault, tmp_path):
    """Test committing changes to content in the vault."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config)
    content = Path(f"{tmp_path}/vault/test1.md").read_text()
    assert "new_key: new_key_value" not in content

    vault.add_metadata(MetadataType.FRONTMATTER, "new_key", "new_key_value")
    vault.commit_changes()
    assert "new_key: new_key_value" not in content


def test_commit_dry_run(test_vault, tmp_path):
    """Test committing changes to content in the vault in dry run mode."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config, dry_run=True)
    content = Path(f"{tmp_path}/vault/test1.md").read_text()
    assert "new_key: new_key_value" not in content

    vault.add_metadata(MetadataType.FRONTMATTER, "new_key", "new_key_value")
    vault.commit_changes()
    assert "new_key: new_key_value" not in content


def test_backup_dryrun(test_vault, capsys):
    """Test backing up the vault."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config, dry_run=True)

    print(f"vault.dry_run: {vault.dry_run}")
    vault.backup()

    captured = capsys.readouterr()
    assert vault.backup_path.exists() is False
    assert captured.out == Regex(r"DRYRUN +| Backup up vault to")


def test_delete_backup(test_vault, capsys):
    """Test deleting the vault backup."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config)

    vault.backup()
    vault.delete_backup()

    captured = capsys.readouterr()
    assert captured.out == Regex(r"Backup deleted")
    assert vault.backup_path.exists() is False

    vault.info()

    captured = capsys.readouterr()
    assert captured.out == Regex(r"Backup +\│ None")


def test_delete_backup_dryrun(test_vault, capsys):
    """Test deleting the vault backup."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config, dry_run=True)

    Path.mkdir(vault.backup_path)
    vault.delete_backup()

    captured = capsys.readouterr()
    assert captured.out == Regex(r"DRYRUN +| Delete backup")
    assert vault.backup_path.exists() is True


def test_delete_inline_tag(test_vault) -> None:
    """Test deleting an inline tag."""
    vault_path = test_vault
@@ -364,6 +328,97 @@ def test_delete_metadata(test_vault) -> None:
    assert "top_key2" not in vault.metadata.dict


def test_export_csv(tmp_path, test_vault):
    """Test exporting the vault to a CSV file."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config)
    export_file = Path(f"{tmp_path}/export.csv")

    vault.export_metadata(path=export_file, format="csv")
    assert export_file.exists() is True
    assert "frontmatter,date_created,2022-12-22" in export_file.read_text()


def test_export_json(tmp_path, test_vault):
    """Test exporting the vault to a CSV file."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config)
    export_file = Path(f"{tmp_path}/export.json")

    vault.export_metadata(path=export_file, format="json")
    assert export_file.exists() is True
    assert '"frontmatter": {' in export_file.read_text()


def test_get_filtered_notes(sample_vault) -> None:
    """Test filtering notes."""
    vault_path = sample_vault
    config = Config(config_path="tests/fixtures/sample_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]

    filters = [VaultFilter(path_filter="front")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 4

    filters = [VaultFilter(path_filter="mixed")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 1

    filters = [VaultFilter(key_filter="on_one_note")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 1

    filters = [VaultFilter(key_filter="type", value_filter="book")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 10

    filters = [VaultFilter(tag_filter="brunch")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 1

    filters = [VaultFilter(tag_filter="brunch"), VaultFilter(path_filter="inbox")]
    vault = Vault(config=vault_config, filters=filters)
    assert len(vault.all_notes) == 13
    assert len(vault.notes_in_scope) == 0


def test_info(test_vault, capsys):
    """Test printing vault information."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config)

    vault.info()

    captured = capsys.readouterr()
    assert captured.out == Regex(r"Vault +\│ /[\d\w]+")
    assert captured.out == Regex(r"Notes in scope +\│ \d+")
    assert captured.out == Regex(r"Backup +\│ None")


def test_list_editable_notes(test_vault, capsys) -> None:
    """Test listing editable notes."""
    vault_path = test_vault
    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
    vault_config = config.vaults[0]
    vault = Vault(config=vault_config)

    vault.list_editable_notes()
    captured = capsys.readouterr()
    assert captured.out == Regex("Notes in current scope")
    assert captured.out == Regex(r"\d +test1\.md")


def test_rename_inline_tag(test_vault) -> None:
    """Test renaming an inline tag."""
    vault_path = test_vault