Mirror of https://github.com/natelandau/obsidian-metadata.git (synced 2025-11-17 09:23:40 -05:00)
feat: move inline metadata to specific location in note (#27)
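In short, the commit renames the "Transpose Metadata" menu entry to "Reorganize Metadata" and adds the ability to move a note's inline metadata to the top of the note, beneath the first header, or to the bottom of the note. The sketch below is an illustration of the resulting flow pieced together only from names that appear in the diff (Config, Vault, InsertLocation, move_inline_metadata, commit_changes); the config path is a placeholder and this is not the project's documented API.

# Illustration only; names taken from the diff below, "config.toml" is a placeholder path.
from obsidian_metadata._config import Config
from obsidian_metadata.models import InsertLocation, Vault

config = Config(config_path="config.toml")
vault = Vault(config=config.vaults[0])

# Move every in-scope note's inline metadata beneath the first header.
num_changed = vault.move_inline_metadata(location=InsertLocation.AFTER_TITLE)
print(f"Moved inline metadata in {num_changed} notes")

vault.commit_changes()  # write the edits back to disk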
@@ -12,7 +12,7 @@ from rich.table import Table
 from obsidian_metadata._config import VaultConfig
 from obsidian_metadata._utils import alerts
 from obsidian_metadata._utils.console import console
-from obsidian_metadata.models import Vault, VaultFilter
+from obsidian_metadata.models import InsertLocation, Vault, VaultFilter
 from obsidian_metadata.models.enums import MetadataType
 from obsidian_metadata.models.questions import Questions

@@ -63,8 +63,8 @@ class Application:
 self.application_rename_metadata()
 case "delete_metadata":
 self.application_delete_metadata()
-case "transpose_metadata":
-self.application_transpose_metadata()
+case "reorganize_metadata":
+self.application_reorganize_metadata()
 case "review_changes":
 self.review_changes()
 case "commit_changes":

@@ -332,11 +332,23 @@ class Application:
 case _:
 return

-def application_transpose_metadata(self) -> None:
-"""Transpose metadata."""
-alerts.usage("Move metadata between types. i.e. from frontmatter to inline or vice versa.")
+def application_reorganize_metadata(self) -> None:
+"""Reorganize metadata.
+
+This portion of the application deals with moving metadata between types (inline to frontmatter, etc.) and moving the location of inline metadata within a note.
+
+"""
+alerts.usage("Move metadata within notes.")
+alerts.usage(" 1. Transpose frontmatter to inline or vice versa.")
+alerts.usage(" 2. Move the location of inline metadata within a note.")

 choices = [
+{"name": "Move inline metadata to top of note", "value": "move_to_top"},
+{
+"name": "Move inline metadata beneath the first header",
+"value": "move_to_after_header",
+},
+{"name": "Move inline metadata to bottom of the note", "value": "move_to_bottom"},
 {"name": "Transpose frontmatter to inline", "value": "frontmatter_to_inline"},
 {"name": "Transpose inline to frontmatter", "value": "inline_to_frontmatter"},
 questionary.Separator(),

@@ -349,6 +361,12 @@ class Application:
 self.transpose_metadata(begin=MetadataType.FRONTMATTER, end=MetadataType.INLINE)
 case "inline_to_frontmatter":
 self.transpose_metadata(begin=MetadataType.INLINE, end=MetadataType.FRONTMATTER)
+case "move_to_top":
+self.move_inline_metadata(location=InsertLocation.TOP)
+case "move_to_after_header":
+self.move_inline_metadata(location=InsertLocation.AFTER_TITLE)
+case "move_to_bottom":
+self.move_inline_metadata(location=InsertLocation.BOTTOM)
 case _: # pragma: no cover
 return

@@ -453,6 +471,15 @@ class Application:

 return

+def move_inline_metadata(self, location: InsertLocation) -> None:
+"""Move inline metadata to the selected location."""
+num_changed = self.vault.move_inline_metadata(location)
+if num_changed == 0:
+alerts.warning("No notes were changed")
+return
+
+alerts.success(f"Moved inline metadata to {location.value} in {num_changed} notes")
+
 def noninteractive_export_csv(self, path: Path) -> None:
 """Export the vault metadata to CSV."""
 self._load_vault()
@@ -448,19 +448,26 @@ class Note:
 Returns:
 bool: Whether the note was updated.
 """
+if self.inline_metadata.dict != {}:
+if key is None:
+for _k, _v in self.inline_metadata.dict.items():
+for _value in _v:
+_k = re.escape(_k)
+_value = re.escape(_value)
+self.sub(rf"\[?{_k}:: ?\[?\[?{_value}\]?\]?", "", is_regex=True)
+return True
+
 for _k, _v in self.inline_metadata.dict.items():
 if re.search(key, _k):
 for _value in _v:
 if value is None:
 _k = re.escape(_k)
 _value = re.escape(_value)
-self.sub(rf"\[?{_k}:: ?{_value}]?", "", is_regex=True)
-return True
-
-if re.search(value, _value):
+self.sub(rf"\[?{_k}:: \[?\[?{_value}\]?\]?", "", is_regex=True)
+elif re.search(value, _value):
 _k = re.escape(_k)
 _value = re.escape(_value)
-self.sub(rf"({_k}::) ?{_value}", r"\1", is_regex=True)
+self.sub(rf"\[?({_k}::) ?\[?\[?{_value}\]?\]?", r"\1", is_regex=True)
 return True
 return False
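The widened patterns above now tolerate wikilink-wrapped values ("key:: [[value]]") and bracketed inline fields ("[key:: value]") rather than only bare "key:: value" pairs. Below is a standalone illustration of what the broadened regex removes, using Python's re module directly; it is not project code, and the key/value strings are taken from the fixture data elsewhere in this diff.

import re

key = re.escape("top_key3")
value = re.escape("top_key3_value_as_link")

# Same shape as the widened pattern in the hunk above: an optional leading "[",
# and optional "[[" ... "]]" around the value.
pattern = rf"\[?{key}:: ?\[?\[?{value}\]?\]?"

print(re.sub(pattern, "", "top_key3:: [[top_key3_value_as_link]]"))  # prints an empty line
print(re.sub(pattern, "", "[top_key3:: top_key3_value_as_link]"))    # prints an empty line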
@@ -282,7 +282,7 @@ class Questions:
 {"name": "Add Metadata", "value": "add_metadata"},
 {"name": "Delete Metadata", "value": "delete_metadata"},
 {"name": "Rename Metadata", "value": "rename_metadata"},
-{"name": "Transpose Metadata", "value": "transpose_metadata"},
+{"name": "Reorganize Metadata", "value": "reorganize_metadata"},
 questionary.Separator("-------------------------------"),
 {"name": "Review Changes", "value": "review_changes"},
 {"name": "Commit Changes", "value": "commit_changes"},
@@ -8,6 +8,7 @@ from dataclasses import dataclass
 from pathlib import Path

 import rich.repr
+import typer
 from rich import box
 from rich.progress import Progress, SpinnerColumn, TextColumn
 from rich.prompt import Confirm

@@ -53,8 +54,9 @@ class Vault:
 self.insert_location: InsertLocation = self._find_insert_location()
 self.dry_run: bool = dry_run
 self.backup_path: Path = self.vault_path.parent / f"{self.vault_path.name}.bak"
-self.exclude_paths: list[Path] = []
 self.metadata = VaultMetadata()
+self.exclude_paths: list[Path] = []

 for p in config.exclude_paths:
 self.exclude_paths.append(Path(self.vault_path / p))

@@ -76,13 +78,16 @@ class Vault:

 def __rich_repr__(self) -> rich.repr.Result: # pragma: no cover
 """Define rich representation of Vault."""
-yield "vault_path", self.vault_path
-yield "dry_run", self.dry_run
 yield "backup_path", self.backup_path
-yield "num_notes", len(self.all_notes)
-yield "num_notes_in_scope", len(self.notes_in_scope)
+yield "config", self.config
+yield "dry_run", self.dry_run
 yield "exclude_paths", self.exclude_paths
+yield "filters", self.filters
 yield "insert_location", self.insert_location
+yield "name", self.name
+yield "num_notes_in_scope", len(self.notes_in_scope)
+yield "num_notes", len(self.all_notes)
+yield "vault_path", self.vault_path

 def _filter_notes(self) -> list[Note]:
 """Filter notes by path and metadata using the filters defined in self.filters.

@@ -209,6 +214,7 @@ class Vault:

 for _note in self.notes_in_scope:
 if _note.add_metadata(area=area, key=key, value=value, location=location):
+log.trace(f"Added metadata to {_note.note_path}")
 num_changed += 1

 if num_changed > 0:

@@ -279,6 +285,7 @@ class Vault:

 for _note in self.notes_in_scope:
 if _note.delete_inline_tag(tag):
+log.trace(f"Deleted tag from {_note.note_path}")
 num_changed += 1

 if num_changed > 0:

@@ -300,6 +307,7 @@ class Vault:

 for _note in self.notes_in_scope:
 if _note.delete_metadata(key, value):
+log.trace(f"Deleted metadata from {_note.note_path}")
 num_changed += 1

 if num_changed > 0:

@@ -315,6 +323,9 @@ class Vault:
 export_format (str, optional): Export as 'csv' or 'json'. Defaults to "csv".
 """
 export_file = Path(path).expanduser().resolve()
+if not export_file.parent.exists():
+alerts.error(f"Path does not exist: {export_file.parent}")
+raise typer.Exit(code=1)

 match export_format:
 case "csv":

@@ -350,7 +361,7 @@ class Vault:
 json.dump(dict_to_dump, f, indent=4, ensure_ascii=False, sort_keys=True)

 def get_changed_notes(self) -> list[Note]:
-"""Returns a list of notes that have changes.
+"""Return a list of notes that have changes.

 Returns:
 list[Note]: List of notes that have changes.

@@ -386,6 +397,29 @@ class Vault:
 table.add_row(str(_n), str(_note.note_path.relative_to(self.vault_path)))
 console.print(table)

+def move_inline_metadata(self, location: InsertLocation) -> int:
+"""Move all inline metadata to the selected location.
+
+Args:
+location (InsertLocation): Location to move inline metadata to.
+
+Returns:
+int: Number of notes that had inline metadata moved.
+"""
+num_changed = 0
+
+for _note in self.notes_in_scope:
+if _note.write_delete_inline_metadata():
+log.trace(f"Deleted inline metadata from {_note.note_path}")
+num_changed += 1
+_note.write_all_inline_metadata(location)
+log.trace(f"Wrote all inline metadata to {_note.note_path}")
+
+if num_changed > 0:
+self._rebuild_vault_metadata()
+
+return num_changed
+
 def num_excluded_notes(self) -> int:
 """Count number of excluded notes."""
 return len(self.all_notes) - len(self.notes_in_scope)
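The new Vault.move_inline_metadata() above works in two passes per note: it first strips the existing inline metadata (write_delete_inline_metadata) and then rewrites all of it at the requested location (write_all_inline_metadata), rebuilding the vault-wide metadata cache if anything changed. A minimal usage sketch, mirroring how the Application layer calls it in the hunk earlier in this diff; the helper name is invented for illustration and is not project code.

from obsidian_metadata.models import InsertLocation

def move_and_report(vault) -> None:  # hypothetical helper, not project code
    """Move all in-scope inline metadata to the bottom of each note."""
    num_changed = vault.move_inline_metadata(location=InsertLocation.BOTTOM)
    if num_changed == 0:
        print("No notes were changed")
        return
    print(f"Moved inline metadata to {InsertLocation.BOTTOM.value} in {num_changed} notes")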
@@ -404,6 +438,7 @@ class Vault:

 for _note in self.notes_in_scope:
 if _note.rename_inline_tag(old_tag, new_tag):
+log.trace(f"Renamed inline tag in {_note.note_path}")
 num_changed += 1

 if num_changed > 0:

@@ -412,7 +447,7 @@ class Vault:
 return num_changed

 def rename_metadata(self, key: str, value_1: str, value_2: str = None) -> int:
-"""Renames a key or key-value pair in the note's metadata.
+"""Rename a key or key-value pair in the note's metadata.

 If no value is provided, will rename an entire key.

@@ -428,6 +463,7 @@ class Vault:

 for _note in self.notes_in_scope:
 if _note.rename_metadata(key, value_1, value_2):
+log.trace(f"Renamed metadata in {_note.note_path}")
 num_changed += 1

 if num_changed > 0:

@@ -468,6 +504,7 @@ class Vault:
 location=location,
 ):
 num_changed += 1
+log.trace(f"Transposed metadata in {_note.note_path}")

 if num_changed > 0:
 self._rebuild_vault_metadata()
@@ -17,7 +17,12 @@ from tests.helpers import Regex, remove_ansi


 def test_instantiate_application(test_application) -> None:
-"""Test application."""
+"""Test application.
+
+GIVEN an application
+WHEN the application is instantiated
+THEN check the attributes are set correctly
+"""
 app = test_application
 app._load_vault()

@@ -29,7 +34,12 @@ def test_instantiate_application(test_application) -> None:


 def test_abort(test_application, mocker, capsys) -> None:
-"""Test renaming a key."""
+"""Test aborting the application.
+
+GIVEN an application
+WHEN the users selects "abort" from the main menu
+THEN check the application exits
+"""
 app = test_application
 app._load_vault()
 mocker.patch(

@@ -43,7 +53,12 @@ def test_abort(test_application, mocker, capsys) -> None:


 def test_add_metadata_frontmatter(test_application, mocker, capsys) -> None:
-"""Test adding new metadata to the vault."""
+"""Test adding new metadata to the vault.
+
+GIVEN an application
+WHEN the wants to update a key in the frontmatter
+THEN check the application updates the key
+"""
 app = test_application
 app._load_vault()
 mocker.patch(

@@ -70,7 +85,12 @@ def test_add_metadata_frontmatter(test_application, mocker, capsys) -> None:


 def test_add_metadata_inline(test_application, mocker, capsys) -> None:
-"""Test adding new metadata to the vault."""
+"""Test adding new metadata to the vault.
+
+GIVEN an application
+WHEN the user wants to add a key in the inline metadata
+THEN check the application updates the key
+"""
 app = test_application
 app._load_vault()
 mocker.patch(

@@ -97,7 +117,12 @@ def test_add_metadata_inline(test_application, mocker, capsys) -> None:


 def test_add_metadata_tag(test_application, mocker, capsys) -> None:
-"""Test adding new metadata to the vault."""
+"""Test adding new metadata to the vault.
+
+GIVEN an application
+WHEN the user wants to add a tag
+THEN check the application adds the tag
+"""
 app = test_application
 app._load_vault()
 mocker.patch(

@@ -119,8 +144,41 @@ def test_add_metadata_tag(test_application, mocker, capsys) -> None:
 assert captured == Regex(r"SUCCESS +\| Added metadata to \d+ notes", re.DOTALL)


-def test_delete_inline_tag(test_application, mocker, capsys) -> None:
-"""Test renaming an inline tag."""
+def test_delete_inline_tag_1(test_application, mocker, capsys) -> None:
+"""Test renaming an inline tag.
+
+GIVEN an application
+WHEN the user wants to delete an inline tag
+THEN check the application deletes the tag
+"""
+app = test_application
+app._load_vault()
+mocker.patch(
+"obsidian_metadata.models.application.Questions.ask_application_main",
+side_effect=["delete_metadata", KeyError],
+)
+mocker.patch(
+"obsidian_metadata.models.application.Questions.ask_selection",
+side_effect=["delete_inline_tag", "back"],
+)
+mocker.patch(
+"obsidian_metadata.models.application.Questions.ask_existing_inline_tag",
+return_value="breakfast",
+)
+
+with pytest.raises(KeyError):
+app.application_main()
+captured = remove_ansi(capsys.readouterr().out)
+assert captured == Regex(r"SUCCESS +\| Deleted inline tag: breakfast in \d+ notes", re.DOTALL)
+
+
+def test_delete_inline_tag_2(test_application, mocker, capsys) -> None:
+"""Test renaming an inline tag.
+
+GIVEN an application
+WHEN the user wants to delete an inline tag that does not exist
+THEN check the application does not update any notes
+"""
 app = test_application
 app._load_vault()
 mocker.patch(

@@ -141,24 +199,6 @@ def test_delete_inline_tag(test_application, mocker, capsys) -> None:
 captured = remove_ansi(capsys.readouterr().out)
 assert "WARNING | No notes were changed" in captured
-
-mocker.patch(
-"obsidian_metadata.models.application.Questions.ask_application_main",
-side_effect=["delete_metadata", KeyError],
-)
-mocker.patch(
-"obsidian_metadata.models.application.Questions.ask_selection",
-side_effect=["delete_inline_tag", "back"],
-)
-mocker.patch(
-"obsidian_metadata.models.application.Questions.ask_existing_inline_tag",
-return_value="breakfast",
-)
-
-with pytest.raises(KeyError):
-app.application_main()
-captured = remove_ansi(capsys.readouterr().out)
-assert captured == Regex(r"SUCCESS +\| Deleted inline tag: breakfast in \d+ notes", re.DOTALL)


 def test_delete_key(test_application, mocker, capsys) -> None:
 """Test renaming an inline tag."""

@@ -545,15 +585,20 @@ def test_review_changes(test_application, mocker, capsys) -> None:
 assert "+ new_tags:" in captured


-def test_transpose_metadata(test_application, mocker, capsys) -> None:
-"""Transpose metadata."""
+def test_transpose_metadata_1(test_application, mocker, capsys) -> None:
+"""Transpose metadata.
+
+GIVEN a test application
+WHEN the user wants to transpose all inline metadata to frontmatter
+THEN the metadata is transposed
+"""
 app = test_application
 app._load_vault()

 assert app.vault.metadata.inline_metadata["inline_key"] == ["inline_key_value"]
 mocker.patch(
 "obsidian_metadata.models.application.Questions.ask_application_main",
-side_effect=["transpose_metadata", KeyError],
+side_effect=["reorganize_metadata", KeyError],
 )
 mocker.patch(
 "obsidian_metadata.models.application.Questions.ask_selection",

@@ -561,18 +606,27 @@ def test_transpose_metadata(test_application, mocker, capsys) -> None:
 )
 with pytest.raises(KeyError):
 app.application_main()

 assert app.vault.metadata.inline_metadata == {}
 assert app.vault.metadata.frontmatter["inline_key"] == ["inline_key_value"]
 captured = remove_ansi(capsys.readouterr().out)
 assert "SUCCESS | Transposed Inline Metadata to Frontmatter in 5 notes" in captured


+def test_transpose_metadata_2(test_application, mocker, capsys) -> None:
+"""Transpose metadata.
+
+GIVEN a test application
+WHEN the user wants to transpose all frontmatter to inline metadata
+THEN the metadata is transposed
+"""
 app = test_application
 app._load_vault()

 assert app.vault.metadata.frontmatter["date_created"] == ["2022-12-21", "2022-12-22"]
 mocker.patch(
 "obsidian_metadata.models.application.Questions.ask_application_main",
-side_effect=["transpose_metadata", KeyError],
+side_effect=["reorganize_metadata", KeyError],
 )
 mocker.patch(
 "obsidian_metadata.models.application.Questions.ask_selection",
@@ -1,6 +1,9 @@
 # type: ignore
 """Test obsidian-metadata CLI."""

+import shutil
+from pathlib import Path
+
 from typer.testing import CliRunner

 from obsidian_metadata.cli import app

@@ -17,13 +20,20 @@ def test_version() -> None:
 assert result.output == Regex(r"obsidian_metadata: v\d+\.\d+\.\d+$")


-def test_application(test_vault, tmp_path) -> None:
+def test_application(tmp_path) -> None:
 """Test the application."""
-vault_path = test_vault
+source_dir = Path(__file__).parent / "fixtures" / "test_vault"
+dest_dir = Path(tmp_path / "vault")
+
+if not source_dir.exists():
+raise FileNotFoundError(f"Sample vault not found: {source_dir}")
+
+shutil.copytree(source_dir, dest_dir)
+
 config_path = tmp_path / "config.toml"
 result = runner.invoke(
 app,
-["--vault-path", vault_path, "--config-file", config_path],
+["--vault-path", dest_dir, "--config-file", config_path],
 # input=KeyInputs.DOWN + KeyInputs.DOWN + KeyInputs.DOWN + KeyInputs.ENTER, # noqa: ERA001
 )
@@ -9,6 +9,13 @@ import pytest
 from obsidian_metadata._config import Config
 from obsidian_metadata.models.application import Application

+CONFIG_1 = """
+["Test Vault"]
+exclude_paths = [".git", ".obsidian", "ignore_folder"]
+insert_location = "TOP"
+path = "TMPDIR_VAULT_PATH"
+"""
+

 def remove_all(root: Path):
 """Remove all files and directories in a directory."""

@@ -95,10 +102,16 @@ def test_vault(tmp_path) -> Path:
 raise FileNotFoundError(f"Sample vault not found: {source_dir}")

 shutil.copytree(source_dir, dest_dir)
-yield dest_dir
+config_path = Path(tmp_path / "config.toml")
+config_path.write_text(CONFIG_1.replace("TMPDIR_VAULT_PATH", str(dest_dir)))
+config = Config(config_path=config_path)
+vault_config = config.vaults[0]
+
+yield vault_config
+
 # after test - remove fixtures
 shutil.rmtree(dest_dir)
+config_path.unlink()

 if backup_dir.exists():
 shutil.rmtree(backup_dir)
tests/fixtures/test_vault/test1.md (vendored)
@@ -4,7 +4,6 @@ tags:
 - shared_tag
 - frontmatter_tag1
 - frontmatter_tag2
--
 - 📅/frontmatter_tag3
 frontmatter_Key1: author name
 frontmatter_Key2: ["article", "note"]
@@ -824,7 +824,7 @@ def test_write_delete_inline_metadata_2(sample_note) -> None:

 GIVEN a note object with write_delete_inline_metadata() called
 WHEN a key is specified that is within a body of text
-THEN the key/value is removed from the note content
+THEN the key and all associated values are removed from the note content

 """
 note = Note(note_path=sample_note)

@@ -847,6 +847,26 @@ def test_write_delete_inline_metadata_3(sample_note) -> None:
 assert note.file_content != Regex(r"bottom_key1::")


+def test_write_delete_inline_metadata_4(sample_note) -> None:
+"""Twrite_delete_inline_metadata() method.
+
+GIVEN a note object with write_delete_inline_metadata() called
+WHEN no key or value is specified
+THEN all inline metadata is removed from the note content
+"""
+note = Note(note_path=sample_note)
+note.write_delete_inline_metadata()
+assert note.file_content == Regex(r"codeblock_key::")
+assert note.file_content != Regex(r"key📅::")
+assert note.file_content != Regex(r"top_key1::")
+assert note.file_content != Regex(r"top_key3::")
+assert note.file_content != Regex(r"intext_key::")
+assert note.file_content != Regex(r"shared_key1::")
+assert note.file_content != Regex(r"shared_key2::")
+assert note.file_content != Regex(r"bottom_key1::")
+assert note.file_content != Regex(r"bottom_key2::")
+
+
 def test_write_frontmatter_1(sample_note) -> None:
 """Test writing frontmatter.
@@ -3,35 +3,38 @@

 from pathlib import Path

+import pytest
+import typer
+from rich import print
+
 from obsidian_metadata._config import Config
 from obsidian_metadata.models import Vault, VaultFilter
 from obsidian_metadata.models.enums import InsertLocation, MetadataType
 from tests.helpers import Regex


-def test_vault_creation(test_vault):
-"""Test creating a Vault object."""
-vault_path = test_vault
-config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-vault_config = config.vaults[0]
-vault = Vault(config=vault_config)
+def test_vault_creation(test_vault, tmp_path):
+"""Test creating a Vault object.
+
+GIVEN a Config object
+WHEN a Vault object is created
+THEN the Vault object is created with the correct attributes.
+"""
+vault = Vault(config=test_vault)

 assert vault.name == "vault"
-assert vault.vault_path == vault_path
-assert vault.insert_location == InsertLocation.BOTTOM
-assert vault.backup_path == Path(f"{vault_path}.bak")
+assert vault.insert_location == InsertLocation.TOP
+assert vault.backup_path == Path(tmp_path, "vault.bak")
 assert vault.dry_run is False
 assert str(vault.exclude_paths[0]) == Regex(r".*\.git")
-assert len(vault.all_notes) == 3
+assert len(vault.all_notes) == 2

 assert vault.metadata.dict == {
-"author": ["author name"],
 "bottom_key1": ["bottom_key1_value"],
 "bottom_key2": ["bottom_key2_value"],
 "date_created": ["2022-12-22"],
 "frontmatter_Key1": ["author name"],
 "frontmatter_Key2": ["article", "note"],
-"ignored_frontmatter": ["ignore_me"],
 "intext_key": ["intext_value"],
 "key📅": ["📅_key_value"],
 "shared_key1": [

@@ -43,19 +46,15 @@ def test_vault_creation(test_vault):
 "tags": [
 "frontmatter_tag1",
 "frontmatter_tag2",
-"frontmatter_tag3",
-"ignored_file_tag1",
 "shared_tag",
 "📅/frontmatter_tag3",
 ],
 "top_key1": ["top_key1_value"],
 "top_key2": ["top_key2_value"],
 "top_key3": ["top_key3_value_as_link"],
-"type": ["article", "note"],
 }

 assert vault.metadata.tags == [
-"ignored_file_tag2",
 "inline_tag_bottom1",
 "inline_tag_bottom2",
 "inline_tag_top1",
@@ -76,55 +75,51 @@ def test_vault_creation(test_vault):
 "top_key3": ["top_key3_value_as_link"],
 }
 assert vault.metadata.frontmatter == {
-"author": ["author name"],
 "date_created": ["2022-12-22"],
 "frontmatter_Key1": ["author name"],
 "frontmatter_Key2": ["article", "note"],
-"ignored_frontmatter": ["ignore_me"],
 "shared_key1": ["shared_key1_value", "shared_key1_value3"],
 "shared_key2": ["shared_key2_value1"],
 "tags": [
 "frontmatter_tag1",
 "frontmatter_tag2",
-"frontmatter_tag3",
-"ignored_file_tag1",
 "shared_tag",
 "📅/frontmatter_tag3",
 ],
-"type": ["article", "note"],
 }


 def set_insert_location(test_vault):
-"""Test setting a new insert location."""
-vault_path = test_vault
-config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-vault_config = config.vaults[0]
-vault = Vault(config=vault_config)
+"""Test setting a new insert location.
+
+GIVEN a vault object
+WHEN the insert location is changed
+THEN the insert location is changed
+"""
+vault = Vault(config=test_vault)

 assert vault.name == "vault"
-assert vault.vault_path == vault_path
-assert vault.insert_location == InsertLocation.BOTTOM
-vault.insert_location = InsertLocation.TOP
 assert vault.insert_location == InsertLocation.TOP
+vault.insert_location = InsertLocation.BOTTOM
+assert vault.insert_location == InsertLocation.BOTTOM


-def test_add_metadata(test_vault) -> None:
-"""Test adding metadata to the vault."""
-vault_path = test_vault
-config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-vault_config = config.vaults[0]
-vault = Vault(config=vault_config)
-
-assert vault.add_metadata(MetadataType.FRONTMATTER, "new_key") == 3
+def test_add_metadata_1(test_vault) -> None:
+"""Test adding metadata to the vault.
+
+GIVEN a vault object
+WHEN a new metadata key is added
+THEN the metadata is added to the vault
+"""
+vault = Vault(config=test_vault)
+
+assert vault.add_metadata(MetadataType.FRONTMATTER, "new_key") == 2
 assert vault.metadata.dict == {
-"author": ["author name"],
 "bottom_key1": ["bottom_key1_value"],
 "bottom_key2": ["bottom_key2_value"],
 "date_created": ["2022-12-22"],
 "frontmatter_Key1": ["author name"],
 "frontmatter_Key2": ["article", "note"],
-"ignored_frontmatter": ["ignore_me"],
 "intext_key": ["intext_value"],
 "key📅": ["📅_key_value"],
 "new_key": [],
@@ -137,47 +132,46 @@ def test_add_metadata(test_vault) -> None:
 "tags": [
 "frontmatter_tag1",
 "frontmatter_tag2",
-"frontmatter_tag3",
-"ignored_file_tag1",
 "shared_tag",
 "📅/frontmatter_tag3",
 ],
 "top_key1": ["top_key1_value"],
 "top_key2": ["top_key2_value"],
 "top_key3": ["top_key3_value_as_link"],
-"type": ["article", "note"],
 }
 assert vault.metadata.frontmatter == {
-"author": ["author name"],
 "date_created": ["2022-12-22"],
 "frontmatter_Key1": ["author name"],
 "frontmatter_Key2": ["article", "note"],
-"ignored_frontmatter": ["ignore_me"],
 "new_key": [],
 "shared_key1": ["shared_key1_value", "shared_key1_value3"],
 "shared_key2": ["shared_key2_value1"],
 "tags": [
 "frontmatter_tag1",
 "frontmatter_tag2",
-"frontmatter_tag3",
-"ignored_file_tag1",
 "shared_tag",
 "📅/frontmatter_tag3",
 ],
-"type": ["article", "note"],
 }
-assert vault.add_metadata(MetadataType.FRONTMATTER, "new_key2", "new_key2_value") == 3

+def test_add_metadata_2(test_vault) -> None:
+"""Test adding metadata to the vault.
+
+GIVEN a vault object
+WHEN a new metadata key and value is added
+THEN the metadata is added to the vault
+"""
+vault = Vault(config=test_vault)
+assert vault.add_metadata(MetadataType.FRONTMATTER, "new_key2", "new_key2_value") == 2
 assert vault.metadata.dict == {
-"author": ["author name"],
 "bottom_key1": ["bottom_key1_value"],
 "bottom_key2": ["bottom_key2_value"],
 "date_created": ["2022-12-22"],
 "frontmatter_Key1": ["author name"],
 "frontmatter_Key2": ["article", "note"],
-"ignored_frontmatter": ["ignore_me"],
 "intext_key": ["intext_value"],
 "key📅": ["📅_key_value"],
-"new_key": [],
 "new_key2": ["new_key2_value"],
 "shared_key1": [
 "shared_key1_value",

@@ -188,49 +182,76 @@ def test_add_metadata(test_vault) -> None:
 "tags": [
 "frontmatter_tag1",
 "frontmatter_tag2",
-"frontmatter_tag3",
-"ignored_file_tag1",
 "shared_tag",
 "📅/frontmatter_tag3",
 ],
 "top_key1": ["top_key1_value"],
 "top_key2": ["top_key2_value"],
 "top_key3": ["top_key3_value_as_link"],
-"type": ["article", "note"],
 }
 assert vault.metadata.frontmatter == {
-"author": ["author name"],
 "date_created": ["2022-12-22"],
 "frontmatter_Key1": ["author name"],
 "frontmatter_Key2": ["article", "note"],
-"ignored_frontmatter": ["ignore_me"],
-"new_key": [],
 "new_key2": ["new_key2_value"],
 "shared_key1": ["shared_key1_value", "shared_key1_value3"],
 "shared_key2": ["shared_key2_value1"],
 "tags": [
 "frontmatter_tag1",
 "frontmatter_tag2",
-"frontmatter_tag3",
-"ignored_file_tag1",
 "shared_tag",
 "📅/frontmatter_tag3",
 ],
-"type": ["article", "note"],
 }


-def test_backup(test_vault, capsys):
-"""Test backing up the vault."""
-vault_path = test_vault
-config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-vault_config = config.vaults[0]
-vault = Vault(config=vault_config)
+def test_commit_changes_1(test_vault, tmp_path):
+"""Test committing changes to content in the vault.
+
+GIVEN a vault object
+WHEN the commit_changes method is called
+THEN the changes are committed to the vault
+"""
+vault = Vault(config=test_vault)
+
+content = Path(f"{tmp_path}/vault/test1.md").read_text()
+assert "new_key: new_key_value" not in content
+vault.add_metadata(MetadataType.FRONTMATTER, "new_key", "new_key_value")
+vault.commit_changes()
+committed_content = Path(f"{tmp_path}/vault/test1.md").read_text()
+assert "new_key: new_key_value" in committed_content
+
+
+def test_commit_changes_2(test_vault, tmp_path):
+"""Test committing changes to content in the vault in dry run mode.
+
+GIVEN a vault object
+WHEN dry_run is set to True
+THEN no changes are committed to the vault
+"""
+vault = Vault(config=test_vault, dry_run=True)
+content = Path(f"{tmp_path}/vault/test1.md").read_text()
+assert "new_key: new_key_value" not in content
+
+vault.add_metadata(MetadataType.FRONTMATTER, "new_key", "new_key_value")
+vault.commit_changes()
+committed_content = Path(f"{tmp_path}/vault/test1.md").read_text()
+assert "new_key: new_key_value" not in committed_content
+
+
+def test_backup_1(test_vault, tmp_path, capsys):
+"""Test the backup method.
+
+GIVEN a vault object
+WHEN the backup method is called
+THEN the vault is backed up
+"""
+vault = Vault(config=test_vault)

 vault.backup()

 captured = capsys.readouterr()
-assert Path(f"{vault_path}.bak").exists() is True
+assert vault.backup_path.exists() is True
 assert captured.out == Regex(r"SUCCESS +| backed up to")

 vault.info()
@@ -239,42 +260,15 @@ def test_backup(test_vault, capsys):
 assert captured.out == Regex(r"Backup path +\│[\s ]+/[\d\w]+")


-def test_commit(test_vault, tmp_path):
-"""Test committing changes to content in the vault."""
-vault_path = test_vault
-config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-vault_config = config.vaults[0]
-vault = Vault(config=vault_config)
-content = Path(f"{tmp_path}/vault/test1.md").read_text()
-assert "new_key: new_key_value" not in content
-
-vault.add_metadata(MetadataType.FRONTMATTER, "new_key", "new_key_value")
-vault.commit_changes()
-assert "new_key: new_key_value" not in content
-
-
-def test_commit_dry_run(test_vault, tmp_path):
-"""Test committing changes to content in the vault in dry run mode."""
-vault_path = test_vault
-config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-vault_config = config.vaults[0]
-vault = Vault(config=vault_config, dry_run=True)
-content = Path(f"{tmp_path}/vault/test1.md").read_text()
-assert "new_key: new_key_value" not in content
-
-vault.add_metadata(MetadataType.FRONTMATTER, "new_key", "new_key_value")
-vault.commit_changes()
-assert "new_key: new_key_value" not in content
-
-
-def test_backup_dryrun(test_vault, capsys):
-"""Test backing up the vault."""
-vault_path = test_vault
-config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-vault_config = config.vaults[0]
-vault = Vault(config=vault_config, dry_run=True)
-
-print(f"vault.dry_run: {vault.dry_run}")
+def test_backup_2(test_vault, capsys):
+"""Test the backup method.
+
+GIVEN a vault object
+WHEN dry_run is set to True and the backup method is called
+THEN the vault is not backed up
+"""
+vault = Vault(config=test_vault, dry_run=True)
+
 vault.backup()

 captured = capsys.readouterr()

@@ -282,12 +276,14 @@ def test_backup_dryrun(test_vault, capsys):
 assert captured.out == Regex(r"DRYRUN +| Backup up vault to")


-def test_delete_backup(test_vault, capsys):
-"""Test deleting the vault backup."""
-vault_path = test_vault
-config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-vault_config = config.vaults[0]
-vault = Vault(config=vault_config)
+def test_delete_backup_1(test_vault, capsys):
+"""Test deleting the vault backup.
+
+GIVEN a vault object
+WHEN the delete_backup method is called
+THEN the backup is deleted
+"""
+vault = Vault(config=test_vault)

 vault.backup()
 vault.delete_backup()

@@ -302,12 +298,14 @@ def test_delete_backup(test_vault, capsys):
 assert captured.out == Regex(r"Backup +\│ None")


-def test_delete_backup_dryrun(test_vault, capsys):
-"""Test deleting the vault backup."""
-vault_path = test_vault
-config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-vault_config = config.vaults[0]
-vault = Vault(config=vault_config, dry_run=True)
+def test_delete_backup_2(test_vault, capsys):
+"""Test delete_backup method in dry run mode.
+
+GIVEN a vault object
+WHEN the dry_run is True and the delete_backup method is called
+THEN the backup is not deleted
+"""
+vault = Vault(config=test_vault, dry_run=True)

 Path.mkdir(vault.backup_path)
 vault.delete_backup()
@@ -317,17 +315,17 @@ def test_delete_backup_dryrun(test_vault, capsys):
 assert vault.backup_path.exists() is True


-def test_delete_inline_tag(test_vault) -> None:
-"""Test deleting an inline tag."""
-vault_path = test_vault
-config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-vault_config = config.vaults[0]
-vault = Vault(config=vault_config)
-
-assert vault.delete_inline_tag("no tag") == 0
-assert vault.delete_inline_tag("intext_tag2") == 2
+def test_delete_inline_tag_1(test_vault) -> None:
+"""Test delete_inline_tag() method.
+
+GIVEN a vault object
+WHEN the delete_inline_tag method is called
+THEN the inline tag is deleted
+"""
+vault = Vault(config=test_vault)
+
+assert vault.delete_inline_tag("intext_tag2") == 1
 assert vault.metadata.tags == [
-"ignored_file_tag2",
 "inline_tag_bottom1",
 "inline_tag_bottom2",
 "inline_tag_top1",

@@ -337,29 +335,65 @@ def test_delete_inline_tag(test_vault) -> None:
 ]


-def test_delete_metadata(test_vault) -> None:
-"""Test deleting a metadata key/value."""
-vault_path = test_vault
-config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-vault_config = config.vaults[0]
-vault = Vault(config=vault_config)
+def test_delete_inline_tag_2(test_vault) -> None:
+"""Test delete_inline_tag() method.
+
+GIVEN a vault object
+WHEN the delete_inline_tag method is called with a tag that does not exist
+THEN no changes are made
+"""
+vault = Vault(config=test_vault)
+
+assert vault.delete_inline_tag("no tag") == 0
+
+
+def test_delete_metadata_1(test_vault) -> None:
+"""Test deleting a metadata key/value.
+
+GIVEN a vault object
+WHEN the delete_metadata method is called with a key and value
+THEN the specified metadata key/value is deleted
+"""
+vault = Vault(config=test_vault)
+
+assert vault.delete_metadata("top_key1", "top_key1_value") == 1
+assert vault.metadata.dict["top_key1"] == []
+
+
+def test_delete_metadata_2(test_vault) -> None:
+"""Test deleting a metadata key/value.
+
+GIVEN a vault object
+WHEN the delete_metadata method is called with a key
+THEN the specified metadata key is deleted
+"""
+vault = Vault(config=test_vault)
+
+assert vault.delete_metadata("top_key2") == 1
+assert "top_key2" not in vault.metadata.dict
+
+
+def test_delete_metadata_3(test_vault) -> None:
+"""Test deleting a metadata key/value.
+
+GIVEN a vault object
+WHEN the delete_metadata method is called with a key and/or value that does not exist
+THEN no changes are made
+"""
+vault = Vault(config=test_vault)

 assert vault.delete_metadata("no key") == 0
 assert vault.delete_metadata("top_key1", "no_value") == 0

-assert vault.delete_metadata("top_key1", "top_key1_value") == 2
-assert vault.metadata.dict["top_key1"] == []
-
-assert vault.delete_metadata("top_key2") == 2
-assert "top_key2" not in vault.metadata.dict
-
-
-def test_export_csv(tmp_path, test_vault):
-"""Test exporting the vault to a CSV file."""
-vault_path = test_vault
-config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-vault_config = config.vaults[0]
-vault = Vault(config=vault_config)
+
+def test_export_csv_1(tmp_path, test_vault):
+"""Test exporting the vault to a CSV file.
+
+GIVEN a vault object
+WHEN the export_metadata method is called with a path and export_format of csv
+THEN the vault metadata is exported to a CSV file
+"""
+vault = Vault(config=test_vault)

 export_file = Path(f"{tmp_path}/export.csv")

 vault.export_metadata(path=export_file, export_format="csv")
@@ -367,12 +401,29 @@ def test_export_csv(tmp_path, test_vault):
|
|||||||
assert "frontmatter,date_created,2022-12-22" in export_file.read_text()
|
assert "frontmatter,date_created,2022-12-22" in export_file.read_text()
|
||||||
|
|
||||||
|
|
||||||
|
def test_export_csv_2(tmp_path, test_vault):
|
||||||
|
"""Test exporting the vault to a CSV file.
|
||||||
|
|
||||||
|
GIVEN a vault object
|
||||||
|
WHEN the export_metadata method is called with a path that does not exist and export_format of csv
|
||||||
|
THEN an error is raised
|
||||||
|
"""
|
||||||
|
vault = Vault(config=test_vault)
|
||||||
|
export_file = Path(f"{tmp_path}/does_not_exist/export.csv")
|
||||||
|
|
||||||
|
with pytest.raises(typer.Exit):
|
||||||
|
vault.export_metadata(path=export_file, export_format="csv")
|
||||||
|
assert export_file.exists() is False
|
||||||
|
|
||||||
|
|
||||||
def test_export_json(tmp_path, test_vault):
|
def test_export_json(tmp_path, test_vault):
|
||||||
"""Test exporting the vault to a CSV file."""
|
"""Test exporting the vault to a JSON file.
|
||||||
vault_path = test_vault
|
|
||||||
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
|
GIVEN a vault object
|
||||||
vault_config = config.vaults[0]
|
WHEN the export_metadata method is called with a path and export_format of csv
|
||||||
vault = Vault(config=vault_config)
|
THEN the vault metadata is exported to a JSON file
|
||||||
|
"""
|
||||||
|
vault = Vault(config=test_vault)
|
||||||
export_file = Path(f"{tmp_path}/export.json")
|
export_file = Path(f"{tmp_path}/export.json")
|
||||||
|
|
||||||
vault.export_metadata(path=export_file, export_format="json")
|
vault.export_metadata(path=export_file, export_format="json")
|
@@ -380,8 +431,13 @@ def test_export_json(tmp_path, test_vault):
     assert '"frontmatter": {' in export_file.read_text()


-def test_get_filtered_notes(sample_vault) -> None:
-    """Test filtering notes."""
+def test_get_filtered_notes_1(sample_vault) -> None:
+    """Test filtering notes.
+
+    GIVEN a vault object
+    WHEN the get_filtered_notes method is called with a path filter
+    THEN the notes in scope are filtered
+    """
     vault_path = sample_vault
     config = Config(config_path="tests/fixtures/sample_vault_config.toml", vault_path=vault_path)
     vault_config = config.vaults[0]
@@ -396,21 +452,66 @@ def test_get_filtered_notes(sample_vault) -> None:
     assert len(vault.all_notes) == 13
     assert len(vault.notes_in_scope) == 1


+def test_get_filtered_notes_2(sample_vault) -> None:
+    """Test filtering notes.
+
+    GIVEN a vault object
+    WHEN the get_filtered_notes method is called with a key filter
+    THEN the notes in scope are filtered
+    """
+    vault_path = sample_vault
+    config = Config(config_path="tests/fixtures/sample_vault_config.toml", vault_path=vault_path)
+    vault_config = config.vaults[0]
+
     filters = [VaultFilter(key_filter="on_one_note")]
     vault = Vault(config=vault_config, filters=filters)
     assert len(vault.all_notes) == 13
     assert len(vault.notes_in_scope) == 1


+def test_get_filtered_notes_3(sample_vault) -> None:
+    """Test filtering notes.
+
+    GIVEN a vault object
+    WHEN the get_filtered_notes method is called with a key and a value filter
+    THEN the notes in scope are filtered
+    """
+    vault_path = sample_vault
+    config = Config(config_path="tests/fixtures/sample_vault_config.toml", vault_path=vault_path)
+    vault_config = config.vaults[0]
     filters = [VaultFilter(key_filter="type", value_filter="book")]
     vault = Vault(config=vault_config, filters=filters)
     assert len(vault.all_notes) == 13
     assert len(vault.notes_in_scope) == 10


+def test_get_filtered_notes_4(sample_vault) -> None:
+    """Test filtering notes.
+
+    GIVEN a vault object
+    WHEN the get_filtered_notes method is called with a tag filter
+    THEN the notes in scope are filtered
+    """
+    vault_path = sample_vault
+    config = Config(config_path="tests/fixtures/sample_vault_config.toml", vault_path=vault_path)
+    vault_config = config.vaults[0]
     filters = [VaultFilter(tag_filter="brunch")]
     vault = Vault(config=vault_config, filters=filters)
     assert len(vault.all_notes) == 13
     assert len(vault.notes_in_scope) == 1


+def test_get_filtered_notes_5(sample_vault) -> None:
+    """Test filtering notes.
+
+    GIVEN a vault object
+    WHEN the get_filtered_notes method is called with a tag and a path filter
+    THEN the notes in scope are filtered
+    """
+    vault_path = sample_vault
+    config = Config(config_path="tests/fixtures/sample_vault_config.toml", vault_path=vault_path)
+    vault_config = config.vaults[0]
     filters = [VaultFilter(tag_filter="brunch"), VaultFilter(path_filter="inbox")]
     vault = Vault(config=vault_config, filters=filters)
     assert len(vault.all_notes) == 13
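The five get_filtered_notes tests each build the vault with one VaultFilter (or, in the last case, two in a single list). As a rough sketch of the same construction outside the fixtures, with the config path as a placeholder and the import locations assumed:

# Sketch only: import locations and the config path are assumptions; the
# VaultFilter keyword arguments mirror the tests above.
from pathlib import Path

from obsidian_metadata._config import Config              # assumed import location
from obsidian_metadata.models import Vault, VaultFilter   # assumed import location

config = Config(config_path="path/to/config.toml", vault_path=Path("path/to/vault"))
vault_config = config.vaults[0]

# Filters are passed as a list; they narrow vault.notes_in_scope while
# vault.all_notes continues to report every note in the vault.
filters = [
    VaultFilter(path_filter="inbox"),                     # match on note path
    VaultFilter(key_filter="type", value_filter="book"),  # match on a key/value pair
    VaultFilter(tag_filter="brunch"),                     # match on an inline tag
]
vault = Vault(config=vault_config, filters=filters)
print(f"{len(vault.notes_in_scope)} of {len(vault.all_notes)} notes in scope")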
@@ -418,11 +519,13 @@ def test_get_filtered_notes(sample_vault) -> None:


 def test_info(test_vault, capsys):
-    """Test printing vault information."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
+    """Test info() method.
+
+    GIVEN a vault object
+    WHEN the info method is called
+    THEN the vault info is printed
+    """
+    vault = Vault(config=test_vault)

     vault.info()

@@ -433,11 +536,13 @@ def test_info(test_vault, capsys):


 def test_list_editable_notes(test_vault, capsys) -> None:
-    """Test listing editable notes."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
+    """Test list_editable_notes() method.
+
+    GIVEN a vault object
+    WHEN the list_editable_notes() method is called
+    THEN the editable notes in scope are printed
+    """
+    vault = Vault(config=test_vault)

     vault.list_editable_notes()
     captured = capsys.readouterr()
@@ -445,17 +550,29 @@ def test_list_editable_notes(test_vault, capsys) -> None:
     assert captured.out == Regex(r"\d +test1\.md")


-def test_rename_inline_tag(test_vault) -> None:
-    """Test renaming an inline tag."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
-
-    assert vault.rename_inline_tag("no tag", "new_tag") == 0
-    assert vault.rename_inline_tag("intext_tag2", "new_tag") == 2
+def test_move_inline_metadata_1(test_vault) -> None:
+    """Test move_inline_metadata() method.
+
+    GIVEN a vault with inline metadata.
+    WHEN the move_inline_metadata() method is called.
+    THEN the inline metadata is moved to the top of the file.
+    """
+    vault = Vault(config=test_vault)
+
+    assert vault.move_inline_metadata(location=InsertLocation.TOP) == 1
+
+
+def test_rename_inline_tag_1(test_vault) -> None:
+    """Test rename_inline_tag() method.
+
+    GIVEN a vault object
+    WHEN the rename_inline_tag() method is called with a tag that is found
+    THEN the inline tag is renamed
+    """
+    vault = Vault(config=test_vault)
+
+    assert vault.rename_inline_tag("intext_tag2", "new_tag") == 1
     assert vault.metadata.tags == [
-        "ignored_file_tag2",
         "inline_tag_bottom1",
         "inline_tag_bottom2",
         "inline_tag_top1",
@@ -466,32 +583,62 @@ def test_rename_inline_tag(test_vault) -> None:
     ]

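test_move_inline_metadata_1 above is the only direct coverage of the new move_inline_metadata() call; it passes InsertLocation.TOP and asserts a return value of 1, which reads as the number of notes that changed. A minimal sketch of driving the same call against a configured vault, with the import locations and paths assumed:

# Sketch only: import locations and paths are assumptions; the call itself
# mirrors the assertion in test_move_inline_metadata_1.
from pathlib import Path

from obsidian_metadata._config import Config                 # assumed import location
from obsidian_metadata.models import InsertLocation, Vault   # assumed import location

config = Config(config_path="path/to/config.toml", vault_path=Path("path/to/vault"))
vault = Vault(config=config.vaults[0])

# Move each note's inline metadata to the top of the note and report how
# many notes were rewritten.
changed = vault.move_inline_metadata(location=InsertLocation.TOP)
print(f"inline metadata repositioned in {changed} note(s)")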

-def test_rename_metadata(test_vault) -> None:
-    """Test renaming a metadata key/value."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
+def test_rename_inline_tag_2(test_vault) -> None:
+    """Test rename_inline_tag() method.
+
+    GIVEN a vault object
+    WHEN the rename_inline_tag() method is called with a tag that is not found
+    THEN the inline tag is not renamed
+    """
+    vault = Vault(config=test_vault)
+
+    assert vault.rename_inline_tag("no tag", "new_tag") == 0
+
+
+def test_rename_metadata_1(test_vault) -> None:
+    """Test rename_metadata() method.
+
+    GIVEN a vault object
+    WHEN the rename_metadata() method is called with a key or key/value that is not found
+    THEN the metadata is not renamed
+    """
+    vault = Vault(config=test_vault)

     assert vault.rename_metadata("no key", "new_key") == 0
     assert vault.rename_metadata("tags", "nonexistent_value", "new_vaule") == 0

-    assert vault.rename_metadata("tags", "frontmatter_tag1", "new_vaule") == 2
-    assert vault.metadata.dict["tags"] == [
+
+def test_rename_metadata_2(test_vault) -> None:
+    """Test rename_metadata() method.
+
+    GIVEN a vault object
+    WHEN the rename_metadata() method is called with a key and no value
+    THEN the metadata key is renamed
+    """
+    vault = Vault(config=test_vault)
+
+    assert vault.rename_metadata("tags", "new_key") == 1
+    assert "tags" not in vault.metadata.dict
+    assert vault.metadata.dict["new_key"] == [
+        "frontmatter_tag1",
         "frontmatter_tag2",
-        "frontmatter_tag3",
-        "ignored_file_tag1",
-        "new_vaule",
         "shared_tag",
         "📅/frontmatter_tag3",
     ]

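The rename tests now split the old combined test into not-found, key-only, and key/value cases, and the integer each call returns again reads as a count of changed notes. A short sketch of both rename calls; the tag and key names below are placeholders and the import locations are assumed:

# Sketch only: import locations, paths, and the example names are assumptions;
# the call signatures mirror the rename tests above.
from pathlib import Path

from obsidian_metadata._config import Config   # assumed import location
from obsidian_metadata.models import Vault     # assumed import location

config = Config(config_path="path/to/config.toml", vault_path=Path("path/to/vault"))
vault = Vault(config=config.vaults[0])

# Rename an inline tag wherever it appears; a return of 0 means no note matched.
vault.rename_inline_tag("old_tag", "new_tag")

# Rename an entire key...
vault.rename_metadata("old_key", "new_key")

# ...or a single value beneath a key.
vault.rename_metadata("old_key", "old_value", "new_value")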
-    assert vault.rename_metadata("tags", "new_key") == 2
-    assert "tags" not in vault.metadata.dict
-    assert vault.metadata.dict["new_key"] == [
+
+def test_rename_metadata_3(test_vault) -> None:
+    """Test rename_metadata() method.
+
+    GIVEN a vault object
+    WHEN the rename_metadata() method is called with a key and value
+    THEN the metadata key/value is renamed
+    """
+    vault = Vault(config=test_vault)
+
+    assert vault.rename_metadata("tags", "frontmatter_tag1", "new_vaule") == 1
+    assert vault.metadata.dict["tags"] == [
         "frontmatter_tag2",
-        "frontmatter_tag3",
-        "ignored_file_tag1",
         "new_vaule",
         "shared_tag",
         "📅/frontmatter_tag3",
@@ -499,23 +646,23 @@ def test_rename_metadata(test_vault) -> None:


 def test_transpose_metadata(test_vault) -> None:
-    """Test transposing metadata."""
-    vault_path = test_vault
-    config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
-    vault_config = config.vaults[0]
-    vault = Vault(config=vault_config)
-
-    assert vault.transpose_metadata(begin=MetadataType.INLINE, end=MetadataType.FRONTMATTER) == 2
+    """Test transpose_metadata() method.
+
+    GIVEN a vault object
+    WHEN the transpose_metadata() method is called
+    THEN the metadata is transposed
+    """
+    vault = Vault(config=test_vault)
+
+    assert vault.transpose_metadata(begin=MetadataType.INLINE, end=MetadataType.FRONTMATTER) == 1

     assert vault.metadata.inline_metadata == {}
     assert vault.metadata.frontmatter == {
-        "author": ["author name"],
         "bottom_key1": ["bottom_key1_value"],
         "bottom_key2": ["bottom_key2_value"],
         "date_created": ["2022-12-22"],
         "frontmatter_Key1": ["author name"],
         "frontmatter_Key2": ["article", "note"],
-        "ignored_frontmatter": ["ignore_me"],
         "intext_key": ["intext_value"],
         "key📅": ["📅_key_value"],
         "shared_key1": [
@@ -527,15 +674,12 @@ def test_transpose_metadata(test_vault) -> None:
         "tags": [
             "frontmatter_tag1",
             "frontmatter_tag2",
-            "frontmatter_tag3",
-            "ignored_file_tag1",
             "shared_tag",
             "📅/frontmatter_tag3",
         ],
         "top_key1": ["top_key1_value"],
         "top_key2": ["top_key2_value"],
         "top_key3": ["top_key3_value_as_link"],
-        "type": ["article", "note"],
     }

     assert (
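The reworked test_transpose_metadata expects a single note to change and trims the expected frontmatter accordingly. As a closing sketch, transposing in the other direction would be the same call with begin and end swapped; import locations and paths are assumed:

# Sketch only: import locations and paths are assumptions; the keyword
# arguments mirror test_transpose_metadata above.
from pathlib import Path

from obsidian_metadata._config import Config              # assumed import location
from obsidian_metadata.models import Vault                # assumed import location
from obsidian_metadata.models.enums import MetadataType   # assumed import location

config = Config(config_path="path/to/config.toml", vault_path=Path("path/to/vault"))
vault = Vault(config=config.vaults[0])

# Inline -> frontmatter, as exercised by the test above.
vault.transpose_metadata(begin=MetadataType.INLINE, end=MetadataType.FRONTMATTER)

# Frontmatter -> inline uses the same call with the endpoints reversed.
vault.transpose_metadata(begin=MetadataType.FRONTMATTER, end=MetadataType.INLINE)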