Mirror of https://github.com/natelandau/obsidian-metadata.git (synced 2025-11-18 18:03:39 -05:00)
feat: transpose metadata (#18)
* feat: transpose between frontmatter and inline metadata
* ci: improve codecov patch thresholds
* test: remove ANSI escape sequences from captured `capsys` output
* test: improve fixture for shared keys
* build(deps): update dependencies
* refactor: use deepcopy
* docs: add transpose metadata
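For orientation, the sketch below illustrates what "transposing" means in this change: keys move between a note's frontmatter and its inline metadata, and value lists for shared keys are merged. It is a standalone illustration written for this page, not the code added by the commit; the `transpose` function and the sample dicts are invented, and the merged ordering simply mirrors what the new tests assert.

```python
# Illustrative sketch only -- not the code added in this commit.
# It mimics the behavior the new tests assert: keys move from one
# metadata store to the other, and list values for shared keys merge.
from copy import deepcopy  # the commit message mentions a deepcopy refactor


def transpose(source: dict[str, list[str]], target: dict[str, list[str]]) -> dict[str, list[str]]:
    """Move every key from `source` into `target`, merging value lists."""
    merged = deepcopy(target)
    for key, values in source.items():
        for value in values:
            if value not in merged.setdefault(key, []):
                merged[key].append(value)
    source.clear()
    return merged


frontmatter = {"shared_key1": ["shared_key1_value", "shared_key1_value3"]}
inline = {"shared_key1": ["shared_key1_value", "shared_key1_value2"]}

inline = transpose(frontmatter, inline)
assert frontmatter == {}
assert inline == {"shared_key1": ["shared_key1_value", "shared_key1_value2", "shared_key1_value3"]}
```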
@@ -13,7 +13,7 @@ from pathlib import Path
import pytest

from obsidian_metadata.models.enums import MetadataType
from tests.helpers import Regex
from tests.helpers import Regex, remove_ansi


def test_instantiate_application(test_application) -> None:

@@ -38,8 +38,8 @@ def test_abort(test_application, mocker, capsys) -> None:
)

app.application_main()
captured = capsys.readouterr()
assert "Done!" in captured.out
captured = remove_ansi(capsys.readouterr().out)
assert "Done!" in captured


def test_add_metadata_frontmatter(test_application, mocker, capsys) -> None:

@@ -65,8 +65,8 @@ def test_add_metadata_frontmatter(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"SUCCESS +\| Added metadata to.*\d+.*notes", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"SUCCESS +\| Added metadata to \d+ notes", re.DOTALL)


def test_add_metadata_inline(test_application, mocker, capsys) -> None:

@@ -92,8 +92,8 @@ def test_add_metadata_inline(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"SUCCESS +\| Added metadata to.*\d+.*notes", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"SUCCESS +\| Added metadata to \d+ notes", re.DOTALL)


def test_add_metadata_tag(test_application, mocker, capsys) -> None:

@@ -115,8 +115,8 @@ def test_add_metadata_tag(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"SUCCESS +\| Added metadata to.*\d+.*notes", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"SUCCESS +\| Added metadata to \d+ notes", re.DOTALL)


def test_delete_inline_tag(test_application, mocker, capsys) -> None:

@@ -138,8 +138,8 @@ def test_delete_inline_tag(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"WARNING +\| No notes were changed", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert "WARNING | No notes were changed" in captured

mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",

@@ -156,8 +156,8 @@ def test_delete_inline_tag(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"SUCCESS +\| Deleted.*\d+.*notes", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"SUCCESS +\| Deleted inline tag: breakfast in \d+ notes", re.DOTALL)


def test_delete_key(test_application, mocker, capsys) -> None:

@@ -179,8 +179,8 @@ def test_delete_key(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"WARNING +\| No notes found with a.*key.*matching", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert r"WARNING | No notes found with a key matching: \d{7}" in captured

mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",

@@ -197,10 +197,8 @@ def test_delete_key(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(
r"SUCCESS +\|.*Deleted.*keys.*matching:.*d\\w\+.*from.*10", re.DOTALL
)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"SUCCESS \| Deleted keys matching: d\\w\+ from \d+ notes", re.DOTALL)


def test_delete_value(test_application, mocker, capsys) -> None:

@@ -225,8 +223,8 @@ def test_delete_value(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"WARNING +\| No notes found matching:", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert r"WARNING | No notes found matching: area: \d{7}" in captured

mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",

@@ -246,10 +244,8 @@ def test_delete_value(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(
r"SUCCESS +\| Deleted value.*\^front\\w\+\$.*from.*key.*area.*in.*\d+.*notes", re.DOTALL
)
captured = remove_ansi(capsys.readouterr().out)
assert r"SUCCESS | Deleted value ^front\w+$ from key area in 8 notes" in captured


def test_filter_notes(test_application, mocker, capsys) -> None:

@@ -271,10 +267,10 @@ def test_filter_notes(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"SUCCESS +\| Loaded.*\d+.*notes from.*\d+.*total", re.DOTALL)
assert "02 inline/inline 2.md" in captured.out
assert "03 mixed/mixed 1.md" not in captured.out
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"SUCCESS +\| Loaded \d+ notes from \d+ total", re.DOTALL)
assert "02 inline/inline 2.md" in captured
assert "03 mixed/mixed 1.md" not in captured

mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",

@@ -326,11 +322,11 @@ def test_filter_clear(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert "02 inline/inline 2.md" in captured.out
assert "03 mixed/mixed 1.md" in captured.out
assert "01 frontmatter/frontmatter 4.md" in captured.out
assert "04 no metadata/no_metadata_1.md " in captured.out
captured = remove_ansi(capsys.readouterr().out)
assert "02 inline/inline 2.md" in captured
assert "03 mixed/mixed 1.md" in captured
assert "01 frontmatter/frontmatter 4.md" in captured
assert "04 no metadata/no_metadata_1.md " in captured


def test_inspect_metadata_all(test_application, mocker, capsys) -> None:

@@ -348,8 +344,8 @@ def test_inspect_metadata_all(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"type +│ article", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"type +│ article", re.DOTALL)


def test_rename_inline_tag(test_application, mocker, capsys) -> None:

@@ -375,8 +371,8 @@ def test_rename_inline_tag(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"WARNING +\| No notes were changed", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert "No notes were changed" in captured

mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",

@@ -397,8 +393,8 @@ def test_rename_inline_tag(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"Renamed.*breakfast.*to.*new_tag.*in.*\d+.*notes", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"Renamed breakfast to new_tag in \d+ notes", re.DOTALL)


def test_rename_key(test_application, mocker, capsys) -> None:

@@ -424,8 +420,8 @@ def test_rename_key(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert "WARNING | No notes were changed" in captured.out
captured = remove_ansi(capsys.readouterr().out)
assert "WARNING | No notes were changed" in captured

mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",

@@ -446,8 +442,8 @@ def test_rename_key(test_application, mocker, capsys) -> None:

with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"Renamed.*tags.*to.*new_tags.*in.*\d+.*notes", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"Renamed tags to new_tags in \d+ notes", re.DOTALL)


def test_rename_value_fail(test_application, mocker, capsys) -> None:

@@ -476,8 +472,8 @@ def test_rename_value_fail(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"WARNING +\| No notes were changed", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert "WARNING | No notes were changed" in captured

mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",

@@ -501,11 +497,10 @@ def test_rename_value_fail(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(
r"SUCCESS +\| Renamed.*'area:frontmatter'.*to.*'area:new_key'", re.DOTALL
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(
r"SUCCESS +\| Renamed 'area:frontmatter' to 'area:new_key' in \d+ notes", re.DOTALL
)
assert captured.out == Regex(r".*in.*\d+.*notes.*", re.DOTALL)


def test_review_no_changes(test_application, mocker, capsys) -> None:

@@ -518,8 +513,8 @@ def test_review_no_changes(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"INFO +\| No changes to review", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert "INFO | No changes to review" in captured


def test_review_changes(test_application, mocker, capsys) -> None:

@@ -544,10 +539,49 @@ def test_review_changes(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r".*Found.*\d+.*changed notes in the vault", re.DOTALL)
assert "- tags:" in captured.out
assert "+ new_tags:" in captured.out
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r".*Found \d+ changed notes in the vault", re.DOTALL)
assert "- tags:" in captured
assert "+ new_tags:" in captured


def test_transpose_metadata(test_application, mocker, capsys) -> None:
"""Transpose metadata."""
app = test_application
app._load_vault()

assert app.vault.metadata.inline_metadata["inline_key"] == ["inline_key_value"]
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",
side_effect=["transpose_metadata", KeyError],
)
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_selection",
side_effect=["inline_to_frontmatter", "transpose_all"],
)
with pytest.raises(KeyError):
app.application_main()
assert app.vault.metadata.inline_metadata == {}
assert app.vault.metadata.frontmatter["inline_key"] == ["inline_key_value"]
captured = remove_ansi(capsys.readouterr().out)
assert "SUCCESS | Transposed Inline Metadata to Frontmatter in 5 notes" in captured

app = test_application
app._load_vault()

assert app.vault.metadata.frontmatter["date_created"] == ["2022-12-21", "2022-12-22"]
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_application_main",
side_effect=["transpose_metadata", KeyError],
)
mocker.patch(
"obsidian_metadata.models.application.Questions.ask_selection",
side_effect=["frontmatter_to_inline", "transpose_all"],
)
with pytest.raises(KeyError):
app.application_main()
assert app.vault.metadata.inline_metadata["date_created"] == ["2022-12-21", "2022-12-22"]
assert app.vault.metadata.frontmatter == {}


def test_vault_backup(test_application, mocker, capsys) -> None:

@@ -565,8 +599,10 @@ def test_vault_backup(test_application, mocker, capsys) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"SUCCESS +\|.*application\.bak", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(
r"SUCCESS +\| Vault backed up to:[-\w\d\/\s]+application\.bak", re.DOTALL
)


def test_vault_delete(test_application, mocker, capsys, tmp_path) -> None:

@@ -586,5 +622,5 @@ def test_vault_delete(test_application, mocker, capsys, tmp_path) -> None:
)
with pytest.raises(KeyError):
app.application_main()
captured = capsys.readouterr()
assert captured.out == Regex(r"SUCCESS +\| Backup deleted", re.DOTALL)
captured = remove_ansi(capsys.readouterr().out)
assert captured == Regex(r"SUCCESS +\| Backup deleted", re.DOTALL)
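A note on the test pattern used throughout the file above: `Questions.ask_application_main` is patched with a `side_effect` list whose last entry is `KeyError`, so the interactive menu runs the chosen action once and the loop is then broken out of via `pytest.raises(KeyError)`. Below is a minimal standalone sketch of that pattern; the `menu_loop` function and its output are invented for illustration, and `mocker` comes from pytest-mock, which these tests already use.

```python
import pytest


def menu_loop(ask):
    """Stand-in for Application.application_main(): ask for an action until interrupted."""
    while True:
        choice = ask()
        print(f"selected: {choice}")


def test_menu_loop_pattern(mocker, capsys):
    # First call returns an action; the second raises KeyError and ends the loop.
    ask = mocker.Mock(side_effect=["transpose_metadata", KeyError])
    with pytest.raises(KeyError):
        menu_loop(ask)
    assert "selected: transpose_metadata" in capsys.readouterr().out
```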
@@ -3,25 +3,16 @@ area: frontmatter
date_created: 2022-12-22
date_modified: 2022-12-22
tags:
- food/fruit/apple
- food/fruit/pear
- dinner
- lunch
- breakfast
thoughts:
rating: 8
reviewable: false
levels:
level1:
- level1a
- level1b
level2:
- level2a
- level2b
- food/fruit/apple
- food/fruit/pear
- dinner
- lunch
- breakfast
author: John Doe
status: new
type: ["book", "article", "note", "one-off"]
---

# Page Title H1

# Headings

@@ -3,25 +3,16 @@ area: frontmatter
date_created: 2022-12-22
date_modified: 2022-11-14
tags:
- food/fruit/apple
- food/fruit/pear
- dinner
- lunch
- breakfast
thoughts:
rating: 8
reviewable: false
levels:
level1:
- level1a
- level1b
level2:
- level2a
- level2b
- food/fruit/apple
- food/fruit/pear
- dinner
- lunch
- breakfast
author: John Doe
status: new
type: ["book", "article", "note"]
---

# Page Title H1

# Headings

@@ -3,25 +3,16 @@ area: frontmatter
date_created: 2022-12-22
date_modified: 2022-10-01
tags:
- food/fruit/apple
- food/fruit/pear
- dinner
- lunch
- breakfast
thoughts:
rating: 8
reviewable: false
levels:
level1:
- level1a
- level1b
level2:
- level2a
- level2b
- food/fruit/apple
- food/fruit/pear
- dinner
- lunch
- breakfast
author: John Doe
status: new
type: ["book", "article", "note"]
---

# Page Title H1

# Headings

@@ -3,21 +3,11 @@ area: frontmatter
date_created: 2022-12-22
date_modified: 2022-12-22
tags:
- food/fruit/apple
- food/fruit/pear
- dinner
- lunch
- breakfast
thoughts:
rating: 8
reviewable: false
levels:
level1:
- level1a
- level1b
level2:
- level2a
- level2b
- food/fruit/apple
- food/fruit/pear
- dinner
- lunch
- breakfast
author: John Doe
status: new
type: ["book", "article", "note"]

@@ -6,10 +6,6 @@ tags:
- breakfast
- not_food
author: John Doe
nested_list:
nested_list_one:
- nested_list_one_a
- nested_list_one_b
type:
- article
- note
tests/fixtures/test_vault/test1.md (vendored): 18 changed lines
@@ -1,14 +1,16 @@
---
date_created: 2022-12-22
tags:
- shared_tag
- frontmatter_tag1
- frontmatter_tag2
-
- 📅/frontmatter_tag3
- shared_tag
- frontmatter_tag1
- frontmatter_tag2
-
- 📅/frontmatter_tag3
frontmatter_Key1: author name
frontmatter_Key2: ["article", "note"]
shared_key1: shared_key1_value
shared_key1:
- shared_key1_value
- shared_key1_value3
shared_key2: shared_key2_value1
---

@@ -18,10 +20,12 @@ top_key1:: top_key1_value
**top_key2:: top_key2_value**
top_key3:: [[top_key3_value_as_link]]
shared_key1:: shared_key1_value
shared_key1:: shared_key1_value2
shared_key2:: shared_key2_value2
emoji_📅_key:: emoji_📅_key_value
key📅:: 📅_key_value

# Heading 1

Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. #intext_tag1 Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu [intext_key:: intext_value] fugiat nulla (#intext_tag2) pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est lab

```python
@@ -22,6 +22,19 @@ class KeyInputs:
THREE = "3"


def remove_ansi(text) -> str:
"""Remove ANSI escape sequences from a string.

Args:
text (str): String to remove ANSI escape sequences from.

Returns:
str: String without ANSI escape sequences.
"""
ansi_chars = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -\/]*[@-~]")
return ansi_chars.sub("", text)


class Regex:
"""Assert that a given string meets some expectations.
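As a quick, hypothetical check of the new helper above: the colored sample string below is invented for illustration, and only the import path follows what the application tests in this commit use.

```python
from tests.helpers import remove_ansi  # import path as used in application_test.py above

colored = "\x1b[32mSUCCESS\x1b[0m | Added metadata to 10 notes"
assert remove_ansi(colored) == "SUCCESS | Added metadata to 10 notes"
```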
@@ -36,7 +36,7 @@ def test_note_create(sample_note) -> None:
"date_created": ["2022-12-22"],
"frontmatter_Key1": ["author name"],
"frontmatter_Key2": ["article", "note"],
"shared_key1": ["shared_key1_value"],
"shared_key1": ["shared_key1_value", "shared_key1_value3"],
"shared_key2": ["shared_key2_value1"],
"tags": [
"frontmatter_tag1",

@@ -58,9 +58,9 @@ def test_note_create(sample_note) -> None:
assert note.inline_metadata.dict == {
"bottom_key1": ["bottom_key1_value"],
"bottom_key2": ["bottom_key2_value"],
"emoji_📅_key": ["emoji_📅_key_value"],
"intext_key": ["intext_value"],
"shared_key1": ["shared_key1_value"],
"key📅": ["📅_key_value"],
"shared_key1": ["shared_key1_value", "shared_key1_value2"],
"shared_key2": ["shared_key2_value2"],
"top_key1": ["top_key1_value"],
"top_key2": ["top_key2_value"],

@@ -127,7 +127,7 @@ def test_add_metadata_frontmatter(sample_note) -> None:
"frontmatter_Key1": ["author name"],
"frontmatter_Key2": ["article", "note"],
"new_key1": [],
"shared_key1": ["shared_key1_value"],
"shared_key1": ["shared_key1_value", "shared_key1_value3"],
"shared_key2": ["shared_key2_value1"],
"tags": [
"frontmatter_tag1",

@@ -143,7 +143,7 @@ def test_add_metadata_frontmatter(sample_note) -> None:
"frontmatter_Key2": ["article", "note"],
"new_key1": [],
"new_key2": ["new_key2_value"],
"shared_key1": ["shared_key1_value"],
"shared_key1": ["shared_key1_value", "shared_key1_value3"],
"shared_key2": ["shared_key2_value1"],
"tags": [
"frontmatter_tag1",

@@ -164,7 +164,7 @@ def test_add_metadata_frontmatter(sample_note) -> None:
"frontmatter_Key2": ["article", "note"],
"new_key1": [],
"new_key2": ["new_key2_value", "new_key2_value2", "new_key2_value3"],
"shared_key1": ["shared_key1_value"],
"shared_key1": ["shared_key1_value", "shared_key1_value3"],
"shared_key2": ["shared_key2_value1"],
"tags": [
"frontmatter_tag1",

@@ -271,6 +271,14 @@ def test_delete_metadata(sample_note) -> Note:
assert "bottom_key2" not in note.inline_metadata.dict
assert note.file_content != Regex(r"bottom_key2")

assert note.delete_metadata("shared_key1", area=MetadataType.INLINE) is True
assert note.frontmatter.dict["shared_key1"] == ["shared_key1_value", "shared_key1_value3"]
assert "shared_key1" not in note.inline_metadata.dict

assert note.delete_metadata("shared_key2", area=MetadataType.FRONTMATTER) is True
assert note.inline_metadata.dict["shared_key2"] == ["shared_key2_value2"]
assert "shared_key2" not in note.frontmatter.dict


def test_has_changes(sample_note) -> None:
"""Test has changes."""

@@ -506,9 +514,9 @@ def test_rename_inline_metadata(sample_note) -> None:
assert note.file_content != Regex(r"bottom_key1::")
assert note.file_content == Regex(r"new_key::")

note._rename_inline_metadata("emoji_📅_key", "emoji_📅_key_value", "new_value")
assert note.file_content != Regex(r"emoji_📅_key:: ?emoji_📅_key_value")
assert note.file_content == Regex(r"emoji_📅_key:: ?new_value")
note._rename_inline_metadata("key📅", "📅_key_value", "new_value")
assert note.file_content != Regex(r"key📅:: ?📅_key_value")
assert note.file_content == Regex(r"key📅:: ?new_value")


def test_rename_metadata(sample_note) -> None:
@@ -539,6 +547,251 @@ def test_rename_metadata(sample_note) -> None:
assert note.file_content == Regex(r"new_key:: new_value")


def test_transpose_frontmatter(sample_note) -> None:
"""Test transposing metadata."""
note = Note(note_path=sample_note)
note.frontmatter.dict = {}
assert note.transpose_metadata(begin=MetadataType.FRONTMATTER, end=MetadataType.INLINE) is False

note = Note(note_path=sample_note)
assert (
note.transpose_metadata(
begin=MetadataType.FRONTMATTER,
end=MetadataType.INLINE,
key="not_a_key",
)
is False
)
assert (
note.transpose_metadata(
begin=MetadataType.FRONTMATTER,
end=MetadataType.INLINE,
key="frontmatter_Key2",
value="not_a_value",
)
is False
)
assert (
note.transpose_metadata(
begin=MetadataType.FRONTMATTER,
end=MetadataType.INLINE,
key="frontmatter_Key2",
value=["not_a_value", "not_a_value2"],
)
is False
)

# Transpose all frontmatter metadata to inline metadata
assert note.transpose_metadata(begin=MetadataType.FRONTMATTER, end=MetadataType.INLINE) is True
assert note.frontmatter.dict == {}
assert note.inline_metadata.dict == {
"bottom_key1": ["bottom_key1_value"],
"bottom_key2": ["bottom_key2_value"],
"date_created": ["2022-12-22"],
"frontmatter_Key1": ["author name"],
"frontmatter_Key2": ["article", "note"],
"intext_key": ["intext_value"],
"key📅": ["📅_key_value"],
"shared_key1": [
"shared_key1_value",
"shared_key1_value2",
"shared_key1_value3",
],
"shared_key2": ["shared_key2_value2", "shared_key2_value1"],
"tags": [
"frontmatter_tag1",
"frontmatter_tag2",
"shared_tag",
"📅/frontmatter_tag3",
],
"top_key1": ["top_key1_value"],
"top_key2": ["top_key2_value"],
"top_key3": ["top_key3_value_as_link"],
}

# Transpose when key exists in both frontmatter and inline metadata
note = Note(note_path=sample_note)
assert (
note.transpose_metadata(
begin=MetadataType.FRONTMATTER,
end=MetadataType.INLINE,
key="shared_key1",
)
is True
)
assert note.frontmatter.dict == {
"date_created": ["2022-12-22"],
"frontmatter_Key1": ["author name"],
"frontmatter_Key2": ["article", "note"],
"shared_key2": ["shared_key2_value1"],
"tags": [
"frontmatter_tag1",
"frontmatter_tag2",
"shared_tag",
"📅/frontmatter_tag3",
],
}
assert note.inline_metadata.dict == {
"bottom_key1": ["bottom_key1_value"],
"bottom_key2": ["bottom_key2_value"],
"intext_key": ["intext_value"],
"key📅": ["📅_key_value"],
"shared_key1": [
"shared_key1_value",
"shared_key1_value2",
"shared_key1_value3",
],
"shared_key2": ["shared_key2_value2"],
"top_key1": ["top_key1_value"],
"top_key2": ["top_key2_value"],
"top_key3": ["top_key3_value_as_link"],
}

# Transpose a single key and its respective values
note = Note(note_path=sample_note)
assert (
note.transpose_metadata(
begin=MetadataType.INLINE,
end=MetadataType.FRONTMATTER,
key="top_key1",
)
is True
)
assert note.frontmatter.dict == {
"date_created": ["2022-12-22"],
"frontmatter_Key1": ["author name"],
"frontmatter_Key2": ["article", "note"],
"shared_key1": ["shared_key1_value", "shared_key1_value3"],
"shared_key2": ["shared_key2_value1"],
"tags": [
"frontmatter_tag1",
"frontmatter_tag2",
"shared_tag",
"📅/frontmatter_tag3",
],
"top_key1": ["top_key1_value"],
}
assert note.inline_metadata.dict == {
"bottom_key1": ["bottom_key1_value"],
"bottom_key2": ["bottom_key2_value"],
"intext_key": ["intext_value"],
"key📅": ["📅_key_value"],
"shared_key1": ["shared_key1_value", "shared_key1_value2"],
"shared_key2": ["shared_key2_value2"],
"top_key2": ["top_key2_value"],
"top_key3": ["top_key3_value_as_link"],
}

# Transpose a key when its value is a list
note = Note(note_path=sample_note)
assert (
note.transpose_metadata(
begin=MetadataType.FRONTMATTER,
end=MetadataType.INLINE,
key="frontmatter_Key2",
value=["article", "note"],
)
is True
)
assert note.frontmatter.dict == {
"date_created": ["2022-12-22"],
"frontmatter_Key1": ["author name"],
"shared_key1": ["shared_key1_value", "shared_key1_value3"],
"shared_key2": ["shared_key2_value1"],
"tags": [
"frontmatter_tag1",
"frontmatter_tag2",
"shared_tag",
"📅/frontmatter_tag3",
],
}
assert note.inline_metadata.dict == {
"bottom_key1": ["bottom_key1_value"],
"bottom_key2": ["bottom_key2_value"],
"frontmatter_Key2": ["article", "note"],
"intext_key": ["intext_value"],
"key📅": ["📅_key_value"],
"shared_key1": ["shared_key1_value", "shared_key1_value2"],
"shared_key2": ["shared_key2_value2"],
"top_key1": ["top_key1_value"],
"top_key2": ["top_key2_value"],
"top_key3": ["top_key3_value_as_link"],
}

# Transpose a string value from a key
note = Note(note_path=sample_note)
assert (
note.transpose_metadata(
begin=MetadataType.FRONTMATTER,
end=MetadataType.INLINE,
key="frontmatter_Key2",
value="note",
)
is True
)
assert note.frontmatter.dict == {
"date_created": ["2022-12-22"],
"frontmatter_Key1": ["author name"],
"frontmatter_Key2": ["article"],
"shared_key1": ["shared_key1_value", "shared_key1_value3"],
"shared_key2": ["shared_key2_value1"],
"tags": [
"frontmatter_tag1",
"frontmatter_tag2",
"shared_tag",
"📅/frontmatter_tag3",
],
}
assert note.inline_metadata.dict == {
"bottom_key1": ["bottom_key1_value"],
"bottom_key2": ["bottom_key2_value"],
"frontmatter_Key2": ["note"],
"intext_key": ["intext_value"],
"key📅": ["📅_key_value"],
"shared_key1": ["shared_key1_value", "shared_key1_value2"],
"shared_key2": ["shared_key2_value2"],
"top_key1": ["top_key1_value"],
"top_key2": ["top_key2_value"],
"top_key3": ["top_key3_value_as_link"],
}

# Transpose list values from a key
note = Note(note_path=sample_note)
assert (
note.transpose_metadata(
begin=MetadataType.FRONTMATTER,
end=MetadataType.INLINE,
key="frontmatter_Key2",
value=["note", "article"],
)
is True
)
assert note.frontmatter.dict == {
"date_created": ["2022-12-22"],
"frontmatter_Key1": ["author name"],
"shared_key1": ["shared_key1_value", "shared_key1_value3"],
"shared_key2": ["shared_key2_value1"],
"tags": [
"frontmatter_tag1",
"frontmatter_tag2",
"shared_tag",
"📅/frontmatter_tag3",
],
}
assert note.inline_metadata.dict == {
"bottom_key1": ["bottom_key1_value"],
"bottom_key2": ["bottom_key2_value"],
"frontmatter_Key2": ["note", "article"],
"intext_key": ["intext_value"],
"key📅": ["📅_key_value"],
"shared_key1": ["shared_key1_value", "shared_key1_value2"],
"shared_key2": ["shared_key2_value2"],
"top_key1": ["top_key1_value"],
"top_key2": ["top_key2_value"],
"top_key3": ["top_key3_value_as_link"],
}


def test_update_frontmatter(sample_note) -> None:
"""Test replacing frontmatter."""
note = Note(note_path=sample_note)

@@ -556,7 +809,9 @@ frontmatter_Key1: some_new_key_here
frontmatter_Key2:
- article
- note
shared_key1: shared_key1_value
shared_key1:
- shared_key1_value
- shared_key1_value3
shared_key2: shared_key2_value1
---"""
assert new_frontmatter in note.file_content
@@ -29,12 +29,16 @@ def test_vault_creation(test_vault):
"bottom_key1": ["bottom_key1_value"],
"bottom_key2": ["bottom_key2_value"],
"date_created": ["2022-12-22"],
"emoji_📅_key": ["emoji_📅_key_value"],
"frontmatter_Key1": ["author name"],
"frontmatter_Key2": ["article", "note"],
"ignored_frontmatter": ["ignore_me"],
"intext_key": ["intext_value"],
"shared_key1": ["shared_key1_value"],
"key📅": ["📅_key_value"],
"shared_key1": [
"shared_key1_value",
"shared_key1_value2",
"shared_key1_value3",
],
"shared_key2": ["shared_key2_value1", "shared_key2_value2"],
"tags": [
"frontmatter_tag1",

@@ -63,9 +67,9 @@ def test_vault_creation(test_vault):
assert vault.metadata.inline_metadata == {
"bottom_key1": ["bottom_key1_value"],
"bottom_key2": ["bottom_key2_value"],
"emoji_📅_key": ["emoji_📅_key_value"],
"intext_key": ["intext_value"],
"shared_key1": ["shared_key1_value"],
"key📅": ["📅_key_value"],
"shared_key1": ["shared_key1_value", "shared_key1_value2"],
"shared_key2": ["shared_key2_value2"],
"top_key1": ["top_key1_value"],
"top_key2": ["top_key2_value"],

@@ -77,7 +81,7 @@ def test_vault_creation(test_vault):
"frontmatter_Key1": ["author name"],
"frontmatter_Key2": ["article", "note"],
"ignored_frontmatter": ["ignore_me"],
"shared_key1": ["shared_key1_value"],
"shared_key1": ["shared_key1_value", "shared_key1_value3"],
"shared_key2": ["shared_key2_value1"],
"tags": [
"frontmatter_tag1",

@@ -104,13 +108,17 @@ def test_add_metadata(test_vault) -> None:
"bottom_key1": ["bottom_key1_value"],
"bottom_key2": ["bottom_key2_value"],
"date_created": ["2022-12-22"],
"emoji_📅_key": ["emoji_📅_key_value"],
"frontmatter_Key1": ["author name"],
"frontmatter_Key2": ["article", "note"],
"ignored_frontmatter": ["ignore_me"],
"intext_key": ["intext_value"],
"key📅": ["📅_key_value"],
"new_key": [],
"shared_key1": ["shared_key1_value"],
"shared_key1": [
"shared_key1_value",
"shared_key1_value2",
"shared_key1_value3",
],
"shared_key2": ["shared_key2_value1", "shared_key2_value2"],
"tags": [
"frontmatter_tag1",

@@ -132,7 +140,7 @@ def test_add_metadata(test_vault) -> None:
"frontmatter_Key2": ["article", "note"],
"ignored_frontmatter": ["ignore_me"],
"new_key": [],
"shared_key1": ["shared_key1_value"],
"shared_key1": ["shared_key1_value", "shared_key1_value3"],
"shared_key2": ["shared_key2_value1"],
"tags": [
"frontmatter_tag1",

@@ -150,14 +158,18 @@ def test_add_metadata(test_vault) -> None:
"bottom_key1": ["bottom_key1_value"],
"bottom_key2": ["bottom_key2_value"],
"date_created": ["2022-12-22"],
"emoji_📅_key": ["emoji_📅_key_value"],
"frontmatter_Key1": ["author name"],
"frontmatter_Key2": ["article", "note"],
"ignored_frontmatter": ["ignore_me"],
"intext_key": ["intext_value"],
"key📅": ["📅_key_value"],
"new_key": [],
"new_key2": ["new_key2_value"],
"shared_key1": ["shared_key1_value"],
"shared_key1": [
"shared_key1_value",
"shared_key1_value2",
"shared_key1_value3",
],
"shared_key2": ["shared_key2_value1", "shared_key2_value2"],
"tags": [
"frontmatter_tag1",

@@ -180,7 +192,7 @@ def test_add_metadata(test_vault) -> None:
"ignored_frontmatter": ["ignore_me"],
"new_key": [],
"new_key2": ["new_key2_value"],
"shared_key1": ["shared_key1_value"],
"shared_key1": ["shared_key1_value", "shared_key1_value3"],
"shared_key2": ["shared_key2_value1"],
"tags": [
"frontmatter_tag1",

@@ -470,3 +482,51 @@ def test_rename_metadata(test_vault) -> None:
"shared_tag",
"📅/frontmatter_tag3",
]


def test_transpose_metadata(test_vault) -> None:
"""Test transposing metadata."""
vault_path = test_vault
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path=vault_path)
vault_config = config.vaults[0]
vault = Vault(config=vault_config)

assert vault.transpose_metadata(begin=MetadataType.INLINE, end=MetadataType.FRONTMATTER) == 2

assert vault.metadata.inline_metadata == {}
assert vault.metadata.frontmatter == {
"author": ["author name"],
"bottom_key1": ["bottom_key1_value"],
"bottom_key2": ["bottom_key2_value"],
"date_created": ["2022-12-22"],
"frontmatter_Key1": ["author name"],
"frontmatter_Key2": ["article", "note"],
"ignored_frontmatter": ["ignore_me"],
"intext_key": ["intext_value"],
"key📅": ["📅_key_value"],
"shared_key1": [
"shared_key1_value",
"shared_key1_value2",
"shared_key1_value3",
],
"shared_key2": ["shared_key2_value1", "shared_key2_value2"],
"tags": [
"frontmatter_tag1",
"frontmatter_tag2",
"frontmatter_tag3",
"ignored_file_tag1",
"shared_tag",
"📅/frontmatter_tag3",
],
"top_key1": ["top_key1_value"],
"top_key2": ["top_key2_value"],
"top_key3": ["top_key3_value_as_link"],
"type": ["article", "note"],
}

assert (
vault.transpose_metadata(
begin=MetadataType.INLINE, end=MetadataType.FRONTMATTER, location=InsertLocation.TOP
)
== 0
)
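For reference, the vault-level API exercised above could be driven directly as in the sketch below. This is a sketch under stated assumptions, not documentation: the module paths in the imports and the vault path are guesses, while the calls themselves (`Config`, `Vault`, `transpose_metadata`, `MetadataType`, `InsertLocation.TOP`) appear verbatim in the diff.

```python
# Assumed import paths: only MetadataType's location is shown in the diff above.
from obsidian_metadata.models.enums import InsertLocation, MetadataType  # InsertLocation path assumed
from obsidian_metadata.models import Vault  # assumed
from obsidian_metadata._config import Config  # assumed

# "path/to/vault" is a placeholder for a real Obsidian vault.
config = Config(config_path="tests/fixtures/test_vault_config.toml", vault_path="path/to/vault")
vault = Vault(config=config.vaults[0])

# Returns the number of notes whose metadata was moved.
changed = vault.transpose_metadata(begin=MetadataType.INLINE, end=MetadataType.FRONTMATTER)
print(f"Transposed inline metadata to frontmatter in {changed} notes")

# A second pass finds nothing left to move; InsertLocation controls where the
# transposed metadata is written within each note.
assert (
    vault.transpose_metadata(
        begin=MetadataType.INLINE, end=MetadataType.FRONTMATTER, location=InsertLocation.TOP
    )
    == 0
)
```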