Mirror of https://github.com/natelandau/obsidian-metadata.git (synced 2025-11-08 05:03:47 -05:00)

feat: bulk update metadata from a CSV file

README.md (36 lines changed)

@@ -43,13 +43,18 @@ Once installed, run `obsidian-metadata` in your terminal to enter an interactive
- Backup: Create a backup of the vault.
- Delete Backup: Delete a backup of the vault.

**Export Metadata**

- Export all metadata to a CSV organized by metadata type
- Export all metadata to a CSV organized by note path
- Export all metadata to a JSON file organized by metadata type

**Inspect Metadata**

- **View all metadata in the vault**
- View all **frontmatter**
- View all **inline metadata**
- View all **inline tags**
- **Export all metadata to CSV or JSON file**

**Filter Notes in Scope**: Limit the scope of notes to be processed with one or more filters.

@@ -59,6 +64,8 @@ Once installed, run `obsidian-metadata` in your terminal to enter an interactive
- **List and clear filters**: List all current filters and clear one or all
- **List notes in scope**: List notes that will be processed.

**Bulk Edit Metadata** from a CSV file (see the _making bulk edits_ section below)

**Add Metadata**: Add new metadata to your vault.

When adding a new key to inline metadata, the `insert_location` value in the config file will specify where in the note it will be inserted.

@@ -132,6 +139,29 @@ Below is an example with two vaults.

To bypass the configuration file and specify a vault to use at runtime, use the `--vault-path` option.

### Making bulk edits

Bulk edits are supported by importing a CSV file containing the following columns:

1. `Path` - Path to the note relative to the vault root folder
2. `Type` - Type of metadata. One of `frontmatter`, `inline_metadata`, or `tag`
3. `Key` - The key to add (leave blank for a tag)
4. `Value` - The value to add to the key

Notes which match a `Path` in the file will be updated to contain ONLY the information in the CSV file. Notes which do not match a path will be left untouched. The example CSV below will remove any existing frontmatter, inline metadata, or tags within `folder 1/note1.md` and then add the specified metadata.

```csv
path,type,key,value
folder 1/note1.md,frontmatter,fruits,apple
folder 1/note1.md,frontmatter,fruits,banana
folder 1/note1.md,inline_metadata,cars,toyota
folder 1/note1.md,inline_metadata,cars,honda
folder 1/note1.md,tag,,tag1
folder 1/note1.md,tag,,tag2
```

You can export all your notes with their associated metadata in this format from the "Export Metadata" section of the script to be used as a template for your bulk changes.

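For illustration only, an exported template could be post-processed with a few lines of Python before importing it back. This is a sketch, not part of the repository; the file names and the renamed key are hypothetical.

```python
import csv
from pathlib import Path

# Hypothetical file names: an export produced by "Export Metadata" and the
# edited copy that will be imported back with the bulk-edit feature.
source = Path("vault_export.csv")
target = Path("bulk_changes.csv")

with source.open(newline="") as src, target.open("w", newline="") as dst:
    reader = csv.DictReader(src)
    writer = csv.DictWriter(dst, fieldnames=["path", "type", "key", "value"])
    writer.writeheader()
    for row in reader:
        # Example edit: rename the frontmatter key "fruits" to "snacks"
        # while copying every other row through unchanged.
        if row["type"] == "frontmatter" and row["key"] == "fruits":
            row["key"] = "snacks"
        writer.writerow(row)
```
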

# Contributing

## Setup: Once per project
@@ -163,3 +193,7 @@ There are two ways to contribute to this project.
- Run `poetry add {package}` from within the development environment to install a runtime dependency and add it to `pyproject.toml` and `poetry.lock`.
- Run `poetry remove {package}` from within the development environment to uninstall a runtime dependency and remove it from `pyproject.toml` and `poetry.lock`.
- Run `poetry update` from within the development environment to upgrade all dependencies to the latest versions allowed by `pyproject.toml`.

poetry.lock (generated, 22 lines changed)

@@ -4,7 +4,7 @@
name = "argcomplete"
version = "2.0.6"
description = "Bash tab completion for argparse"
category = "dev"
category = "main"
optional = false
python-versions = ">=3.6"
files = [

@@ -100,7 +100,7 @@ files = [
name = "charset-normalizer"
version = "2.1.1"
description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
category = "dev"
category = "main"
optional = false
python-versions = ">=3.6.0"
files = [

@@ -142,7 +142,7 @@ files = [
name = "commitizen"
version = "2.42.1"
description = "Python commitizen client tool"
category = "dev"
category = "main"
optional = false
python-versions = ">=3.6.2,<4.0.0"
files = [

@@ -231,7 +231,7 @@ toml = ["tomli"]
name = "decli"
version = "0.5.2"
description = "Minimal, easy-to-use, declarative cli tool"
category = "dev"
category = "main"
optional = false
python-versions = ">=3.6"
files = [

@@ -354,7 +354,7 @@ tests = ["pytest", "pytest-cov", "pytest-mock"]
name = "jinja2"
version = "3.1.2"
description = "A very fast and expressive template engine."
category = "dev"
category = "main"
optional = false
python-versions = ">=3.7"
files = [

@@ -416,7 +416,7 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
name = "markupsafe"
version = "2.1.2"
description = "Safely add untrusted strings to HTML/XML markup."
category = "dev"
category = "main"
optional = false
python-versions = ">=3.7"
files = [

@@ -562,7 +562,7 @@ setuptools = "*"
name = "packaging"
version = "23.0"
description = "Core utilities for Python packages"
category = "dev"
category = "main"
optional = false
python-versions = ">=3.7"
files = [

@@ -839,7 +839,7 @@ testing = ["filelock"]
name = "pyyaml"
version = "6.0"
description = "YAML parser and emitter for Python"
category = "dev"
category = "main"
optional = false
python-versions = ">=3.6"
files = [

@@ -1172,7 +1172,7 @@ widechars = ["wcwidth"]
name = "termcolor"
version = "2.2.0"
description = "ANSI color formatting for output in terminal"
category = "dev"
category = "main"
optional = false
python-versions = ">=3.7"
files = [

@@ -1275,7 +1275,7 @@ files = [
name = "typing-extensions"
version = "4.5.0"
description = "Backported and Experimental Type Hints for Python 3.7+"
category = "dev"
category = "main"
optional = false
python-versions = ">=3.7"
files = [

@@ -1349,4 +1349,4 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"]
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
content-hash = "45e6b76d4b9d0851c885c86136e0721cc90506953bc8e4969b65496aa98587d9"
content-hash = "22aeb1e69b50ce41bcec085e2323e743675f917d617f3e6afa987b5cb98b7fb8"

pyproject.toml

@@ -26,6 +26,7 @@
shellingham = "^1.5.0.post1"
tomlkit = "^0.11.6"
typer = "^0.7.0"
commitizen = "^2.42.1"

[tool.poetry.group.test.dependencies]
pytest = "^7.2.2"

@@ -87,13 +87,16 @@ def info(msg: str) -> None:
    console.print(f"INFO | {msg}")


def usage(msg: str, width: int = 80) -> None:
def usage(msg: str, width: int = None) -> None:
    """Print a usage message without using logging.

    Args:
        msg: Message to print
        width (optional): Width of the message
    """
    if width is None:
        width = console.width - 15

    for _n, line in enumerate(wrap(msg, width=width)):
        if _n == 0:
            console.print(f"[dim]USAGE | {line}")

@@ -91,74 +91,7 @@ def main(
    [bold underline]Configuration:[/]
    Configuration is specified in a configuration file. On first run, this file will be created at [tan]~/.{0}.env[/]. Any options specified on the command line will override the configuration file.

    [bold underline]Usage:[/]
    [tan]Obsidian-metadata[/] provides a menu of sub-commands.

    [bold underline]Vault Actions[/]
    Create or delete a backup of your vault.
    • Backup: Create a backup of the vault.
    • Delete Backup: Delete a backup of the vault.

    [bold underline]Inspect Metadata[/]
    Inspect the metadata in your vault.
    • View all metadata in the vault
    • View all frontmatter
    • View all inline metadata
    • View all inline tags
    • Export all metadata to CSV or JSON file

    [bold underline]Filter Notes in Scope[/]
    Limit the scope of notes to be processed with one or more filters.
    • Path filter (regex): Limit scope based on the path or filename
    • Metadata filter: Limit scope based on a key or key/value pair
    • Tag filter: Limit scope based on an in-text tag
    • List and clear filters: List all current filters and clear one or all
    • List notes in scope: List notes that will be processed.

    [bold underline]Add Metadata[/]
    Add new metadata to your vault.
    • Add new metadata to the frontmatter
    • Add new inline metadata - Set `insert_location` in the config to
      control where the new metadata is inserted. (Default: Bottom)
    • Add new inline tag - Set `insert_location` in the config to
      control where the new tag is inserted. (Default: Bottom)

    [bold underline]Rename Metadata[/]
    Rename either a key and all associated values, a specific value within a key, or an in-text tag.
    • Rename a key
    • Rename a value
    • Rename an inline tag

    [bold underline]Delete Metadata[/]
    Delete either a key and all associated values, or a specific value.
    • Delete a key and associated values
    • Delete a value from a key
    • Delete an inline tag

    [bold underline]Move Inline Metadata[/]
    Move inline metadata to a specified location within a note.

    • Move to Top - Move all inline metadata beneath the frontmatter
    • Move to After Title* - Move all inline metadata beneath the first markdown header
    • Move to Bottom - Move all inline metadata to the bottom of the note

    [bold underline]Transpose Metadata[/]
    Move metadata from inline to frontmatter or the reverse. When transposing to inline metadata,
    the `insert_location` value in the config file will specify where in the
    note it will be inserted.

    • Transpose all metadata - Moves all frontmatter to inline
      metadata, or the reverse
    • Transpose key - Transposes a specific key and all its values
    • Transpose value - Transpose a specific key:value pair

    [bold underline]Review Changes[/]
    Prior to committing changes, review all changes that will be made.
    • View a diff of the changes that will be made

    [bold underline]Commit Changes[/]
    Write the changes to disk. This step cannot be undone.
    • Commit changes to the vault
    Full usage information is available at https://github.com/natelandau/obsidian-metadata

    """
    # Instantiate logger

@@ -1,6 +1,7 @@
"""Questions for the cli."""

import csv
from pathlib import Path
from typing import Any

@@ -53,8 +54,12 @@ class Application:
            match self.questions.ask_application_main():
                case "vault_actions":
                    self.application_vault()
                case "export_metadata":
                    self.application_export_metadata()
                case "inspect_metadata":
                    self.application_inspect_metadata()
                case "import_from_csv":
                    self.application_import_csv()
                case "filter_notes":
                    self.application_filter()
                case "add_metadata":

@@ -124,6 +129,7 @@ class Application:
        alerts.usage("Delete either a key and all associated values, or a specific value.")

        choices = [
            questionary.Separator(),
            {"name": "Delete inline tag", "value": "delete_inline_tag"},
            {"name": "Delete key", "value": "delete_key"},
            {"name": "Delete value", "value": "delete_value"},

@@ -147,6 +153,7 @@ class Application:
        alerts.usage("Select the type of metadata to rename.")

        choices = [
            questionary.Separator(),
            {"name": "Rename inline tag", "value": "rename_inline_tag"},
            {"name": "Rename key", "value": "rename_key"},
            {"name": "Rename value", "value": "rename_value"},

@@ -170,6 +177,7 @@ class Application:
        alerts.usage("Limit the scope of notes to be processed with one or more filters.")

        choices = [
            questionary.Separator(),
            {"name": "Apply new regex path filter", "value": "apply_path_filter"},
            {"name": "Apply new metadata filter", "value": "apply_metadata_filter"},
            {"name": "Apply new in-text tag filter", "value": "apply_tag_filter"},

@@ -276,6 +284,82 @@ class Application:
                case _:
                    return

    def application_import_csv(self) -> None:
        """Import CSV for bulk changes to metadata."""
        alerts.usage(
            "Import CSV to make bulk changes to metadata. The CSV must have the following columns: path, type, key, value, where type is one of 'frontmatter', 'inline_metadata', or 'tag'. Note: this will not create new notes."
        )

        path = self.questions.ask_path(question="Enter path to a CSV file", valid_file=True)

        if path is None:
            return

        csv_path = Path(path).expanduser()

        if "csv" not in csv_path.suffix.lower():
            alerts.error("File must be a CSV file")
            return

        csv_dict: dict[str, Any] = {}
        with csv_path.open("r") as csv_file:
            csv_reader = csv.DictReader(csv_file, delimiter=",")
            for row in csv_reader:
                if row["path"] not in csv_dict:
                    csv_dict[row["path"]] = []

                csv_dict[row["path"]].append(
                    {"type": row["type"], "key": row["key"], "value": row["value"]}
                )

        num_changed = self.vault.update_from_dict(csv_dict)

        if num_changed == 0:
            alerts.warning("No notes were changed")
            return

        alerts.success(f"Rewrote metadata for {num_changed} notes.")

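As an illustrative aside (a sketch for orientation, not part of the commit), the `csv_dict` built above from the README's example CSV would group rows by note path roughly like this:

```python
# Intermediate structure passed to vault.update_from_dict(), assuming the
# example CSV from the README's "Making bulk edits" section as input.
csv_dict = {
    "folder 1/note1.md": [
        {"type": "frontmatter", "key": "fruits", "value": "apple"},
        {"type": "frontmatter", "key": "fruits", "value": "banana"},
        {"type": "inline_metadata", "key": "cars", "value": "toyota"},
        {"type": "inline_metadata", "key": "cars", "value": "honda"},
        {"type": "tag", "key": "", "value": "tag1"},
        {"type": "tag", "key": "", "value": "tag2"},
    ]
}
```
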
    def application_export_metadata(self) -> None:
        """Export metadata to various formats."""
        alerts.usage(
            "Export the metadata in your vault. Note, uncommitted changes will be reflected in these files. The notes CSV export can be used as a template for importing bulk changes."
        )
        choices = [
            questionary.Separator(),
            {"name": "Metadata by type to CSV", "value": "export_csv"},
            {"name": "Metadata by type to JSON", "value": "export_json"},
            {
                "name": "Metadata by note to CSV [Bulk import template]",
                "value": "export_notes_csv",
            },
            questionary.Separator(),
            {"name": "Back", "value": "back"},
        ]
        while True:
            match self.questions.ask_selection(choices=choices, question="Export format"):
                case "export_csv":
                    path = self.questions.ask_path(question="Enter a path for the CSV file")
                    if path is None:
                        return
                    self.vault.export_metadata(path=path, export_format="csv")
                    alerts.success(f"CSV written to {path}")
                case "export_json":
                    path = self.questions.ask_path(question="Enter a path for the JSON file")
                    if path is None:
                        return
                    self.vault.export_metadata(path=path, export_format="json")
                    alerts.success(f"JSON written to {path}")
                case "export_notes_csv":
                    path = self.questions.ask_path(question="Enter a path for the CSV file")
                    if path is None:
                        return
                    self.vault.export_notes_to_csv(path=path)
                    alerts.success(f"CSV written to {path}")
                    return
                case _:
                    return

    def application_inspect_metadata(self) -> None:
        """View metadata."""
        alerts.usage(

@@ -283,19 +367,17 @@ class Application:
        )

        choices = [
            questionary.Separator(),
            {"name": "View all frontmatter", "value": "all_frontmatter"},
            {"name": "View all inline metadata", "value": "all_inline"},
            {"name": "View all inline tags", "value": "all_tags"},
            {"name": "View all keys", "value": "all_keys"},
            {"name": "View all metadata", "value": "all_metadata"},
            questionary.Separator(),
            {"name": "Write all metadata to CSV", "value": "export_csv"},
            {"name": "Write all metadata to JSON file", "value": "export_json"},
            questionary.Separator(),
            {"name": "Back", "value": "back"},
        ]
        while True:
            match self.questions.ask_selection(choices=choices, question="Select a vault action"):
            match self.questions.ask_selection(choices=choices, question="Select an action"):
                case "all_metadata":
                    console.print("")
                    self.vault.metadata.print_metadata(area=MetadataType.ALL)

@@ -316,18 +398,6 @@ class Application:
                    console.print("")
                    self.vault.metadata.print_metadata(area=MetadataType.TAGS)
                    console.print("")
                case "export_csv":
                    path = self.questions.ask_path(question="Enter a path for the CSV file")
                    if path is None:
                        return
                    self.vault.export_metadata(path=path, export_format="csv")
                    alerts.success(f"Metadata written to {path}")
                case "export_json":
                    path = self.questions.ask_path(question="Enter a path for the JSON file")
                    if path is None:
                        return
                    self.vault.export_metadata(path=path, export_format="json")
                    alerts.success(f"Metadata written to {path}")
                case _:
                    return

@@ -342,6 +412,7 @@ class Application:
        alerts.usage(" 2. Move the location of inline metadata within a note.")

        choices = [
            questionary.Separator(),
            {"name": "Move inline metadata to top of note", "value": "move_to_top"},
            {
                "name": "Move inline metadata beneath the first header",

@@ -374,6 +445,7 @@ class Application:
        alerts.usage("Create or delete a backup of your vault.")

        choices = [
            questionary.Separator(),
            {"name": "Backup vault", "value": "backup_vault"},
            {"name": "Delete vault backup", "value": "delete_backup"},
            questionary.Separator(),

@@ -564,6 +636,7 @@ class Application:

        alerts.info(f"Found {len(changed_notes)} changed notes in the vault")
        choices: list[dict[str, Any] | questionary.Separator] = []
        choices.append(questionary.Separator())
        for n, note in enumerate(changed_notes, start=1):
            _selection = {
                "name": f"{n}: {note.note_path.relative_to(self.vault.vault_path)}",

@@ -245,6 +245,9 @@ class Frontmatter:
        except Exception as e:  # noqa: BLE001
            raise AttributeError(e) from e

        if frontmatter is None or frontmatter == [None]:
            return {}

        for k in frontmatter:
            if frontmatter[k] is None:
                frontmatter[k] = []

@@ -326,6 +329,10 @@ class Frontmatter:

        return False

    def delete_all(self) -> None:
        """Delete all Frontmatter from the note."""
        self.dict = {}

    def has_changes(self) -> bool:
        """Check if the frontmatter has changes.

@@ -190,6 +190,17 @@ class Note:

        return False

    def delete_all_metadata(self) -> None:
        """Delete all metadata from the note. Removes all frontmatter, inline metadata, and tags from the body of the note and from the associated metadata objects."""
        for key in self.inline_metadata.dict:
            self.delete_metadata(key=key, area=MetadataType.INLINE)

        for tag in self.inline_tags.list:
            self.delete_inline_tag(tag=tag)

        self.frontmatter.delete_all()
        self.write_frontmatter()

    def delete_inline_tag(self, tag: str) -> bool:
        """Delete an inline tag from the `inline_tags` attribute AND remove the tag from the text of the note if it exists.

@@ -200,6 +200,23 @@ class Questions:

        return True

    def _validate_path_is_file(self, text: str) -> bool | str:
        """Validate a path is a file.

        Args:
            text (str): The path to validate.

        Returns:
            bool | str: True if the path is valid, otherwise a string with the error message.
        """
        path_to_validate: Path = Path(text).expanduser().resolve()
        if not path_to_validate.exists():
            return f"Path does not exist: {path_to_validate}"
        if not path_to_validate.is_file():
            return f"Path is not a file: {path_to_validate}"

        return True

    def _validate_valid_vault_regex(self, text: str) -> bool | str:
        """Validate a valid regex.

@@ -276,9 +293,11 @@ class Questions:
            choices=[
                questionary.Separator("-------------------------------"),
                {"name": "Vault Actions", "value": "vault_actions"},
                {"name": "Export Metadata", "value": "export_metadata"},
                {"name": "Inspect Metadata", "value": "inspect_metadata"},
                {"name": "Filter Notes in Scope", "value": "filter_notes"},
                questionary.Separator("-------------------------------"),
                {"name": "Bulk changes from imported CSV", "value": "import_from_csv"},
                {"name": "Add Metadata", "value": "add_metadata"},
                {"name": "Delete Metadata", "value": "delete_metadata"},
                {"name": "Rename Metadata", "value": "rename_metadata"},

@@ -475,15 +494,27 @@ class Questions:
            question, validate=self._validate_number, style=self.style, qmark="INPUT |"
        ).ask()

    def ask_path(self, question: str = "Enter a path") -> str:  # pragma: no cover
    def ask_path(
        self, question: str = "Enter a path", valid_file: bool = False
    ) -> str:  # pragma: no cover
        """Ask the user for a path.

        Args:
            question (str, optional): The question to ask. Defaults to "Enter a path".
            valid_file (bool, optional): Whether the path should be a valid file. Defaults to False.

        Returns:
            str: A path.
        """
        if valid_file:
            return questionary.path(
                question,
                only_directories=False,
                style=self.style,
                validate=self._validate_path_is_file,
                qmark="INPUT |",
            ).ask()

        return questionary.path(question, style=self.style, qmark="INPUT |").ask()

    def ask_selection(

@@ -498,7 +529,6 @@ class Questions:
        Returns:
            any: The selected item value.
        """
        choices.insert(0, questionary.Separator())
        return questionary.select(
            question,
            choices=choices,

@@ -6,6 +6,7 @@ import re
import shutil
from dataclasses import dataclass
from pathlib import Path
from typing import Any

import rich.repr
import typer

@@ -360,6 +361,44 @@ class Vault:
        with export_file.open(mode="w", encoding="UTF8") as f:
            json.dump(dict_to_dump, f, indent=4, ensure_ascii=False, sort_keys=True)

    def export_notes_to_csv(self, path: str) -> None:
        """Export notes and their associated metadata to a csv file. This is useful as a template for importing metadata changes to a vault.

        Args:
            path (str): Path to write csv file to.
        """
        export_file = Path(path).expanduser().resolve()
        if not export_file.parent.exists():
            alerts.error(f"Path does not exist: {export_file.parent}")
            raise typer.Exit(code=1)

        with export_file.open(mode="w", encoding="UTF8") as f:
            writer = csv.writer(f)
            writer.writerow(["path", "type", "key", "value"])

            for _note in self.all_notes:
                for key, value in _note.frontmatter.dict.items():
                    for v in value:
                        writer.writerow(
                            [_note.note_path.relative_to(self.vault_path), "frontmatter", key, v]
                        )

                for key, value in _note.inline_metadata.dict.items():
                    for v in value:
                        writer.writerow(
                            [
                                _note.note_path.relative_to(self.vault_path),
                                "inline_metadata",
                                key,
                                v,
                            ]
                        )

                for tag in _note.inline_tags.list:
                    writer.writerow(
                        [_note.note_path.relative_to(self.vault_path), "tag", "", f"{tag}"]
                    )

    def get_changed_notes(self) -> list[Note]:
        """Return a list of notes that have changes.

@@ -510,3 +549,55 @@ class Vault:
            self._rebuild_vault_metadata()

        return num_changed

    def update_from_dict(self, dictionary: dict[str, Any]) -> int:
        """Update note metadata from a dictionary. This is a destructive operation. All metadata in the specified notes not in the dictionary will be removed.

        Requires a dictionary with the note path as the key and a list of metadata dictionaries as the value. Each dictionary in the list must have the following format:

            {
                'type': 'frontmatter|inline_metadata|tag',
                'key': 'string',
                'value': 'string'
            }

        Args:
            dictionary (dict[str, Any]): Dictionary to update metadata from.

        Returns:
            int: Number of notes that had metadata updated.
        """
        num_changed = 0

        for _note in self.all_notes:
            path = _note.note_path.relative_to(self.vault_path)
            if str(path) in dictionary:
                log.debug(f"Updating metadata for {path}")
                num_changed += 1
                _note.delete_all_metadata()
                for row in dictionary[str(path)]:
                    if row["type"].lower() == "frontmatter":
                        _note.add_metadata(
                            area=MetadataType.FRONTMATTER, key=row["key"], value=row["value"]
                        )

                    if row["type"].lower() == "inline_metadata":
                        _note.add_metadata(
                            area=MetadataType.INLINE,
                            key=row["key"],
                            value=row["value"],
                            location=self.insert_location,
                        )

                    if row["type"].lower() == "tag" or row["type"].lower() == "tags":
                        console.print(f"Adding tag {row['value']}")
                        _note.add_metadata(
                            area=MetadataType.TAGS,
                            value=row["value"],
                            location=self.insert_location,
                        )

        if num_changed > 0:
            self._rebuild_vault_metadata()

        return num_changed

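For orientation only, a call with a dictionary in the documented shape might look like the sketch below; the paths and whether they exist in the vault are hypothetical.

```python
# Hypothetical usage, assuming `vault` is a Vault instance; keys are note paths
# relative to the vault root.
update_dict = {
    "folder 1/note1.md": [
        {"type": "frontmatter", "key": "fruits", "value": "apple"},
        {"type": "tag", "key": "", "value": "tag1"},
    ],
    "missing/note.md": [  # paths that match no note are simply skipped
        {"type": "inline_metadata", "key": "cars", "value": "toyota"},
    ],
}
num_changed = vault.update_from_dict(update_dict)  # only matching notes are rewritten and counted
```
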
@@ -58,7 +58,8 @@ def test_usage(capsys):
    assert captured.out == "USAGE | This prints in usage\n"

    alerts.usage(
        "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua"
        "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua",
        width=80,
    )
    captured = capsys.readouterr()
    assert "USAGE | Lorem ipsum dolor sit amet" in captured.out

@@ -68,8 +68,13 @@ repeated_key:: repeated_key_value2
"""


def test_frontmatter_create() -> None:
    """Test frontmatter creation."""
def test_frontmatter_create_1() -> None:
    """Test frontmatter creation.

    GIVEN valid frontmatter content
    WHEN a Frontmatter object is created
    THEN parse the YAML frontmatter and add it to the object
    """
    frontmatter = Frontmatter(INLINE_CONTENT)
    assert frontmatter.dict == {}

@@ -88,11 +93,11 @@ def test_frontmatter_create() -> None:
    }


def test_frontmatter_create_error() -> None:
def test_frontmatter_create_2() -> None:
    """Test frontmatter creation error.

    GIVEN frontmatter content
    WHEN frontmatter is invalid
    GIVEN invalid frontmatter content
    WHEN a Frontmatter object is created
    THEN raise ValueError
    """
    fn = """---

@@ -104,21 +109,118 @@ invalid = = "content"
        Frontmatter(fn)


def test_frontmatter_contains() -> None:
    """Test frontmatter contains."""
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)
def test_frontmatter_create_3():
    """Test frontmatter creation error.

    GIVEN empty frontmatter content
    WHEN a Frontmatter object is created
    THEN set the dict to an empty dict
    """
    content = "---\n\n---"
    frontmatter = Frontmatter(content)
    assert frontmatter.dict == {}


def test_frontmatter_create_4():
    """Test frontmatter creation error.

    GIVEN empty frontmatter content with a yaml marker
    WHEN a Frontmatter object is created
    THEN set the dict to an empty dict
    """
    content = "---\n-\n---"
    frontmatter = Frontmatter(content)
    assert frontmatter.dict == {}


def test_frontmatter_contains_1():
    """Test frontmatter contains() method.

    GIVEN a Frontmatter object
    WHEN the contains() method is called with a key
    THEN return True if the key is found
    """
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)
    assert frontmatter.contains("frontmatter_Key1") is True


def test_frontmatter_contains_2():
    """Test frontmatter contains() method.

    GIVEN a Frontmatter object
    WHEN the contains() method is called with a key
    THEN return False if the key is not found
    """
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)
    assert frontmatter.contains("no_key") is False


def test_frontmatter_contains_3():
    """Test frontmatter contains() method.

    GIVEN a Frontmatter object
    WHEN the contains() method is called with a key and a value
    THEN return True if the key and value is found
    """
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)
    assert frontmatter.contains("frontmatter_Key2", "article") is True
    assert frontmatter.contains("frontmatter_Key3") is False


def test_frontmatter_contains_4():
    """Test frontmatter contains() method.

    GIVEN a Frontmatter object
    WHEN the contains() method is called with a key and a value
    THEN return False if the key and value is not found
    """
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)
    assert frontmatter.contains("frontmatter_Key2", "no value") is False


def test_frontmatter_contains_5():
    """Test frontmatter contains() method.

    GIVEN a Frontmatter object
    WHEN the contains() method is called with a key regex
    THEN return True if a key matches the regex
    """
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)
    assert frontmatter.contains(r"\d$", is_regex=True) is True


def test_frontmatter_contains_6():
    """Test frontmatter contains() method.

    GIVEN a Frontmatter object
    WHEN the contains() method is called with a key regex
    THEN return False if no key matches the regex
    """
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)
    assert frontmatter.contains(r"^\d", is_regex=True) is False
    assert frontmatter.contains("key", r"_\d", is_regex=True) is False


def test_frontmatter_contains_7():
    """Test frontmatter contains() method.

    GIVEN a Frontmatter object
    WHEN the contains() method is called with a key and value regex
    THEN return True if a value matches the regex
    """
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)
    assert frontmatter.contains("key", r"\w\d_", is_regex=True) is True


def test_frontmatter_contains_8():
    """Test frontmatter contains() method.

    GIVEN a Frontmatter object
    WHEN the contains() method is called with a key and value regex
    THEN return False if a value does not match the regex
    """
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)
    assert frontmatter.contains("key", r"_\d", is_regex=True) is False


def test_frontmatter_add() -> None:
    """Test frontmatter add."""
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)

@@ -233,6 +335,18 @@ def test_frontmatter_delete() -> None:
    assert frontmatter.dict == {"shared_key1": []}


def test_frontmatter_delete_all():
    """Test Frontmatter delete_all method.

    GIVEN Frontmatter with multiple keys
    WHEN delete_all is called
    THEN all keys and values are deleted
    """
    frontmatter = Frontmatter(FRONTMATTER_CONTENT)
    frontmatter.delete_all()
    assert frontmatter.dict == {}


def test_frontmatter_yaml_conversion():
    """Test Frontmatter to_yaml method."""
    new_frontmatter: str = """\

@@ -229,16 +229,17 @@ def test_add_metadata_method_10(sample_note):
    """Test add_metadata() method.

    GIVEN a note object
    WHEN add_metadata() is with a new tag
    WHEN add_metadata() is called with a new tag
    THEN the tag is added to the InlineTags object and the file content
    """
    note = Note(note_path=sample_note)
    assert "new_tag" not in note.inline_tags.list
    assert "new_tag2" not in note.inline_tags.list
    assert (
        note.add_metadata(MetadataType.TAGS, value="new_tag", location=InsertLocation.TOP) is True
        note.add_metadata(MetadataType.TAGS, value="new_tag2", location=InsertLocation.BOTTOM)
        is True
    )
    assert "new_tag" in note.inline_tags.list
    assert "#new_tag" in note.file_content
    assert "new_tag2" in note.inline_tags.list
    assert "#new_tag2" in note.file_content


def test_commit_1(sample_note, tmp_path) -> None:

@@ -313,6 +314,24 @@ def test_contains_metadata(sample_note) -> None:
    assert note.contains_metadata(r"bottom_key\d$", r"bottom_key\d_value", is_regex=True) is True


def test_delete_all_metadata(sample_note):
    """Test delete_all_metadata() method.

    GIVEN a note object
    WHEN delete_all_metadata() is called
    THEN all tags, frontmatter, and inline metadata are deleted
    """
    note = Note(note_path=sample_note)
    note.delete_all_metadata()
    assert note.inline_tags.list == []
    assert note.frontmatter.dict == {}
    assert note.inline_metadata.dict == {}
    assert note.file_content == Regex("consequat. Duis")
    assert "codeblock_key:: some text" in note.file_content
    assert "#ffffff" in note.file_content
    assert "---" not in note.file_content


def test_delete_inline_tag(sample_note) -> None:
    """Test delete_inline_tag method.

@@ -431,6 +431,38 @@ def test_export_json(tmp_path, test_vault):
    assert '"frontmatter": {' in export_file.read_text()


def test_export_notes_to_csv_1(tmp_path, test_vault):
    """Test export_notes_to_csv() method.

    GIVEN a vault object
    WHEN the export_notes_to_csv method is called with a path
    THEN the notes are exported to a CSV file
    """
    vault = Vault(config=test_vault)
    export_file = Path(f"{tmp_path}/export.csv")
    vault.export_notes_to_csv(path=export_file)
    assert export_file.exists() is True
    assert "path,type,key,value" in export_file.read_text()
    assert "test1.md,frontmatter,shared_key1,shared_key1_value" in export_file.read_text()
    assert "test1.md,inline_metadata,shared_key1,shared_key1_value" in export_file.read_text()
    assert "test1.md,tag,,shared_tag" in export_file.read_text()
    assert "test1.md,frontmatter,tags,📅/frontmatter_tag3" in export_file.read_text()
    assert "test1.md,inline_metadata,key📅,📅_key_value" in export_file.read_text()


def test_export_notes_to_csv_2(test_vault):
    """Test export_notes_to_csv() method.

    GIVEN a vault object
    WHEN the export_notes_to_csv method is called with a path where the parent directory does not exist
    THEN an error is raised
    """
    vault = Vault(config=test_vault)
    export_file = Path("/I/do/not/exist/export.csv")
    with pytest.raises(typer.Exit):
        vault.export_notes_to_csv(path=export_file)


def test_get_filtered_notes_1(sample_vault) -> None:
    """Test filtering notes.

@@ -688,3 +720,60 @@ def test_transpose_metadata(test_vault) -> None:
        )
        == 0
    )


def test_update_from_dict_1(test_vault):
    """Test update_from_dict() method.

    GIVEN a vault object and an update dictionary
    WHEN no dictionary keys match paths in the vault
    THEN no notes are updated and 0 is returned
    """
    vault = Vault(config=test_vault)
    update_dict = {
        "path1": {"type": "frontmatter", "key": "new_key", "value": "new_value"},
        "path2": {"type": "frontmatter", "key": "new_key", "value": "new_value"},
    }

    assert vault.update_from_dict(update_dict) == 0
    assert vault.get_changed_notes() == []


def test_update_from_dict_2(test_vault):
    """Test update_from_dict() method.

    GIVEN a vault object and an update dictionary
    WHEN the dictionary is empty
    THEN no notes are updated and 0 is returned
    """
    vault = Vault(config=test_vault)
    update_dict = {}

    assert vault.update_from_dict(update_dict) == 0
    assert vault.get_changed_notes() == []


def test_update_from_dict_3(test_vault):
    """Test update_from_dict() method.

    GIVEN a vault object and an update dictionary
    WHEN a dictionary key matches a path in the vault
    THEN the note is updated to match the dictionary values
    """
    vault = Vault(config=test_vault)

    update_dict = {
        "test1.md": [
            {"type": "frontmatter", "key": "new_key", "value": "new_value"},
            {"type": "inline_metadata", "key": "new_key2", "value": "new_value"},
            {"type": "tags", "key": "", "value": "new_tag"},
        ]
    }
    assert vault.update_from_dict(update_dict) == 1
    assert vault.get_changed_notes()[0].note_path.name == "test1.md"
    assert vault.get_changed_notes()[0].frontmatter.dict == {"new_key": ["new_value"]}
    assert vault.get_changed_notes()[0].inline_metadata.dict == {"new_key2": ["new_value"]}
    assert vault.get_changed_notes()[0].inline_tags.list == ["new_tag"]
    assert vault.metadata.frontmatter == {"new_key": ["new_value"]}
    assert vault.metadata.inline_metadata == {"new_key2": ["new_value"]}
    assert vault.metadata.tags == ["new_tag"]