mirror of https://github.com/natelandau/obsidian-metadata.git
synced 2025-11-12 23:13:48 -05:00
fix(csv-import): fail if type does not validate
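In short: each CSV row's type column must now be one of tag, frontmatter, or inline_metadata, and rows whose path is missing from the vault now raise typer.BadParameter instead of being skipped with a warning. Below is a minimal sketch of the new type check, not part of the diff; the import path of validate_csv_bulk_imports is an assumption here and may need adjusting to the module the function actually lives in.

# A minimal sketch of the behavior this commit adds (not part of the diff).
# Assumption: validate_csv_bulk_imports is importable from obsidian_metadata._utils;
# adjust the import to wherever the function actually lives.
import tempfile
from pathlib import Path

import pytest
import typer

from obsidian_metadata._utils import validate_csv_bulk_imports

with tempfile.TemporaryDirectory() as tmp_dir:
    csv_path = Path(tmp_dir) / "import.csv"
    # "bogus" is not one of the accepted types: tag, frontmatter, inline_metadata
    csv_path.write_text("path,type,key,value\nnote1.md,bogus,key,value\n")

    # The path exists in the vault, but the type is invalid, so the function
    # now fails fast with typer.BadParameter instead of importing the row.
    with pytest.raises(typer.BadParameter):
        validate_csv_bulk_imports(csv_path=csv_path, note_paths=["note1.md"])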
@@ -8,8 +8,6 @@ from typing import Any
 import typer

 from obsidian_metadata.__version__ import __version__
-from obsidian_metadata._utils import alerts
-from obsidian_metadata._utils.alerts import logger as log
 from obsidian_metadata._utils.console import console

@@ -224,6 +222,11 @@ def validate_csv_bulk_imports(csv_path: Path, note_paths: list) -> dict[str, lis
             raise typer.BadParameter("Missing 'value' column in CSV file")
         row_num += 1

+        if row_num > 0 and row["type"] not in ["tag", "frontmatter", "inline_metadata"]:
+            raise typer.BadParameter(
+                f"Invalid type '{row['type']}' in CSV file. Must be one of 'tag', 'frontmatter', 'inline_metadata'"
+            )
+
         if row["path"] not in csv_dict:
             csv_dict[row["path"]] = []

@@ -237,12 +240,9 @@ def validate_csv_bulk_imports(csv_path: Path, note_paths: list) -> dict[str, lis
     paths_to_remove = [x for x in csv_dict if x not in note_paths]

     for _path in paths_to_remove:
-        alerts.warning(f"'{_path}' does not exist in vault. Skipping...")
-        del csv_dict[_path]
-
-    if len(csv_dict) == 0:
-        log.error("No paths in the CSV file matched paths in the vault")
-        raise typer.Exit(1)
+        raise typer.BadParameter(
+            f"'{_path}' in CSV does not exist in vault. Ensure all paths are relative to the vault root."
+        )

     return csv_dict

@@ -134,7 +134,7 @@ def test_validate_csv_bulk_imports_1(tmp_path):
     csv_path = tmp_path / "test.csv"
     csv_content = """\
 PATH,type,key,value
-note1.md,type,key,value"""
+note1.md,frontmatter,key,value"""
     csv_path.write_text(csv_content)

     with pytest.raises(typer.BadParameter):
@@ -151,7 +151,7 @@ def test_validate_csv_bulk_imports_2(tmp_path):
     csv_path = tmp_path / "test.csv"
     csv_content = """\
 path,Type,key,value
-note1.md,type,key,value"""
+note1.md,frontmatter,key,value"""
     csv_path.write_text(csv_content)

     with pytest.raises(typer.BadParameter):
@@ -168,7 +168,7 @@ def test_validate_csv_bulk_imports_3(tmp_path):
     csv_path = tmp_path / "test.csv"
     csv_content = """\
 path,type,value
-note1.md,type,key,value"""
+note1.md,frontmatter,key,value"""
     csv_path.write_text(csv_content)

     with pytest.raises(typer.BadParameter):
@@ -185,7 +185,7 @@ def test_validate_csv_bulk_imports_4(tmp_path):
     csv_path = tmp_path / "test.csv"
     csv_content = """\
 path,type,key,values
-note1.md,type,key,value"""
+note1.md,frontmatter,key,value"""
     csv_path.write_text(csv_content)

     with pytest.raises(typer.BadParameter):
@@ -207,7 +207,7 @@ def test_validate_csv_bulk_imports_5(tmp_path):
         validate_csv_bulk_imports(csv_path=csv_path, note_paths=[])


-def test_validate_csv_bulk_imports_6(tmp_path, capsys):
+def test_validate_csv_bulk_imports_6(tmp_path):
     """Test the validate_csv_bulk_imports function.

     GIVEN a valid csv file
@@ -217,30 +217,77 @@ def test_validate_csv_bulk_imports_6(tmp_path, capsys):
     csv_path = tmp_path / "test.csv"
     csv_content = """\
 path,type,key,value
-note1.md,type,key,value
-note2.md,type,key,value
+note1.md,frontmatter,key,value
+note1.md,tag,key,value
+note1.md,inline_metadata,key,value
+note1.md,inline_metadata,key2,value
+note1.md,inline_metadata,key2,value2
+note2.md,frontmatter,key,value
+note2.md,tag,key,value
+note2.md,inline_metadata,key,value
+note2.md,inline_metadata,key2,value
+note2.md,inline_metadata,key2,value2
 """
     csv_path.write_text(csv_content)

-    csv_dict = validate_csv_bulk_imports(csv_path=csv_path, note_paths=["note1.md"])
-    captured = remove_ansi(capsys.readouterr().out)
-    assert "WARNING | 'note2.md' does not exist in vault." in captured
-    assert csv_dict == {"note1.md": [{"key": "key", "type": "type", "value": "value"}]}
+    with pytest.raises(typer.BadParameter):
+        validate_csv_bulk_imports(csv_path=csv_path, note_paths=["note1.md"])


 def test_validate_csv_bulk_imports_7(tmp_path):
     """Test the validate_csv_bulk_imports function.

     GIVEN a valid csv file
-    WHEN no paths match paths in the vault
+    WHEN if a type is not 'frontmatter' or 'inline_metadata', 'tag'
     THEN exit the program
     """
     csv_path = tmp_path / "test.csv"
     csv_content = """\
 path,type,key,value
-note1.md,type,key,value
-note2.md,type,key,value
+note1.md,frontmatter,key,value
+note2.md,notvalid,key,value
 """
     csv_path.write_text(csv_content)
-    with pytest.raises(typer.Exit):
-        validate_csv_bulk_imports(csv_path=csv_path, note_paths=[])
+    with pytest.raises(typer.BadParameter):
+        validate_csv_bulk_imports(csv_path=csv_path, note_paths=["note1.md", "note2.md"])
+
+
+def test_validate_csv_bulk_imports_8(tmp_path):
+    """Test the validate_csv_bulk_imports function.
+
+    GIVEN a valid csv file
+    WHEN more than one row has the same path
+    THEN add the row to the list of rows for that path
+    """
+    csv_path = tmp_path / "test.csv"
+    csv_content = """\
+path,type,key,value
+note1.md,frontmatter,key,value
+note1.md,tag,key,value
+note1.md,inline_metadata,key,value
+note1.md,inline_metadata,key2,value
+note1.md,inline_metadata,key2,value2
+note2.md,frontmatter,key,value
+note2.md,tag,key,value
+note2.md,inline_metadata,key,value
+note2.md,inline_metadata,key2,value
+note2.md,inline_metadata,key2,value2
+"""
+    csv_path.write_text(csv_content)
+    csv_dict = validate_csv_bulk_imports(csv_path=csv_path, note_paths=["note1.md", "note2.md"])
+    assert csv_dict == {
+        "note1.md": [
+            {"key": "key", "type": "frontmatter", "value": "value"},
+            {"key": "key", "type": "tag", "value": "value"},
+            {"key": "key", "type": "inline_metadata", "value": "value"},
+            {"key": "key2", "type": "inline_metadata", "value": "value"},
+            {"key": "key2", "type": "inline_metadata", "value": "value2"},
+        ],
+        "note2.md": [
+            {"key": "key", "type": "frontmatter", "value": "value"},
+            {"key": "key", "type": "tag", "value": "value"},
+            {"key": "key", "type": "inline_metadata", "value": "value"},
+            {"key": "key2", "type": "inline_metadata", "value": "value"},
+            {"key": "key2", "type": "inline_metadata", "value": "value2"},
+        ],
+    }