Skip to content

Commit

Permalink
Merge pull request #280 from datalad/issue-275-validate-aggregate-arguments
Browse files Browse the repository at this point in the history

Improve `meta-aggregate` argument handling
  • Loading branch information
christian-monch authored Aug 31, 2022
2 parents 6ba8ec3 + 750c070 commit b47b593
Show file tree
Hide file tree
Showing 5 changed files with 16 additions and 8 deletions.
15 changes: 11 additions & 4 deletions datalad_metalad/aggregate.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,10 +156,13 @@ class Aggregate(Interface):
constraints=EnsureDataset() | EnsureNone()),
path=Parameter(
args=("path",),
metavar="PATH",
metavar="SUB_DATASET_PATH",
doc=r"""
PATH to a sub-dataset whose metadata shall be aggregated into
the topmost dataset (ROOT_DATASET)""",
SUB_DATASET_PATH is a path to a sub-dataset whose metadata shall be
aggregated into the topmost dataset (ROOT_DATASET). The sub-dataset
must be located within the directory of the topmost dataset. Note:
if SUB_DATASET_PATH is relative, it is resolved against the current
working directory, not against the path of the topmost dataset""",
nargs="+",
constraints=EnsureStr() | EnsureNone()))

Expand Down Expand Up @@ -249,7 +252,11 @@ def process_path_spec(root_dataset: Dataset,

result = []
for path in paths:
sub_dataset = check_dataset(path, "meta_aggregate")
path_object = Path(path).absolute()
if path_object == root_dataset.pathobj:
raise ValueError(
f"Cannot aggregate {path_object} into itself")
sub_dataset = check_dataset(str(path_object), "meta_aggregate")
result.append((
MetadataPath(sub_dataset.pathobj.relative_to(root_dataset.pathobj)),
sub_dataset.pathobj))
Expand Down
3 changes: 2 additions & 1 deletion datalad_metalad/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
import pkg_resources
import sys
from itertools import islice
from pathlib import Path
from typing import Dict, List, Union

from datalad.distribution.dataset import (
Expand Down Expand Up @@ -57,7 +58,7 @@ def check_dataset(dataset_or_path: Union[Dataset, str],
if ve.args and ve.args[0].startswith("No installed dataset found "):
raise NoDatasetFound(
"No valid datalad dataset found at: "
f"{dataset_or_path}")
f"{Path(dataset_or_path).absolute()}")
else:
raise

Expand Down
2 changes: 1 addition & 1 deletion requirements-devel.txt
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ coverage
sphinx>=1.7.8
sphinx-rtd-theme
pyyaml
datalad-metadata-model>=0.3.0,<0.4.0
datalad-metadata-model>=0.3.5,<0.4.0
pytest
pytest-cov

Expand Down
2 changes: 1 addition & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -3,4 +3,4 @@ datalad>=0.15.6
sphinx>=1.7.8
sphinx-rtd-theme
pyyaml
datalad-metadata-model>=0.3.1,<0.4.0
datalad-metadata-model>=0.3.5,<0.4.0
2 changes: 1 addition & 1 deletion setup.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ python_requires = >= 3.5
install_requires =
six
datalad >=0.15.6
datalad-metadata-model >=0.3.0,<0.4.0
datalad-metadata-model >=0.3.5,<0.4.0
pyyaml
test_requires =
coverage
Expand Down

0 comments on commit b47b593

Please sign in to comment.