Skip to content

Commit

Permalink
The code uses Pydantic V2
Browse files Browse the repository at this point in the history
* requirements files are regenerated with newer versions
  of pydantic and pypi_simple
* many deprecated decorators and methods are replaced
* almost every validation error message is updated

JIRA: STONEBLD-1571

Signed-off-by: Michal Šoltis <[email protected]>
  • Loading branch information
slimreaper35 authored and chmeliik committed Oct 10, 2023
1 parent 9a257ad commit 52a2172
Show file tree
Hide file tree
Showing 14 changed files with 409 additions and 234 deletions.
2 changes: 1 addition & 1 deletion cachi2/core/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,4 +34,4 @@ def set_config(path: Path) -> None:
"""Set global config variable using input from file."""
global config

config = parse_user_input(Config.parse_obj, yaml.safe_load(path.read_text()))
config = parse_user_input(Config.model_validate, yaml.safe_load(path.read_text()))
52 changes: 28 additions & 24 deletions cachi2/core/models/input.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ class _PackageInputBase(pydantic.BaseModel, extra="forbid"):
type: PackageManagerType
path: Path = Path(".")

@pydantic.validator("path")
@pydantic.field_validator("path")
def _path_is_relative(cls, path: Path) -> Path:
return check_sane_relpath(path)

Expand All @@ -87,17 +87,19 @@ class PipPackageInput(_PackageInputBase):
requirements_build_files: Optional[list[Path]] = None
allow_binary: bool = False

@pydantic.validator("requirements_files", "requirements_build_files")
@pydantic.field_validator("requirements_files", "requirements_build_files")
def _no_explicit_none(cls, paths: Optional[list[Path]]) -> list[Path]:
"""Fail if the user explicitly passes None."""
if paths is None:
# Note: same error message as pydantic's default
raise TypeError("none is not an allowed value")
raise ValueError("none is not an allowed value")
return paths

@pydantic.validator("requirements_files", "requirements_build_files", each_item=True)
def _requirements_file_path_is_relative(cls, path: Path) -> Path:
return check_sane_relpath(path)
@pydantic.field_validator("requirements_files", "requirements_build_files")
def _requirements_file_path_is_relative(cls, paths: list[Path]) -> list[Path]:
for p in paths:
check_sane_relpath(p)
return paths


class YarnPackageInput(_PackageInputBase):
Expand All @@ -121,30 +123,32 @@ class Request(pydantic.BaseModel):
packages: list[PackageInput]
flags: frozenset[Flag] = frozenset()

@pydantic.validator("packages")
@pydantic.field_validator("packages")
def _unique_packages(cls, packages: list[PackageInput]) -> list[PackageInput]:
"""De-duplicate the packages to be processed."""
return unique(packages, by=lambda pkg: (pkg.type, pkg.path))

@pydantic.validator("packages", each_item=True)
def _check_package_paths(cls, package: PackageInput, values: dict) -> PackageInput:
@pydantic.field_validator("packages")
def _check_packages_paths(
cls, packages: list[PackageInput], info: pydantic.ValidationInfo
) -> list[PackageInput]:
"""Check that package paths are existing subdirectories."""
source_dir = values.get("source_dir")
# Don't run validation if source_dir failed to validate
source_dir: RootedPath = info.data.get("source_dir", None)
if source_dir is not None:
try:
abspath = source_dir.join_within_root(package.path).path
except PathOutsideRoot:
raise ValueError(
f"package path (a symlink?) leads outside source directory: {package.path}"
)
if not abspath.is_dir():
raise ValueError(
f"package path does not exist (or is not a directory): {package.path}"
)
return package

@pydantic.validator("packages")
for p in packages:
try:
abspath = source_dir.join_within_root(p.path)
except PathOutsideRoot:
raise ValueError(
f"package path (a symlink?) leads outside source directory: {p.path}"
)
if not abspath.path.is_dir():
raise ValueError(
f"package path does not exist (or is not a directory): {p.path}"
)
return packages

@pydantic.field_validator("packages")
def _packages_not_empty(cls, packages: list[PackageInput]) -> list[PackageInput]:
"""Check that the packages list is not empty."""
if len(packages) == 0:
Expand Down
4 changes: 2 additions & 2 deletions cachi2/core/models/output.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,12 +72,12 @@ class BuildConfig(pydantic.BaseModel):
environment_variables: list[EnvironmentVariable] = []
project_files: list[ProjectFile] = []

@pydantic.validator("environment_variables")
@pydantic.field_validator("environment_variables")
def _unique_env_vars(cls, env_vars: list[EnvironmentVariable]) -> list[EnvironmentVariable]:
"""Sort and de-duplicate environment variables by name."""
return unique_sorted(env_vars, by=lambda env_var: env_var.name)

@pydantic.validator("project_files")
@pydantic.field_validator("project_files")
def _unique_project_files(cls, project_files: list[ProjectFile]) -> list[ProjectFile]:
"""Sort and de-duplicate project files by path."""
return unique_sorted(project_files, by=lambda f: f.abspath)
Expand Down
2 changes: 1 addition & 1 deletion cachi2/core/models/property_semantics.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ def merge_component_group(component_group: Iterable[Component]) -> Component:
prop_sets = (PropertySet.from_properties(c.properties) for c in component_group)
merged_prop_set = functools.reduce(PropertySet.merge, prop_sets)
component = component_group[0]
return component.copy(update={"properties": merged_prop_set.to_properties()})
return component.model_copy(update={"properties": merged_prop_set.to_properties()})

return [merge_component_group(g) for _, g in grouped_components]

Expand Down
6 changes: 3 additions & 3 deletions cachi2/core/models/sbom.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ class Component(pydantic.BaseModel):
name: str
purl: str
version: Optional[str] = None
properties: list[Property] = []
properties: list[Property] = pydantic.Field(default_factory=list, validate_default=True)
type: Literal["library"] = "library"

def key(self) -> str:
Expand All @@ -42,7 +42,7 @@ def key(self) -> str:
"""
return self.purl

@pydantic.validator("properties", always=True)
@pydantic.field_validator("properties")
def _add_found_by_property(cls, properties: list[Property]) -> list[Property]:
if FOUND_BY_CACHI2_PROPERTY not in properties:
properties.append(FOUND_BY_CACHI2_PROPERTY)
Expand Down Expand Up @@ -88,7 +88,7 @@ class Sbom(pydantic.BaseModel):
spec_version: str = pydantic.Field(alias="specVersion", default="1.4")
version: int = 1

@pydantic.validator("components")
@pydantic.field_validator("components")
def _unique_components(cls, components: list[Component]) -> list[Component]:
"""Sort and de-duplicate components."""
return unique_sorted(components, by=lambda component: component.key())
10 changes: 5 additions & 5 deletions cachi2/core/package_managers/gomod.py
Original file line number Diff line number Diff line change
Expand Up @@ -58,7 +58,7 @@ class _ParsedModel(pydantic.BaseModel):
>>> class SomeModel(_GolangModel):
some_attribute: str
>>> SomeModel.parse_obj({"SomeAttribute": "hello"})
>>> SomeModel.model_validate({"SomeAttribute": "hello"})
SomeModel(some_attribute="hello")
"""

Expand All @@ -68,7 +68,7 @@ def alias_generator(attr_name: str) -> str:
return "".join(word.capitalize() for word in attr_name.split("_"))

# allow SomeModel(some_attribute="hello"), not just SomeModel(SomeAttribute="hello")
allow_population_by_field_name = True
populate_by_name = True


class ParsedModule(_ParsedModel):
Expand All @@ -94,7 +94,7 @@ class ParsedPackage(_ParsedModel):

import_path: str
standard: bool = False
module: Optional[ParsedModule]
module: Optional[ParsedModule] = None


class ResolvedGoModule(NamedTuple):
Expand Down Expand Up @@ -569,7 +569,7 @@ def _resolve_gomod(
log.info("Downloading the gomod dependencies")
download_cmd = ["go", "mod", "download", "-json"]
downloaded_modules = (
ParsedModule.parse_obj(obj)
ParsedModule.model_validate(obj)
for obj in load_json_stream(_run_download_cmd(download_cmd, run_params))
)

Expand Down Expand Up @@ -597,7 +597,7 @@ def go_list_deps(pattern: Literal["./...", "all"]) -> Iterator[ParsedPackage]:
complete module list (roughly matching the list of downloaded modules).
"""
cmd = [*go_list, "-deps", "-json=ImportPath,Module,Standard,Deps", pattern]
return map(ParsedPackage.parse_obj, load_json_stream(_run_gomod_cmd(cmd, run_params)))
return map(ParsedPackage.model_validate, load_json_stream(_run_gomod_cmd(cmd, run_params)))

package_modules = (
module for pkg in go_list_deps("all") if (module := pkg.module) and not module.main
Expand Down
23 changes: 13 additions & 10 deletions cachi2/core/rooted_path.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
from os import PathLike
from pathlib import Path
from typing import Any, Callable, Iterator, TypeVar, Union
from typing import Any, TypeVar, Union

from pydantic_core import CoreSchema, core_schema

from cachi2.core.errors import UsageError

Expand Down Expand Up @@ -115,13 +117,14 @@ def join_within_root(self: RootedPathT, *other: StrPath) -> RootedPathT:
new._root = self.root
return new

# pydantic integration
@classmethod
def __get_validators__(cls: type[RootedPathT]) -> Iterator[Callable[[Any], RootedPathT]]:
yield cls._validate

@classmethod
def _validate(cls: type[RootedPathT], v: Any) -> RootedPathT:
if not isinstance(v, (str, PathLike)):
raise TypeError(f"expected str or os.PathLike, got {type(v).__name__}")
return cls(v)
def __get_pydantic_core_schema__(cls, source: Any, handler: Any) -> CoreSchema:
return core_schema.no_info_before_validator_function(
cls._validate, core_schema.any_schema()
)

@staticmethod
def _validate(value: Any) -> "RootedPath":
if not isinstance(value, (str, PathLike)):
raise ValueError(f"expected str or os.PathLike, got {type(value).__name__}")
return RootedPath(path=value)
10 changes: 5 additions & 5 deletions cachi2/interface/cli.py
Original file line number Diff line number Diff line change
Expand Up @@ -225,10 +225,10 @@ def combine_option_and_json_flags(json_flags: list[Flag]) -> list[str]:

return flags

input = parse_user_input(_Input.parse_obj, normalize_input())
input = parse_user_input(_Input.model_validate, normalize_input())

request = parse_user_input(
Request.parse_obj,
Request.model_validate,
{
"source_dir": source,
"output_dir": output,
Expand All @@ -241,13 +241,13 @@ def combine_option_and_json_flags(json_flags: list[Flag]) -> list[str]:

request.output_dir.path.mkdir(parents=True, exist_ok=True)
request.output_dir.join_within_root(".build-config.json").path.write_text(
request_output.build_config.json()
request_output.build_config.model_dump_json()
)

sbom = request_output.generate_sbom()
request.output_dir.join_within_root("bom.json").path.write_text(
# the Sbom model has camelCase aliases in some fields
sbom.json(by_alias=True, exclude_none=True)
sbom.model_dump_json(by_alias=True, exclude_none=True)
)

log.info(r"All dependencies fetched successfully \o/")
Expand Down Expand Up @@ -328,4 +328,4 @@ def _get_build_config(output_dir: Path) -> BuildConfig:
f"No .build-config.json found in {output_dir}. "
"Please use a directory populated by a previous fetch-deps command."
)
return BuildConfig.parse_raw(build_config_json.read_text())
return BuildConfig.model_validate_json(build_config_json.read_text())
Loading

0 comments on commit 52a2172

Please sign in to comment.