Expand config validation to most of the config #73

Merged: 3 commits, Sep 14, 2023
2 changes: 1 addition & 1 deletion buildrunner/config.py
@@ -34,7 +34,7 @@
load_config,
)

-from buildrunner.validation.config_model import validate_config
+from buildrunner.validation.config import validate_config

from . import fetch

40 changes: 20 additions & 20 deletions buildrunner/docker/multiplatform_image_builder.py
@@ -17,7 +17,7 @@

from buildrunner.docker import get_dockerfile

-LOGGER = logging.getLogger(__name__)
+logger = logging.getLogger(__name__)


class ImageInfo:
@@ -139,14 +139,14 @@ def __exit__(self, exc_type, exc_value, traceback):
for name, images in self._intermediate_built_images.items():
for image in images:
for tag in image.tags:
LOGGER.debug(f"Removing image {image.repo}:{tag} for {name}")
logger.debug(f"Removing image {image.repo}:{tag} for {name}")
docker.image.remove(f"{image.repo}:{tag}", force=True)

# Removes all tagged images if keep_images is False
if self._tagged_images_names and not self._keep_images:
for name, images in self._tagged_images_names.items():
for image in images:
LOGGER.debug(f"Removing tagged image {image} for {name}")
logger.debug(f"Removing tagged image {image} for {name}")
docker.image.remove(image, force=True)

@property
@@ -180,7 +180,7 @@ def _start_local_registry(self):
str: The name of the registry container
"""
if not self._local_registry_is_running:
LOGGER.debug("Starting local docker registry")
logger.debug("Starting local docker registry")
container = docker.run("registry", detach=True, publish_all=True)
ports = container.network_settings.ports

@@ -194,23 +194,23 @@

self._registry_info = RegistryInfo(container.name, "localhost", ports.get("5000/tcp")[0].get("HostPort"))
self._local_registry_is_running = True
LOGGER.debug(f"Started local registry {self._registry_info}")
logger.debug(f"Started local registry {self._registry_info}")
else:
LOGGER.warning("Local registry is already running")
logger.warning("Local registry is already running")

def _stop_local_registry(self):
"""
Stops and removes the local registry along with any images
"""
if self._local_registry_is_running:
LOGGER.debug(f"Stopping and removing local registry {self._registry_info}")
logger.debug(f"Stopping and removing local registry {self._registry_info}")
try:
docker.remove(self._registry_info.name, volumes=True, force=True)
except python_on_whales.exceptions.NoSuchContainer as err:
LOGGER.error(f"Failed to stop and remove local registry {self._registry_info.name}: {err}")
logger.error(f"Failed to stop and remove local registry {self._registry_info.name}: {err}")
self._local_registry_is_running = False
else:
LOGGER.warning("Local registry is not running when attempting to stop it")
logger.warning("Local registry is not running when attempting to stop it")

# pylint: disable=too-many-arguments
@retry(python_on_whales.exceptions.DockerException, tries=5, delay=1)
@@ -248,7 +248,7 @@ def _build_single_image(self,
f"'{file}'({os.path.exists(f'{file}')}) does not exist!"

tagged_names = [f"{name}:{tag}" for tag in tags]
LOGGER.debug(f"Building {tagged_names} for {platform}")
logger.debug(f"Building {tagged_names} for {platform}")

# Build the image with the specified tags
image = docker.buildx.build(path,
@@ -270,9 +270,9 @@
try:
docker.image.remove(images, force=True)
except python_on_whales.exceptions.DockerException as err:
LOGGER.warning(f"Failed to remove {images}: {err}")
logger.warning(f"Failed to remove {images}: {err}")
except python_on_whales.exceptions.DockerException as err:
LOGGER.error(f"Failed to build {tag_name}: {err}")
logger.error(f"Failed to build {tag_name}: {err}")
raise err

built_images.append(ImageInfo(repo=name,
@@ -321,7 +321,7 @@ def build_multiple_images(self,

dockerfile, cleanup_dockerfile = get_dockerfile(file)

LOGGER.debug(f"Building {name}:{tags} for platforms {platforms} from {dockerfile}")
logger.debug(f"Building {name}:{tags} for platforms {platforms} from {dockerfile}")

if self._use_local_registry and not self._local_registry_is_running:
# Starts local registry container to do ephemeral image storage
@@ -341,7 +341,7 @@
processes = []
for platform in platforms:
curr_name = f"{base_image_name}-{platform.replace('/', '-')}"
LOGGER.debug(f"Building {curr_name} for {platform}")
logger.debug(f"Building {curr_name} for {platform}")
if do_multiprocessing:
processes.append(Process(target=self._build_single_image,
args=(curr_name,
@@ -412,7 +412,7 @@ def push(self, name: str, dest_names: List[str] = None) -> None:
timeout_seconds = initial_timeout_seconds
while retries > 0:
retries -= 1
LOGGER.debug(f"Creating manifest list {name} with timeout {timeout_seconds} seconds")
logger.debug(f"Creating manifest list {name} with timeout {timeout_seconds} seconds")
curr_process = Process(target=docker.buildx.imagetools.create,
kwargs={"sources": src_names, "tags": tagged_names})
curr_process.start()
@@ -424,7 +424,7 @@ def push(self, name: str, dest_names: List[str] = None) -> None:
f" and {timeout_seconds} seconds each try")
else:
# Process finished within timeout
LOGGER.info(f"Successfully created multiplatform images {dest_names}")
logger.info(f"Successfully created multiplatform images {dest_names}")
break
timeout_seconds += timeout_step_seconds
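
Aside from the logger rename, the hunk above shows push() retrying docker.buildx.imagetools.create in a subprocess with a timeout that grows on each attempt; the elided middle of the hunk presumably joins the process against that timeout. A generic, self-contained sketch of the same pattern follows. It is not buildrunner's actual code, and flaky_call is a hypothetical stand-in for the imagetools call.

    # Generic sketch of a retry loop with an escalating per-attempt timeout.
    from multiprocessing import Process
    import time

    def flaky_call():
        # Hypothetical stand-in for docker.buildx.imagetools.create.
        time.sleep(2)

    if __name__ == '__main__':
        retries = 5
        timeout_seconds = 1        # initial timeout
        timeout_step_seconds = 1   # added after each attempt that times out
        while retries > 0:
            retries -= 1
            proc = Process(target=flaky_call)
            proc.start()
            proc.join(timeout_seconds)
            if proc.is_alive():
                # Attempt timed out: kill it and retry with a longer timeout.
                proc.terminate()
                proc.join()
            else:
                # Attempt finished within the timeout.
                break
            timeout_seconds += timeout_step_seconds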

@@ -444,7 +444,7 @@ def _find_native_platform_images(self, name: str) -> str:
"""
host_os = system()
host_arch = machine()
LOGGER.debug(f"Finding native platform for {name} for {host_os}/{host_arch}")
logger.debug(f"Finding native platform for {name} for {host_os}/{host_arch}")
pattern = f"{host_os}-{host_arch}"

# No images built for this name
@@ -481,7 +481,7 @@ def tag_single_platform(self, name: str, tags: List[str] = None, dest_name: str
# This is to handle pylint's "dangerous-default-value" error
if tags is None:
tags = ["latest"]
LOGGER.debug(f"Tagging {name} with tags {tags} - Dest name: {dest_name}")
logger.debug(f"Tagging {name} with tags {tags} - Dest name: {dest_name}")
source_image = self._find_native_platform_images(name)
if dest_name is None:
dest_name = name
@@ -497,11 +497,11 @@ def tag_single_platform(self, name: str, tags: List[str] = None, dest_name: str
for tag in tags:
dest_tag = f"{dest_name}:{tag}"
docker.tag(image, dest_tag)
LOGGER.debug(f"Tagged {image} as {dest_tag}")
logger.debug(f"Tagged {image} as {dest_tag}")
self._tagged_images_names[name].append(dest_tag)
docker.image.remove(image, force=True)
except python_on_whales.exceptions.DockerException as err:
LOGGER.error(f"Failed while tagging {dest_name}: {err}")
logger.error(f"Failed while tagging {dest_name}: {err}")
raise err

def get_built_images(self, name: str) -> List[str]:
buildrunner/validation/config.py
@@ -9,68 +9,45 @@
from typing import Dict, List, Optional, Set, Union

# pylint: disable=no-name-in-module
-from pydantic import BaseModel, validator, ValidationError
+from pydantic import BaseModel, Field, validator, ValidationError

+from buildrunner.validation.errors import Errors, get_validation_errors
+from buildrunner.validation.step import Step, StepPushCommitDict

-class Errors:
-    """ Error class for storing validation errors """
-    class Error:
-        """ Error class for storing validation error """
-        def __init__(self, field: str, message: str):
-            self.field: str = field
-            self.message: Union[str, None] = message
-
-    def __init__(self):
-        self.errors = []
-
-    def add(self, field: str, message: str):
-        """ Add an error """
-        self.errors.append(self.Error(field, message))
-
-    def count(self):
-        """ Return the number of errors """
-        return len(self.errors)
-
-    def __str__(self):
-        return '\n'.join([f' {error.field}: {error.message}' for error in self.errors])
-
-    def __repr__(self):
-        return self.__str__()
-
-
-class StepBuild(BaseModel):
-    """ Build model within a step """
-    path: Optional[str]
-    dockerfile: Optional[str]
-    pull: Optional[bool]
-    platform: Optional[str]
-    platforms: Optional[List[str]]
-
-
-class StepPushDict(BaseModel):
-    """ Push model within a step """
-    repository: str
-    tags: Optional[List[str]]
-
-
-class Step(BaseModel):
-    """ Step model """
-    build: Optional[Union[StepBuild, str]]
-    push: Optional[Union[StepPushDict, List[Union[str, StepPushDict]], str]]
-
-    def is_multi_platform(self):
-        """
-        Check if the step is a multi-platform build step
-        """
-        return isinstance(self.build, StepBuild) and \
-            self.build.platforms is not None
+class Config(BaseModel, extra='forbid'):
+    """ Top level config model """
+
+    # Unclear if this is actively used
+    class GithubModel(BaseModel, extra='forbid'):
Contributor: This is cool syntax; I didn't know that was possible. Should the Config top-level class also forbid extra?

Contributor Author: I was just afraid that I may have missed something. I'll add it to the top-level config. Then if it fails, we can address where the issue is: either update the validation code, the documentation, or the buildrunner config with the issue.
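
The class keyword syntax being discussed is pydantic 1.10's model class kwargs, which set the model's Config options inline (equivalent to extra = 'forbid' on an inner Config class). A minimal sketch of the effect, not part of the PR, reusing the GithubModel fields from the diff and a deliberately misspelled key:

    # Sketch only, assuming pydantic 1.10: extra='forbid' turns unknown keys
    # into validation errors instead of silently dropping them.
    from pydantic import BaseModel, ValidationError

    class GithubModel(BaseModel, extra='forbid'):
        endpoint: str
        version: str
        username: str
        app_token: str

    try:
        # "end_point" is a deliberate typo for "endpoint".
        GithubModel(end_point='https://github.example.com/api/v3', version='v3',
                    username='me', app_token='xyz')
    except ValidationError as exc:
        print(exc)
        # endpoint   field required (value_error.missing)
        # end_point  extra fields not permitted (value_error.extra)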

""" Github model """
endpoint: str
version: str
username: str
app_token: str

class SSHKey(BaseModel, extra='forbid'):
""" SSH key model """
file: Optional[str]
Contributor: Are these working without assigning "None" to the value? That's surprising to me; I thought newer pydantic versions required it.

Contributor Author: Yes, it appears to be working. But I don't quite understand what you are saying. Can you give me an example of what you mean?

Contributor: I've had to do file: Optional[str] = None to get it working in some cases (newer pydantic versions, I thought).

Contributor Author: This is using an older version of pydantic, 1.10.11, because that is the highest that python-on-whales currently allows. That may be why I am able to get away with using that syntax.

Contributor: Ah, yes, I knew that.
+        key: Optional[str]
+        password: Optional[str]
+        prompt_password: Optional[bool] = Field(alias='prompt-password')
+        aliases: Optional[List[str]]
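
The thread above concerns pydantic's handling of Optional fields with no explicit default. A minimal sketch of the behaviour under pydantic 1.10 (the version pinned through python-on-whales), using a trimmed copy of the SSHKey model; under pydantic v2 the same declarations would need an explicit = None (or Field(default=None, ...)) to remain optional:

    # Sketch only, assuming pydantic 1.10: an Optional annotation implies a
    # default of None, so the field is not required.
    from typing import Optional
    from pydantic import BaseModel, Field

    class SSHKey(BaseModel, extra='forbid'):
        file: Optional[str]
        prompt_password: Optional[bool] = Field(alias='prompt-password')

    print(SSHKey())                             # file=None prompt_password=None
    print(SSHKey(**{'prompt-password': True}))  # populated through the dashed alias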

-class Config(BaseModel):
-    """ Top level config model """
     version: Optional[float]
     steps: Optional[Dict[str, Step]]

+    github: Optional[Dict[str, GithubModel]]
+    # Global config attributes
+    env: Optional[Dict[str, str]]
+    build_servers: Optional[Dict[str, Union[str, List[str]]]] = Field(alias='build-servers')
+    # Intentionally has loose restrictions on ssh-keys since documentation isn't clear
+    ssh_keys: Optional[Union[SSHKey, List[SSHKey]]] = Field(alias='ssh-keys')
+    local_files: Optional[Dict[str, str]] = Field(alias='local-files')
+    caches_root: Optional[str] = Field(alias='caches-root')
+    docker_registry: Optional[str] = Field(alias='docker-registry')
+    temp_dir: Optional[str] = Field(alias='temp-dir')
+
+    # Note this is pydantic version 1.10 syntax
     @validator('steps')
     @classmethod
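
The validate_steps hook below uses pydantic 1.10's @validator decorator, which is what the "pydantic version 1.10 syntax" comment refers to. A minimal standalone illustration of the pattern, using a hypothetical MiniConfig rather than the real Config model:

    # Sketch only, pydantic 1.10 syntax: a @validator receives the parsed value
    # and can reject it by raising ValueError, which pydantic reports as a
    # ValidationError on that field.
    from typing import Dict, Optional
    from pydantic import BaseModel, ValidationError, validator

    class MiniConfig(BaseModel, extra='forbid'):
        steps: Optional[Dict[str, dict]]

        @validator('steps')
        @classmethod
        def validate_steps(cls, values):
            if values is not None and not values:
                raise ValueError('no steps defined')
            return values

    try:
        MiniConfig(steps={})
    except ValidationError as exc:
        print(exc)  # steps: no steps defined (value_error)
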
@@ -82,7 +59,7 @@ def validate_steps(cls, values) -> None:
ValueError | pydantic.ValidationError : If the config file is invalid
"""

-def validate_push(push: Union[StepPushDict, List[Union[str, StepPushDict]], str],
+def validate_push(push: Union[StepPushCommitDict, str, List[Union[str, StepPushCommitDict]]],
mp_push_tags: Set[str],
step_name: str,
update_mp_push_tags: bool = True):
@@ -107,7 +84,7 @@ def validate_push(push: Union[StepPushDict, List[Union[str, StepPushDict]], str]
if ":" not in name:
name = f'{name}:latest'

-if isinstance(push, StepPushDict):
+if isinstance(push, StepPushCommitDict):
names = [f"{push.repository}:{tag}" for tag in push.tags]

if names is not None:
@@ -169,14 +146,6 @@ def validate_multi_platform_build(mp_push_tags: Set[str]):
return values


-def _add_validation_errors(exc: ValidationError) -> Errors:
-    errors = Errors()
-    for error in exc.errors():
-        loc = [str(item) for item in error["loc"]]
-        errors.add(field='.'.join(loc), message=f'{error["msg"]} ({error["type"]})')
-    return errors


def validate_config(**kwargs) -> Errors:
"""
Check if the config file is valid
@@ -188,5 +157,5 @@
try:
Config(**kwargs)
except ValidationError as exc:
-errors = _add_validation_errors(exc)
+errors = get_validation_errors(exc)
return errors
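
Taken together, validate_config() now checks most of a buildrunner config against the Config model above. A usage sketch, not from the PR, with a hypothetical config dict; the step layout assumes the Step and StepPushCommitDict models imported from buildrunner.validation.step, which are not shown in full here.

    # Sketch only: feed a hypothetical config dict to validate_config() and
    # report any problems via the returned Errors object (None means valid).
    from buildrunner.validation.config import validate_config

    config = {
        'version': 2.0,
        'docker-registry': 'docker-mirror.example.com',  # populated via the Field alias
        'steps': {
            'build-image': {
                'build': {'dockerfile': 'Dockerfile', 'platforms': ['linux/amd64', 'linux/arm64']},
                'push': {'repository': 'myorg/myimage', 'tags': ['latest']},
            },
        },
    }

    errors = validate_config(**config)
    if errors is not None and errors.count() > 0:
        print(f'Invalid config:\n{errors}')
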
48 changes: 48 additions & 0 deletions buildrunner/validation/errors.py
@@ -0,0 +1,48 @@
"""
Copyright 2023 Adobe
All Rights Reserved.

NOTICE: Adobe permits you to use, modify, and distribute this file in accordance
with the terms of the Adobe license agreement accompanying it.
"""
from typing import Union

from pydantic import ValidationError


class Errors:
    """ Error class for storing validation errors """
    class Error:
        """ Error class for storing validation error """
        def __init__(self, field: str, message: str):
            self.field: str = field
            self.message: Union[str, None] = message

    def __init__(self):
        self.errors = []

    def add(self, field: str, message: str):
        """ Add an error """
        self.errors.append(self.Error(field, message))

    def count(self):
        """ Return the number of errors """
        return len(self.errors)

    def __str__(self):
        return '\n'.join([f' {error.field}: {error.message}' for error in self.errors])

    def __repr__(self):
        return self.__str__()


def get_validation_errors(exc: ValidationError) -> Errors:
    """ Get validation errors to an Errors object """
    errors = Errors()
    for error in exc.errors():
        loc = [str(item) for item in error["loc"]]
        if error["type"] == "value_error.extra":
            errors.add(field='.'.join(loc), message='not a valid field, please check the spelling and documentation')
        else:
            errors.add(field='.'.join(loc), message=f'{error["msg"]} ({error["type"]})')
    return errors
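
A small usage sketch of the new module, not part of the PR: get_validation_errors() flattens a pydantic ValidationError into the Errors container, and the value_error.extra branch gives misspelled keys a friendlier message. TinyModel is a stand-in, not a buildrunner model.

    # Sketch only, assuming pydantic 1.10.
    from pydantic import BaseModel, ValidationError
    from buildrunner.validation.errors import get_validation_errors

    class TinyModel(BaseModel, extra='forbid'):
        name: str

    try:
        TinyModel(nme='typo')   # "nme" is a deliberate misspelling of "name"
    except ValidationError as exc:
        errors = get_validation_errors(exc)
        print(errors.count())   # 2: "name" is missing, "nme" is an extra field
        print(errors)           # "nme" gets the friendlier value_error.extra message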