diff --git a/.github/workflows/release-publish.yaml b/.github/workflows/release-publish.yaml
index 05fed8c0..006dbec3 100644
--- a/.github/workflows/release-publish.yaml
+++ b/.github/workflows/release-publish.yaml
@@ -64,6 +64,8 @@ jobs:
- name: Release
uses: softprops/action-gh-release@v2
with:
+ # Generate release notes on the new GH release
+ generate_release_notes: true
# Add wheel and source tarball
files: |
*.whl
diff --git a/.github/workflows/security-scan.yaml b/.github/workflows/security-scan.yaml
new file mode 100644
index 00000000..35b2bed9
--- /dev/null
+++ b/.github/workflows/security-scan.yaml
@@ -0,0 +1,15 @@
+name: Security scan
+on:
+ pull_request:
+ push:
+ branches:
+ - main
+ - hotfix/*
+ - work/secscan # For development
+
+jobs:
+ python-scans:
+ name: Scan Python project
+ uses: canonical/starflow/.github/workflows/scan-python.yaml@main
+ with:
+ packages: python-apt-dev
diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml
index a402e28b..dabdad2a 100644
--- a/.github/workflows/tests.yaml
+++ b/.github/workflows/tests.yaml
@@ -60,10 +60,6 @@ jobs:
3.10
3.12
cache: 'pip'
- - name: Setup LXD
- uses: canonical/setup-lxd@v0.1.1
- with:
- channel: latest/stable
- name: Configure environment
run: |
echo "::group::apt-get"
@@ -111,10 +107,12 @@ jobs:
cache: 'pip'
- name: Setup LXD
uses: canonical/setup-lxd@v0.1.1
- with:
- channel: latest/stable
- name: Configure environment
run: |
+ echo "::group::Begin snap install"
+ echo "Installing snaps in the background while running apt and pip..."
+ sudo snap install --no-wait --channel=candidate fetch-service
+ echo "::endgroup::"
echo "::group::apt-get"
sudo apt update
sudo apt-get install -y libapt-pkg-dev
@@ -123,6 +121,9 @@ jobs:
python -m pip install tox
echo "::endgroup::"
mkdir -p results
+ echo "::group::Wait for snap to complete"
+ snap watch --last=install
+ echo "::endgroup::"
- name: Setup Tox environments
run: tox run -e integration-${{ matrix.python }} --notest
- name: Integration tests
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 5a9b3a40..7570dc9e 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,7 +2,7 @@
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
+ rev: v4.6.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
@@ -14,17 +14,17 @@ repos:
- id: mixed-line-ending
- repo: https://github.com/charliermarsh/ruff-pre-commit
# renovate: datasource=pypi;depName=ruff
- rev: "v0.0.267"
+ rev: "v0.6.3"
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- repo: https://github.com/psf/black
# renovate: datasource=pypi;depName=black
- rev: "23.3.0"
+ rev: "24.8.0"
hooks:
- id: black
- repo: https://github.com/adrienverge/yamllint.git
# renovate: datasource=pypi;depName=yamllint
- rev: "v1.31.0"
+ rev: "v1.35.1"
hooks:
- id: yamllint
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index d5fd1555..2971ddc0 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -12,7 +12,7 @@ sphinx:
build:
os: ubuntu-22.04
tools:
- python: "3"
+ python: "3.12"
python:
install:
diff --git a/craft_application/__init__.py b/craft_application/__init__.py
index b7bd325b..8848d94b 100644
--- a/craft_application/__init__.py
+++ b/craft_application/__init__.py
@@ -25,6 +25,7 @@
ProviderService,
ServiceFactory,
)
+from craft_application._config import ConfigModel
try:
from ._version import __version__
@@ -42,6 +43,7 @@
"AppFeatures",
"AppMetadata",
"AppService",
+ "ConfigModel",
"models",
"ProjectService",
"LifecycleService",
diff --git a/craft_application/_config.py b/craft_application/_config.py
new file mode 100644
index 00000000..c9619e12
--- /dev/null
+++ b/craft_application/_config.py
@@ -0,0 +1,37 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""Configuration model for craft applications."""
+from __future__ import annotations
+
+import craft_cli
+import pydantic
+
+
+class ConfigModel(pydantic.BaseModel):
+ """A configuration model for a craft application."""
+
+ verbosity_level: craft_cli.EmitterMode = craft_cli.EmitterMode.BRIEF
+ debug: bool = False
+ build_environment: str | None = None
+ secrets: str
+
+ platform: str | None = None
+ build_for: str | None = None
+
+ parallel_build_count: int
+ max_parallel_build_count: int
+ lxd_remote: str = "local"
+ launchpad_instance: str = "production"
diff --git a/craft_application/application.py b/craft_application/application.py
index f6ed19bf..c104b25f 100644
--- a/craft_application/application.py
+++ b/craft_application/application.py
@@ -17,6 +17,7 @@
from __future__ import annotations
+import argparse
import importlib
import os
import pathlib
@@ -36,7 +37,7 @@
from craft_parts.plugins.plugins import PluginType
from platformdirs import user_cache_path
-from craft_application import commands, errors, grammar, models, secrets, util
+from craft_application import _config, commands, errors, grammar, models, secrets, util
from craft_application.errors import PathInvalidError
from craft_application.models import BuildInfo, GrammarAwareProject
from craft_application.util import ProServices, ValidatorOptions
@@ -54,7 +55,7 @@
"craft_parts",
"craft_providers",
"craft_store",
- "craft_application.remote",
+ "craft_application",
}
)
@@ -81,6 +82,7 @@ class AppMetadata:
features: AppFeatures = AppFeatures()
project_variables: list[str] = field(default_factory=lambda: ["version"])
mandatory_adoptable_fields: list[str] = field(default_factory=lambda: ["version"])
+ ConfigModel: type[_config.ConfigModel] = _config.ConfigModel
ProjectClass: type[models.Project] = models.Project
BuildPlannerClass: type[models.BuildPlanner] = models.BuildPlanner
@@ -160,6 +162,11 @@ def __init__(
else:
self._work_dir = pathlib.Path.cwd()
+ # Whether the command execution should use the fetch-service
+ self._enable_fetch_service = False
+ # The kind of sessions that the fetch-service should create
+ self._fetch_service_policy = "strict"
+
@property
def app_config(self) -> dict[str, Any]:
"""Get the configuration passed to dispatcher.load_command().
@@ -175,22 +182,85 @@ def app_config(self) -> dict[str, Any]:
@property
def command_groups(self) -> list[craft_cli.CommandGroup]:
- """Return command groups."""
- lifecycle_commands = commands.get_lifecycle_command_group()
- other_commands = commands.get_other_command_group()
+ """Return command groups.
+
+ Merges command groups provided by the application with craft-application's
+ default commands.
+
+ If the application and craft-application provide a command with the same name
+ in the same group, the application's command is used.
+
+ Note that a command with the same name cannot exist in multiple groups.
+ """
+ lifecycle_default_commands = commands.get_lifecycle_command_group()
+ other_default_commands = commands.get_other_command_group()
+
+ merged = {group.name: group for group in self._command_groups}
+
+ merged[lifecycle_default_commands.name] = self._merge_defaults(
+ app_commands=merged.get(lifecycle_default_commands.name),
+ default_commands=lifecycle_default_commands,
+ )
+ merged[other_default_commands.name] = self._merge_defaults(
+ app_commands=merged.get(other_default_commands.name),
+ default_commands=other_default_commands,
+ )
+
+ return list(merged.values())
+
+ def _merge_defaults(
+ self,
+ *,
+ app_commands: craft_cli.CommandGroup | None,
+ default_commands: craft_cli.CommandGroup,
+ ) -> craft_cli.CommandGroup:
+ """Merge default commands with application commands for a particular group.
+
+ Default commands are only used if the application does not have a command
+ with the same name.
+
+ The order of the merged commands follows the order of the default commands.
+ Extra application commands are appended to the end of the command list.
- merged: dict[str, list[type[craft_cli.BaseCommand]]] = {}
- all_groups = [lifecycle_commands, other_commands, *self._command_groups]
+ :param app_commands: The application's commands.
+ :param default_commands: Craft Application's default commands.
- # Merge the default command groups with those provided by the application,
- # so that we don't get multiple groups with the same name.
- for group in all_groups:
- merged.setdefault(group.name, []).extend(group.commands)
+ :returns: A command group containing the merged application and default commands.
+ """
+ if not app_commands:
+ return default_commands
+
+ craft_cli.emit.debug(f"Merging commands for group {default_commands.name!r}:")
+
+ # for lookup of commands by name
+ app_commands_dict = {command.name: command for command in app_commands.commands}
+
+ merged_commands: list[type[craft_cli.BaseCommand]] = []
+ processed_command_names: set[str] = set()
+
+ for default_command in default_commands.commands:
+ # prefer the application command if it exists
+ command_name = default_command.name
+ if command_name in app_commands_dict:
+ craft_cli.emit.debug(
+ f" - using application command for {command_name!r}."
+ )
+ merged_commands.append(app_commands_dict[command_name])
+ processed_command_names.add(command_name)
+ # otherwise use the default
+ else:
+ merged_commands.append(default_command)
- return [
- craft_cli.CommandGroup(name, commands_)
- for name, commands_ in merged.items()
- ]
+ # append remaining commands from the application
+ for app_command in app_commands.commands:
+ if app_command.name not in processed_command_names:
+ merged_commands.append(app_command)
+
+ return craft_cli.CommandGroup(
+ name=default_commands.name,
+ commands=merged_commands,
+ ordered=default_commands.ordered,
+ )
@property
def log_path(self) -> pathlib.Path | None:
@@ -229,7 +299,7 @@ def _configure_services(self, provider_name: str | None) -> None:
Any child classes that override this must either call this directly or must
provide a valid ``project`` to ``self.services``.
"""
- self.services.set_kwargs(
+ self.services.update_kwargs(
"lifecycle",
cache_dir=self.cache_dir,
work_dir=self._work_dir,
@@ -239,12 +309,17 @@ def _configure_services(self, provider_name: str | None) -> None:
self._pro_services
), # TODO: should this be passed as a arg instead?
)
- self.services.set_kwargs(
+ self.services.update_kwargs(
"provider",
work_dir=self._work_dir,
build_plan=self._build_plan,
provider_name=provider_name,
)
+ self.services.update_kwargs(
+ "fetch",
+ build_plan=self._build_plan,
+ session_policy=self._fetch_service_policy,
+ )
def _resolve_project_path(self, project_dir: pathlib.Path | None) -> pathlib.Path:
"""Find the project file for the current project.
@@ -345,8 +420,10 @@ def is_managed(self) -> bool:
def run_managed(self, platform: str | None, build_for: str | None) -> None:
"""Run the application in a managed instance."""
- extra_args: dict[str, Any] = {}
+ if not self._build_plan:
+ raise errors.EmptyBuildPlanError
+ extra_args: dict[str, Any] = {}
for build_info in self._build_plan:
if platform and platform != build_info.platform:
continue
@@ -373,8 +450,14 @@ def run_managed(self, platform: str | None, build_for: str | None) -> None:
instance_path = pathlib.PosixPath("/root/project")
with self.services.provider.instance(
- build_info, work_dir=self._work_dir
+ build_info,
+ work_dir=self._work_dir,
+ clean_existing=self._enable_fetch_service,
) as instance:
+ if self._enable_fetch_service:
+ session_env = self.services.fetch.create_session(instance)
+ env.update(session_env)
+
# if pro services are required, ensure the pro client is
# installed, attached and the correct services are enabled
if self._pro_services:
@@ -410,13 +493,20 @@ def run_managed(self, platform: str | None, build_for: str | None) -> None:
raise craft_providers.ProviderError(
f"Failed to execute {self.app.name} in instance."
) from exc
+ finally:
+ if self._enable_fetch_service:
+ self.services.fetch.teardown_session()
+
+ if self._enable_fetch_service:
+ self.services.fetch.shutdown(force=True)
def configure(self, global_args: dict[str, Any]) -> None:
"""Configure the application using any global arguments."""
def _get_dispatcher(self) -> craft_cli.Dispatcher:
- """Configure this application. Should be called by the run method.
+ """Configure this application.
+ Should be called by the _run_inner method.
Side-effect: This method may exit the process.
:returns: A ready-to-run Dispatcher object
@@ -425,15 +515,18 @@ def _get_dispatcher(self) -> craft_cli.Dispatcher:
try:
craft_cli.emit.trace("pre-parsing arguments...")
+ app_config = self.app_config
# Workaround for the fact that craft_cli requires a command.
# https://github.com/canonical/craft-cli/issues/141
if "--version" in sys.argv or "-V" in sys.argv:
try:
- global_args = dispatcher.pre_parse_args(["pull", *sys.argv[1:]])
+ global_args = dispatcher.pre_parse_args(
+ ["pull", *sys.argv[1:]], app_config
+ )
except craft_cli.ArgumentParsingError:
- global_args = dispatcher.pre_parse_args(sys.argv[1:])
+ global_args = dispatcher.pre_parse_args(sys.argv[1:], app_config)
else:
- global_args = dispatcher.pre_parse_args(sys.argv[1:])
+ global_args = dispatcher.pre_parse_args(sys.argv[1:], app_config)
if global_args.get("version"):
craft_cli.emit.message(f"{self.app.name} {self.app.version}")
@@ -456,7 +549,7 @@ def _get_dispatcher(self) -> craft_cli.Dispatcher:
f"Internal error while loading {self.app.name}: {err!r}"
)
)
- if os.getenv("CRAFT_DEBUG") == "1":
+ if self.services.config.get("debug"):
raise
sys.exit(os.EX_SOFTWARE)
@@ -476,6 +569,7 @@ def _create_dispatcher(self) -> craft_cli.Dispatcher:
self.command_groups,
summary=str(self.app.summary),
extra_global_args=self._global_arguments,
+ docs_base_url=self.app.versioned_docs_url,
)
def _get_app_plugins(self) -> dict[str, PluginType]:
@@ -504,13 +598,15 @@ def _pre_run(self, dispatcher: craft_cli.Dispatcher) -> None:
At the time this is run, the command is loaded in the dispatcher, but
the project has not yet been loaded.
"""
+ args = dispatcher.parsed_args()
+
# Some commands might have a project_dir parameter. Those commands and
# only those commands should get a project directory, but only when
# not managed.
if self.is_managed():
self.project_dir = pathlib.Path("/root/project")
- elif project_dir := getattr(dispatcher.parsed_args(), "project_dir", None):
+ elif project_dir := getattr(args, "project_dir", None):
self.project_dir = pathlib.Path(project_dir).expanduser().resolve()
if self.project_dir.exists() and not self.project_dir.is_dir():
raise errors.ProjectFileMissingError(
@@ -519,6 +615,25 @@ def _pre_run(self, dispatcher: craft_cli.Dispatcher) -> None:
resolution="Ensure the path entered is correct.",
)
+ fetch_service_policy: str | None = getattr(args, "fetch_service_policy", None)
+ if fetch_service_policy:
+ self._enable_fetch_service = True
+ self._fetch_service_policy = fetch_service_policy
+
+ def get_arg_or_config(
+ self, parsed_args: argparse.Namespace, item: str
+ ) -> Any: # noqa: ANN401
+ """Get a configuration option that could be overridden by a command argument.
+
+ :param parsed_args: The argparse Namespace to check.
+ :param item: the name of the namespace or config item.
+ :returns: the requested value.
+ """
+ arg_value = getattr(parsed_args, item, None)
+ if arg_value is not None:
+ return arg_value
+ return self.services.config.get(item)
+
@staticmethod
def _check_pro_requirement(
pro_services: ProServices | None,
@@ -531,84 +646,88 @@ def _check_pro_requirement(
craft_cli.emit.debug(
f"Validating requested Ubuntu Pro status on host: {pro_services}"
)
+ pro_services.managed_mode = False
pro_services.validate()
# Validate requested pro services running in managed mode inside a managed instance.
elif run_managed and is_managed:
craft_cli.emit.debug(
f"Validating requested Ubuntu Pro status in managed instance: {pro_services}"
)
+ pro_services.managed_mode = True
pro_services.validate()
# Validate pro attachment and service names on the host before starting a managed instance.
elif run_managed and not is_managed:
craft_cli.emit.debug(
f"Validating requested Ubuntu Pro attachment on host: {pro_services}"
)
+ pro_services.managed_mode = True
pro_services.validate(
options=ValidatorOptions.ATTACHMENT | ValidatorOptions.SUPPORT
)
- def run( # noqa: PLR0912,PLR0915 (too many branches, too many statements)
- self,
- ) -> int:
- """Bootstrap and run the application."""
- self._setup_logging()
- self._initialize_craft_parts()
+ def _run_inner(self) -> int:
+ """Actual run implementation."""
dispatcher = self._get_dispatcher()
- craft_cli.emit.debug("Preparing application...")
-
- return_code = 1 # General error
- try:
- command = cast(
- commands.AppCommand,
- dispatcher.load_command(self.app_config),
+ command = cast(
+ commands.AppCommand,
+ dispatcher.load_command(self.app_config),
+ )
+ parsed_args = dispatcher.parsed_args()
+ platform = self.get_arg_or_config(parsed_args, "platform")
+ build_for = self.get_arg_or_config(parsed_args, "build_for")
+
+ # Some commands (e.g. remote build) can allow multiple platforms
+ # or build-fors, comma-separated. In these cases, we create the
+ # project using the first defined platform.
+ if platform and "," in platform:
+ platform = platform.split(",", maxsplit=1)[0]
+ if build_for and "," in build_for:
+ build_for = build_for.split(",", maxsplit=1)[0]
+
+ provider_name = command.provider_name(dispatcher.parsed_args())
+ managed_mode = command.run_managed(dispatcher.parsed_args())
+
+ # TODO: Move pro operations out to new service for managing Ubuntu Pro
+ # A ProServices instance will only be available for lifecycle commands,
+ # which may consume pro packages,
+ self._pro_services = getattr(dispatcher.parsed_args(), "pro", None)
+ # Check that pro services are correctly configured if available
+ self._check_pro_requirement(self._pro_services, managed_mode, self.is_managed())
+
+ craft_cli.emit.debug(f"Build plan: platform={platform}, build_for={build_for}")
+ self._pre_run(dispatcher)
+
+ if managed_mode or command.needs_project(dispatcher.parsed_args()):
+ self.services.project = self.get_project(
+ platform=platform, build_for=build_for
)
- platform = getattr(dispatcher.parsed_args(), "platform", None)
- build_for = getattr(dispatcher.parsed_args(), "build_for", None)
+ self._configure_services(provider_name)
- run_managed = command.run_managed(dispatcher.parsed_args())
- is_managed = self.is_managed()
-
- # Some commands (e.g. remote build) can allow multiple platforms
- # or build-fors, comma-separated. In these cases, we create the
- # project using the first defined platform.
- if platform and "," in platform:
- platform = platform.split(",", maxsplit=1)[0]
- if build_for and "," in build_for:
- build_for = build_for.split(",", maxsplit=1)[0]
+ return_code = 1 # General error
+ if not managed_mode:
+ # command runs in the outer instance
+ craft_cli.emit.debug(f"Running {self.app.name} {command.name} on host")
+ return_code = dispatcher.run() or os.EX_OK
+ elif not self.is_managed():
+ # command runs in inner instance, but this is the outer instance
+ self.run_managed(platform, build_for)
+ return_code = os.EX_OK
+ else:
+ # command runs in inner instance
+ return_code = dispatcher.run() or 0
- provider_name = command.provider_name(dispatcher.parsed_args())
+ return return_code
- # TODO: Move pro operations out to new service for managing Ubuntu Pro
- # A ProServices instance will only be available for lifecycle commands,
- # which may consume pro packages,
- self._pro_services = getattr(dispatcher.parsed_args(), "pro", None)
- # Check that pro services are correctly configured if available
- self._check_pro_requirement(self._pro_services, run_managed, is_managed)
+ def run(self) -> int:
+ """Bootstrap and run the application."""
+ self._setup_logging()
+ self._initialize_craft_parts()
- if run_managed or command.needs_project(dispatcher.parsed_args()):
- self.services.project = self.get_project(
- platform=platform, build_for=build_for
- )
+ craft_cli.emit.debug("Preparing application...")
- craft_cli.emit.debug(
- f"Build plan: platform={platform}, build_for={build_for}"
- )
- self._pre_run(dispatcher)
-
- self._configure_services(provider_name)
-
- if not run_managed:
- # command runs in the outer instance
- craft_cli.emit.debug(f"Running {self.app.name} {command.name} on host")
- return_code = dispatcher.run() or os.EX_OK
- elif not is_managed:
- # command runs in inner instance, but this is the outer instance
- self.run_managed(platform, build_for)
- return_code = os.EX_OK
- else:
- # command runs in inner instance
- return_code = dispatcher.run() or 0
+ try:
+ return_code = self._run_inner()
except craft_cli.ArgumentParsingError as err:
print(err, file=sys.stderr) # to stderr, as argparse normally does
craft_cli.emit.ended_ok()
@@ -638,7 +757,7 @@ def run( # noqa: PLR0912,PLR0915 (too many branches, too many statements)
craft_cli.CraftError(f"{self.app.name} internal error: {err!r}"),
cause=err,
)
- if os.getenv("CRAFT_DEBUG") == "1":
+ if self.services.config.get("debug"):
raise
return_code = os.EX_SOFTWARE
else:
@@ -725,6 +844,7 @@ def _expand_environment(self, yaml_data: dict[str, Any], build_for: str) -> None
application_name=self.app.name, # not used in environment expansion
cache_dir=pathlib.Path(), # not used in environment expansion
arch=build_for_arch,
+ parallel_build_count=util.get_parallel_build_count(self.app.name),
project_name=yaml_data.get("name", ""),
project_dirs=project_dirs,
project_vars=environment_vars,
diff --git a/craft_application/commands/__init__.py b/craft_application/commands/__init__.py
index 6f9d065a..d636c58a 100644
--- a/craft_application/commands/__init__.py
+++ b/craft_application/commands/__init__.py
@@ -15,17 +15,16 @@
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Command classes for a craft application."""
-from craft_application.commands.base import AppCommand, ExtensibleCommand
-from craft_application.commands import lifecycle
-from craft_application.commands.lifecycle import (
- get_lifecycle_command_group,
- LifecycleCommand,
-)
-from craft_application.commands.other import get_other_command_group
+from .base import AppCommand, ExtensibleCommand
+from . import lifecycle
+from .init import InitCommand
+from .lifecycle import get_lifecycle_command_group, LifecycleCommand
+from .other import get_other_command_group
__all__ = [
"AppCommand",
"ExtensibleCommand",
+ "InitCommand",
"lifecycle",
"LifecycleCommand",
"get_lifecycle_command_group",
diff --git a/craft_application/commands/base.py b/craft_application/commands/base.py
index 4b6d9f9b..33056c05 100644
--- a/craft_application/commands/base.py
+++ b/craft_application/commands/base.py
@@ -18,6 +18,7 @@
import abc
import argparse
+import warnings
from typing import Any, Optional, Protocol, final
from craft_cli import BaseCommand, emit
@@ -55,8 +56,11 @@ class AppCommand(BaseCommand):
def __init__(self, config: dict[str, Any] | None) -> None:
if config is None:
- # This should only be the case when the command is not going to be run.
- # For example, when requesting help on the command.
+ warnings.warn(
+ "Creating an AppCommand without a config dict is pending deprecation.",
+ PendingDeprecationWarning,
+ stacklevel=3,
+ )
emit.trace("Not completing command configuration")
return
diff --git a/craft_application/commands/init.py b/craft_application/commands/init.py
new file mode 100644
index 00000000..3067de85
--- /dev/null
+++ b/craft_application/commands/init.py
@@ -0,0 +1,153 @@
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+"""Command to initialise a project."""
+
+from __future__ import annotations
+
+import argparse
+import importlib.resources
+import pathlib
+from textwrap import dedent
+from typing import cast
+
+import craft_cli
+
+from craft_application.util import humanize_list
+
+from . import base
+
+
+class InitCommand(base.AppCommand):
+ """Command to create initial project files.
+
+ The init command should always produce a working and ready-to-build project.
+ """
+
+ name = "init"
+ help_msg = "Create an initial project filetree"
+ overview = dedent(
+ """
+ Initialise a project.
+
+ If '<project-dir>' is provided, initialise in that directory,
+ otherwise initialise in the current working directory.
+
+ If '--name <name>' is provided, the project will be named '<name>'.
+ Otherwise, the project will be named after the directory it is initialised in.
+
+ '--profile <profile>' is used to initialise the project for a specific use case.
+
+ Init can work in an existing project directory. If there are any files in the
+ directory that would be overwritten, then the init command will fail.
+ """
+ )
+ common = True
+
+ default_profile = "simple"
+ """The default profile to use when initialising a project."""
+
+ def fill_parser(self, parser: argparse.ArgumentParser) -> None:
+ """Specify command's specific parameters."""
+ parser.add_argument(
+ "project_dir",
+ type=pathlib.Path,
+ nargs="?",
+ default=None,
+ help="Path to initialise project in; defaults to current working directory.",
+ )
+ parser.add_argument(
+ "--name",
+ type=str,
+ default=None,
+ help="The name of project; defaults to the name of ",
+ )
+ parser.add_argument(
+ "--profile",
+ type=str,
+ choices=self.profiles,
+ default=self.default_profile,
+ help=(
+ f"Use the specified project profile (default is {self.default_profile}, "
+ f"choices are {humanize_list(self.profiles, 'and')})"
+ ),
+ )
+
+ @property
+ def parent_template_dir(self) -> pathlib.Path:
+ """Return the path to the directory that contains all templates."""
+ with importlib.resources.path(
+ self._app.name, "templates"
+ ) as _parent_template_dir:
+ return _parent_template_dir
+
+ @property
+ def profiles(self) -> list[str]:
+ """A list of profile names generated from template directories."""
+ template_dirs = [
+ path for path in self.parent_template_dir.iterdir() if path.is_dir()
+ ]
+ return sorted([template.name for template in template_dirs])
+
+ def run(self, parsed_args: argparse.Namespace) -> None:
+ """Run the command."""
+ # If the user provided a "name" and it's not valid, the command fails.
+ if parsed_args.name is not None:
+ self._services.init.validate_project_name(parsed_args.name)
+
+ # However, if the name comes from the directory, we don't fail and
+ # instead fallback to its default.
+ project_name = self._get_name(parsed_args)
+ project_name = self._services.init.validate_project_name(
+ project_name, use_default=True
+ )
+
+ project_dir = self._get_project_dir(parsed_args)
+ template_dir = pathlib.Path(self.parent_template_dir / parsed_args.profile)
+
+ craft_cli.emit.progress("Checking project directory.")
+ self._services.init.check_for_existing_files(
+ project_dir=project_dir, template_dir=template_dir
+ )
+
+ craft_cli.emit.progress("Initialising project.")
+ self._services.init.initialise_project(
+ project_dir=project_dir,
+ project_name=project_name,
+ template_dir=template_dir,
+ )
+ craft_cli.emit.message("Successfully initialised project.")
+
+ def _get_name(self, parsed_args: argparse.Namespace) -> str:
+ """Get name of the package that is about to be initialised.
+
+ Check if the name is set explicitly, or fall back to the project_dir name.
+ """
+ if parsed_args.name is not None:
+ return cast(str, parsed_args.name)
+ return self._get_project_dir(parsed_args).name
+
+ @staticmethod
+ def _get_project_dir(parsed_args: argparse.Namespace) -> pathlib.Path:
+ """Get project dir where project should be initialised.
+
+ It applies rules in the following order:
+ - if <project-dir> is specified explicitly, it returns <project-dir>
+ - if <project-dir> is undefined, it defaults to the current working directory
+ """
+ # if set explicitly, just return it
+ if parsed_args.project_dir is not None:
+ return pathlib.Path(parsed_args.project_dir).expanduser().resolve()
+
+ # If project_dir is undefined, default to the current directory
+ return pathlib.Path.cwd().resolve()
diff --git a/craft_application/commands/lifecycle.py b/craft_application/commands/lifecycle.py
index be6a73e3..8f0439a7 100644
--- a/craft_application/commands/lifecycle.py
+++ b/craft_application/commands/lifecycle.py
@@ -46,6 +46,7 @@ def get_lifecycle_command_group() -> CommandGroup:
return CommandGroup(
"Lifecycle",
commands, # type: ignore[arg-type] # https://github.com/canonical/craft-cli/pull/157
+ ordered=True,
)
@@ -163,14 +164,12 @@ def _fill_parser(self, parser: argparse.ArgumentParser) -> None:
"--platform",
type=str,
metavar="name",
- default=os.getenv("CRAFT_PLATFORM"),
help="Set platform to build for",
)
group.add_argument(
"--build-for",
type=str,
metavar="arch",
- default=os.getenv("CRAFT_BUILD_FOR"),
help="Set architecture to build for",
)
@@ -373,6 +372,14 @@ def _fill_parser(self, parser: argparse.ArgumentParser) -> None:
help="Output directory for created packages.",
)
+ parser.add_argument(
+ "--enable-fetch-service",
+ help=argparse.SUPPRESS,
+ choices=("strict", "permissive"),
+ metavar="policy",
+ dest="fetch_service_policy",
+ )
+
@override
def _run(
self,
@@ -383,13 +390,35 @@ def _run(
"""Run the pack command."""
if step_name not in ("pack", None):
raise RuntimeError(f"Step name {step_name} passed to pack command.")
+
+ shell = getattr(parsed_args, "shell", False)
+ shell_after = getattr(parsed_args, "shell_after", False)
+ debug = getattr(parsed_args, "debug", False)
+
+ # Prevent the steps in the prime command from using `--shell` or `--shell-after`
+ parsed_args.shell = False
+ parsed_args.shell_after = False
+
super()._run(parsed_args, step_name="prime")
self._run_post_prime_steps()
+ if shell:
+ _launch_shell()
+ return
+
emit.progress("Packing...")
- packages = self._services.package.pack(
- self._services.lifecycle.prime_dir, parsed_args.output
- )
+ try:
+ packages = self._services.package.pack(
+ self._services.lifecycle.prime_dir, parsed_args.output
+ )
+ except Exception as err:
+ if debug:
+ emit.progress(str(err), permanent=True)
+ _launch_shell()
+ raise
+
+ if parsed_args.fetch_service_policy and packages:
+ self._services.fetch.create_project_manifest(packages)
if not packages:
emit.progress("No packages created.", permanent=True)
@@ -399,10 +428,8 @@ def _run(
package_names = ", ".join(pkg.name for pkg in packages)
emit.progress(f"Packed: {package_names}", permanent=True)
- @staticmethod
- @override
- def _should_add_shell_args() -> bool:
- return False
+ if shell_after:
+ _launch_shell()
class CleanCommand(_BaseLifecycleCommand):
@@ -428,6 +455,12 @@ def _fill_parser(self, parser: argparse.ArgumentParser) -> None:
nargs="*",
help="Optional list of parts to process",
)
+ parser.add_argument(
+ "--platform",
+ type=str,
+ metavar="name",
+ help="Platform to clean",
+ )
@override
def _run(
diff --git a/craft_application/commands/other.py b/craft_application/commands/other.py
index eb30d80f..25963089 100644
--- a/craft_application/commands/other.py
+++ b/craft_application/commands/other.py
@@ -18,7 +18,7 @@
from craft_cli import CommandGroup, emit
-from craft_application.commands import base
+from . import InitCommand, base
if TYPE_CHECKING: # pragma: no cover
import argparse
@@ -27,6 +27,7 @@
def get_other_command_group() -> CommandGroup:
"""Return the lifecycle related command group."""
commands: list[type[base.AppCommand]] = [
+ InitCommand,
VersionCommand,
]
@@ -37,7 +38,7 @@ def get_other_command_group() -> CommandGroup:
class VersionCommand(base.AppCommand):
- """Show the snapcraft version."""
+ """Show the application version."""
name = "version"
help_msg = "Show the application version and exit"
diff --git a/craft_application/errors.py b/craft_application/errors.py
index 2d4df834..735120b1 100644
--- a/craft_application/errors.py
+++ b/craft_application/errors.py
@@ -17,6 +17,7 @@
All errors inherit from craft_cli.CraftError.
"""
+
from __future__ import annotations
import os
@@ -52,6 +53,8 @@ class YamlError(CraftError, yaml.YAMLError):
def from_yaml_error(cls, filename: str, error: yaml.YAMLError) -> Self:
"""Convert a pyyaml YAMLError to a craft-application YamlError."""
message = f"error parsing {filename!r}"
+ if isinstance(error, yaml.MarkedYAMLError):
+ message += f": {error.problem}"
details = str(error)
return cls(
message,
@@ -150,8 +153,11 @@ class EmptyBuildPlanError(CraftError):
"""The build plan filtered out all possible builds."""
def __init__(self) -> None:
- message = "No build matches the current platform."
- resolution = 'Check the "--platform" and "--build-for" parameters.'
+ message = "No build matches the current execution environment."
+ resolution = (
+ "Check the project's 'platforms' declaration, and the "
+ "'--platform' and '--build-for' parameters."
+ )
super().__init__(message=message, resolution=resolution)
@@ -239,6 +245,14 @@ def __init__( # (too many arguments)
)
+class FetchServiceError(CraftError):
+ """Errors related to the fetch-service."""
+
+
+class InitError(CraftError):
+ """Errors related to initialising a project."""
+
+
class UbuntuProError(CraftError):
"""Base Exception class for ProServices."""
@@ -262,7 +276,6 @@ class UbuntuProClientNotFoundError(UbuntuProApiError):
"""Raised when Ubuntu Pro client was not found on the system."""
def __init__(self, path: str) -> None:
-
message = f'The Ubuntu Pro client was not found on the system at "{path}"'
super().__init__(message=message)
@@ -272,7 +285,6 @@ class UbuntuProDetachedError(InvalidUbuntuProStateError):
"""Raised when Ubuntu Pro is not attached, but Pro services were requested."""
def __init__(self) -> None:
-
message = "Ubuntu Pro is requested, but was found detached."
resolution = 'Attach Ubuntu Pro to continue. See "pro" command for details.'
@@ -283,7 +295,6 @@ class UbuntuProAttachedError(InvalidUbuntuProStateError):
"""Raised when Ubuntu Pro is attached, but Pro services were not requested."""
def __init__(self) -> None:
-
message = "Ubuntu Pro is not requested, but was found attached."
resolution = 'Detach Ubuntu Pro to continue. See "pro" command for details.'
@@ -297,7 +308,6 @@ class InvalidUbuntuProServiceError(InvalidUbuntuProStateError):
# if so where is the list of supported service names?
def __init__(self, invalid_services: set[str]) -> None:
-
invalid_services_str = "".join(invalid_services)
message = "Invalid Ubuntu Pro Services were requested."
@@ -312,20 +322,15 @@ def __init__(self, invalid_services: set[str]) -> None:
class InvalidUbuntuProStatusError(InvalidUbuntuProStateError):
- """Raised when the incorrect set of Pro Services are enabled."""
-
- def __init__(
- self, requested_services: set[str], available_services: set[str]
- ) -> None:
+ """Raised when a set of requested Pro Services are disabled."""
- enable_services_str = " ".join(requested_services - available_services)
- disable_services_str = " ".join(available_services - requested_services)
+ def __init__(self, requested_services: set[str]) -> None:
+ requested_services_str = ", ".join(requested_services)
- message = "Incorrect Ubuntu Pro Services were enabled."
+ message = "Some of the requested Ubuntu Pro Services are disabled."
resolution = (
- "Please enable or disable the following services.\n"
- f"Enable: {enable_services_str}\n"
- f"Disable: {disable_services_str}\n"
+ "Please enable the following services.\n"
+ f"Enable: {requested_services_str}\n"
'See "pro" command for details.'
)
diff --git a/craft_application/fetch.py b/craft_application/fetch.py
new file mode 100644
index 00000000..c37fcfac
--- /dev/null
+++ b/craft_application/fetch.py
@@ -0,0 +1,514 @@
+# This file is part of craft_application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+"""Utilities to interact with the fetch-service."""
+import contextlib
+import io
+import logging
+import pathlib
+import shlex
+import subprocess
+from dataclasses import dataclass
+from functools import cache
+from typing import Any, cast
+
+import craft_providers
+import requests
+from craft_cli import emit
+from pydantic import Field
+from requests.auth import HTTPBasicAuth
+
+from craft_application import errors, util
+from craft_application.models import CraftBaseModel
+from craft_application.util import retry
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass(frozen=True)
+class FetchServiceConfig:
+ """Dataclass for the ports that a fetch-service instance uses."""
+
+ proxy: int
+ """The proxy port, to be passed to the applications to be proxied."""
+ control: int
+ """The control port, to create/terminate sessions, get status, etc."""
+ username: str
+ """The username for auth."""
+ password: str
+ """The password for auth."""
+
+ @property
+ def auth(self) -> str:
+ """Authentication in user:passwd format."""
+ return f"{self.username}:{self.password}"
+
+
+_FETCH_BINARY = "/snap/bin/fetch-service"
+
+_DEFAULT_CONFIG = FetchServiceConfig(
+ proxy=13444,
+ control=13555,
+ username="craft",
+ password="craft", # noqa: S106 (hardcoded-password-func-arg)
+)
+
+# The path to the fetch-service's certificate inside the build instance.
+_FETCH_CERT_INSTANCE_PATH = pathlib.Path(
+ "/usr/local/share/ca-certificates/local-ca.crt"
+)
+
+
+class SessionData(CraftBaseModel):
+ """Fetch service session data."""
+
+ session_id: str = Field(alias="id")
+ token: str
+
+
+class NetInfo:
+ """Network and proxy info linking a fetch-service session and a build instance."""
+
+ def __init__(
+ self, instance: craft_providers.Executor, session_data: SessionData
+ ) -> None:
+ self._gateway = _get_gateway(instance)
+ self._session_data = session_data
+
+ @property
+ def http_proxy(self) -> str:
+ """Proxy string in the 'http://:@:/."""
+ session = self._session_data
+ port = _DEFAULT_CONFIG.proxy
+ gw = self._gateway
+ return f"http://{session.session_id}:{session.token}@{gw}:{port}/"
+
+ @property
+ def env(self) -> dict[str, str]:
+ """Environment variables to use for the proxy."""
+ return {
+ "http_proxy": self.http_proxy,
+ "https_proxy": self.http_proxy,
+ # This makes the requests lib take our cert into account.
+ "REQUESTS_CA_BUNDLE": str(_FETCH_CERT_INSTANCE_PATH),
+ # Same, but for cargo.
+ "CARGO_HTTP_CAINFO": str(_FETCH_CERT_INSTANCE_PATH),
+ # Have go download directly from repositories
+ "GOPROXY": "direct",
+ }
+
+
+def is_service_online() -> bool:
+ """Whether the fetch-service is up and listening."""
+ try:
+ status = get_service_status()
+ except errors.FetchServiceError:
+ return False
+ return "uptime" in status
+
+
+def get_service_status() -> dict[str, Any]:
+ """Get the JSON status of the fetch-service.
+
+ :raises errors.FetchServiceError: if a connection error happens.
+ """
+ response = _service_request("get", "status")
+ return cast(dict[str, Any], response.json())
+
+
+def start_service() -> subprocess.Popen[str] | None:
+ """Start the fetch-service with default ports and auth."""
+ if is_service_online():
+ # Nothing to do, service is already up.
+ return None
+
+ # Check that the fetch service is actually installed
+ verify_installed()
+
+ cmd = [_FETCH_BINARY]
+
+ env = {"FETCH_SERVICE_AUTH": _DEFAULT_CONFIG.auth}
+
+ # Add the ports
+ cmd.append(f"--control-port={_DEFAULT_CONFIG.control}")
+ cmd.append(f"--proxy-port={_DEFAULT_CONFIG.proxy}")
+
+ # Set config and spool directories
+ base_dir = _get_service_base_dir()
+
+ for dir_name in ("config", "spool"):
+ dir_path = base_dir / dir_name
+ dir_path.mkdir(exist_ok=True)
+ cmd.append(f"--{dir_name}={dir_path}")
+
+ cert, cert_key = _obtain_certificate()
+
+ cmd.append(f"--cert={cert}")
+ cmd.append(f"--key={cert_key}")
+
+ # Accept permissive sessions
+ cmd.append("--permissive-mode")
+
+ # Shutdown after 5 minutes with no live sessions
+ cmd.append("--idle-shutdown=300")
+
+ log_filepath = get_log_filepath()
+ log_filepath.parent.mkdir(parents=True, exist_ok=True)
+ cmd.append(f"--log-file={log_filepath}")
+
+ str_cmd = shlex.join(cmd)
+ emit.debug(f"Launching fetch-service with '{str_cmd}'")
+
+ fetch_process = subprocess.Popen(
+ cmd,
+ env=env,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ text=True,
+ )
+
+ # Wait a bit for the service to come online
+ with contextlib.suppress(subprocess.TimeoutExpired):
+ fetch_process.wait(0.1)
+
+ if fetch_process.poll() is not None:
+ # fetch-service already exited, something is wrong
+ log = log_filepath.read_text()
+ lines = log.splitlines()
+ error_lines = [line for line in lines if "ERROR:" in line]
+ error_text = "\n".join(error_lines)
+
+ if "bind: address already in use" in error_text:
+ proxy, control = _DEFAULT_CONFIG.proxy, _DEFAULT_CONFIG.control
+ message = f"fetch-service ports {proxy} and {control} are already in use."
+ details = None
+ else:
+ message = "Error spawning the fetch-service."
+ details = error_text
+ raise errors.FetchServiceError(message, details=details)
+
+ status = retry(
+ "wait for fetch-service to come online",
+ errors.FetchServiceError,
+ get_service_status, # pyright: ignore[reportArgumentType]
+ )
+ if "uptime" not in status:
+ stop_service(fetch_process)
+ raise errors.FetchServiceError(
+ f"Fetch service did not start correctly: {status}"
+ )
+
+ return fetch_process
+
+
+def stop_service(fetch_process: subprocess.Popen[str]) -> None:
+ """Stop the fetch-service.
+
+ This function first calls terminate(), and then kill() after a short time.
+ """
+ fetch_process.terminate()
+ try:
+ fetch_process.wait(timeout=1.0)
+ except subprocess.TimeoutExpired:
+ fetch_process.kill()
+
+
+def create_session(*, strict: bool) -> SessionData:
+ """Create a new fetch-service session.
+
+ :param strict: Whether the created session should be strict.
+ :return: a SessionData object containing the session's id and token.
+ """
+ json = {"policy": "strict" if strict else "permissive"}
+ data = _service_request("post", "session", json=json).json()
+
+ return SessionData.unmarshal(data=data)
+
+
+def teardown_session(session_data: SessionData) -> dict[str, Any]:
+ """Stop and cleanup a running fetch-service session.
+
+ :param session_data: the data of a previously-created session.
+ :return: A dict containing the session's report (the contents and format
+ of this dict are still subject to change).
+ """
+ session_id = session_data.session_id
+ session_token = session_data.token
+
+ # Revoke token
+ _revoke_data = _service_request(
+ "delete", f"session/{session_id}/token", json={"token": session_token}
+ ).json()
+
+ # Get session report
+ session_report = _service_request("get", f"session/{session_id}", json={}).json()
+
+ # Delete session
+ _service_request("delete", f"session/{session_id}")
+
+ # Delete session resources
+ _service_request("delete", f"resources/{session_id}")
+
+ return cast(dict[str, Any], session_report)
+
+
+def configure_instance(
+ instance: craft_providers.Executor, session_data: SessionData
+) -> dict[str, str]:
+ """Configure a build instance to use a given fetch-service session."""
+ net_info = NetInfo(instance, session_data)
+
+ _install_certificate(instance)
+ _configure_pip(instance)
+ _configure_snapd(instance, net_info)
+ _configure_apt(instance, net_info)
+
+ return net_info.env
+
+
+def get_log_filepath() -> pathlib.Path:
+ """Get the path containing the fetch-service's output."""
+ # All craft tools log to the same place, because it's a single fetch-service
+ # instance. It needs to be a location that the fetch-service, as a strict
+ # snap, can write to.
+ logdir = _get_service_base_dir() / "craft-logs"
+ logdir.mkdir(exist_ok=True, parents=True)
+ return logdir / "fetch-service.log"
+
+
+def verify_installed() -> None:
+ """Verify that the fetch-service is installed, raising an error if it isn't."""
+ if not _check_installed():
+ raise errors.FetchServiceError(
+ "The 'fetch-service' snap is not installed.",
+ resolution=(
+ "Install the fetch-service snap via "
+ "'snap install --channel=candidate fetch-service'."
+ ),
+ )
+
+
+def _service_request(
+ verb: str, endpoint: str, json: dict[str, Any] | None = None
+) -> requests.Response:
+ headers = {
+ "Content-type": "application/json",
+ }
+ auth = HTTPBasicAuth(_DEFAULT_CONFIG.username, _DEFAULT_CONFIG.password)
+ try:
+ response = requests.request(
+ verb,
+ f"http://localhost:{_DEFAULT_CONFIG.control}/{endpoint}",
+ auth=auth,
+ headers=headers,
+ json=json, # Use defaults
+ timeout=0.1,
+ )
+ response.raise_for_status()
+ except requests.RequestException as err:
+ message = f"Error with fetch-service {verb.upper()}: {str(err)}"
+ raise errors.FetchServiceError(message)
+
+ return response
+
+
+@cache
+def _get_service_base_dir() -> pathlib.Path:
+ """Get the base directory to contain the fetch-service's runtime files."""
+ input_line = "sh -c 'echo $SNAP_USER_COMMON'"
+ output = subprocess.check_output(
+ ["snap", "run", "--shell", "fetch-service"], text=True, input=input_line
+ )
+ return pathlib.Path(output.strip())
+
+
+def _install_certificate(instance: craft_providers.Executor) -> None:
+
+ logger.info("Installing certificate")
+ # Push the local certificate
+ cert, _key = _obtain_certificate()
+ instance.push_file(
+ source=cert,
+ destination=_FETCH_CERT_INSTANCE_PATH,
+ )
+ # Update the certificates db
+ _execute_run(
+ instance, ["/bin/sh", "-c", "/usr/sbin/update-ca-certificates > /dev/null"]
+ )
+
+
+def _configure_pip(instance: craft_providers.Executor) -> None:
+ logger.info("Configuring pip")
+
+ _execute_run(instance, ["mkdir", "-p", "/root/.pip"])
+ pip_config = b"[global]\ncert=/usr/local/share/ca-certificates/local-ca.crt"
+ instance.push_file_io(
+ destination=pathlib.Path("/root/.pip/pip.conf"),
+ content=io.BytesIO(pip_config),
+ file_mode="0644",
+ )
+
+
+def _configure_snapd(instance: craft_providers.Executor, net_info: NetInfo) -> None:
+ """Configure snapd to use the proxy and see our certificate.
+
+ Note: This *must* be called *after* _install_certificate(), to ensure that
+ when the snapd restart happens the new cert is there.
+ """
+ logger.info("Configuring snapd")
+ _execute_run(instance, ["systemctl", "restart", "snapd"])
+ for config in ("proxy.http", "proxy.https"):
+ _execute_run(
+ instance, ["snap", "set", "system", f"{config}={net_info.http_proxy}"]
+ )
+
+
+def _configure_apt(instance: craft_providers.Executor, net_info: NetInfo) -> None:
+ logger.info("Configuring Apt")
+ apt_config = f'Acquire::http::Proxy "{net_info.http_proxy}";\n'
+ apt_config += f'Acquire::https::Proxy "{net_info.http_proxy}";\n'
+
+ instance.push_file_io(
+ destination=pathlib.Path("/etc/apt/apt.conf.d/99proxy"),
+ content=io.BytesIO(apt_config.encode("utf-8")),
+ file_mode="0644",
+ )
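+ # Remove package indexes cached before the proxy was configured; the
+ # 'apt update' below re-fetches them through the session.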
+ _execute_run(instance, ["/bin/rm", "-Rf", "/var/lib/apt/lists"])
+
+ logger.info("Refreshing Apt package listings")
+ _execute_run(instance, ["apt", "update"])
+
+
+def _get_gateway(instance: craft_providers.Executor) -> str:
+ from craft_providers.lxd import LXDInstance
+
+ if not isinstance(instance, LXDInstance):
+ raise TypeError("Don't know how to handle non-lxd instances")
+
+ instance_name = instance.instance_name
+ project = instance.project
+ output = subprocess.check_output(
+ ["lxc", "--project", project, "config", "show", instance_name, "--expanded"],
+ text=True,
+ )
+ config = util.safe_yaml_load(io.StringIO(output))
+ network = config["devices"]["eth0"]["network"]
+
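+ # The kernel route on the LXD bridge ends with the host's address on that
+ # network, which is the gateway as seen from the instance.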
+ route = subprocess.check_output(
+ ["ip", "route", "show", "dev", network],
+ text=True,
+ )
+ return route.strip().split()[-1]
+
+
+def _obtain_certificate() -> tuple[pathlib.Path, pathlib.Path]:
+ """Retrieve, possibly creating, the certificate and key for the fetch service.
+
+ :return: The full paths to the self-signed certificate and its private key.
+ """
+ cert_dir = _get_certificate_dir()
+
+ cert_dir.mkdir(parents=True, exist_ok=True)
+
+ cert = cert_dir / "local-ca.pem"
+ key = cert_dir / "local-ca.key.pem"
+
+ if cert.is_file() and key.is_file():
+ # Certificate and key already generated
+ # TODO check that the certificate hasn't expired
+ return cert, key
+
+ # At least one is missing, regenerate both
+ key_tmp = cert_dir / "key-tmp.pem"
+ cert_tmp = cert_dir / "cert-tmp.pem"
+
+ # Create the key
+ subprocess.run(
+ [
+ "openssl",
+ "genrsa",
+ "-aes256",
+ "-passout",
+ "pass:1",
+ "-out",
+ key_tmp,
+ "4096",
+ ],
+ check=True,
+ capture_output=True,
+ )
+
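+ # Rewrite the key in place without the passphrase so the fetch-service can
+ # load it unattended.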
+ subprocess.run(
+ [
+ "openssl",
+ "rsa",
+ "-passin",
+ "pass:1",
+ "-in",
+ key_tmp,
+ "-out",
+ key_tmp,
+ ],
+ check=True,
+ capture_output=True,
+ )
+
+ # Create a certificate with the key
+ subprocess.run(
+ [
+ "openssl",
+ "req",
+ "-subj",
+ "/CN=root@localhost",
+ "-key",
+ key_tmp,
+ "-new",
+ "-x509",
+ "-days",
+ "7300",
+ "-sha256",
+ "-extensions",
+ "v3_ca",
+ "-out",
+ cert_tmp,
+ ],
+ check=True,
+ capture_output=True,
+ )
+
+ cert_tmp.rename(cert)
+ key_tmp.rename(key)
+
+ return cert, key
+
+
+def _get_certificate_dir() -> pathlib.Path:
+ """Get the location that should contain the fetch-service certificate and key."""
+ base_dir = _get_service_base_dir()
+
+ return base_dir / "craft/fetch-certificate"
+
+
+def _check_installed() -> bool:
+ """Check whether the fetch-service is installed."""
+ return pathlib.Path(_FETCH_BINARY).is_file()
+
+
+def _execute_run(
+ instance: craft_providers.Executor, cmd: list[str]
+) -> subprocess.CompletedProcess[str]:
+ return instance.execute_run( # pyright: ignore[reportUnknownMemberType, reportUnknownVariableType]
+ cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
+ )
diff --git a/craft_application/git/__init__.py b/craft_application/git/__init__.py
index 1d055527..6b37afd5 100644
--- a/craft_application/git/__init__.py
+++ b/craft_application/git/__init__.py
@@ -14,9 +14,10 @@
"""Git repository utilities."""
+from ._consts import COMMIT_SHORT_SHA_LEN
from ._errors import GitError
-from ._models import GitType
-from ._git_repo import GitRepo, get_git_repo_type, is_repo
+from ._models import GitType, short_commit_sha
+from ._git_repo import GitRepo, get_git_repo_type, is_repo, parse_describe
__all__ = [
"GitError",
@@ -24,4 +25,7 @@
"GitType",
"get_git_repo_type",
"is_repo",
+ "parse_describe",
+ "short_commit_sha",
+ "COMMIT_SHORT_SHA_LEN",
]
diff --git a/tests/unit/remote/conftest.py b/craft_application/git/_consts.py
similarity index 69%
rename from tests/unit/remote/conftest.py
rename to craft_application/git/_consts.py
index f64a3ec5..1c45290c 100644
--- a/tests/unit/remote/conftest.py
+++ b/craft_application/git/_consts.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2022,2024 Canonical Ltd
+# Copyright 2024 Canonical Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3 as
@@ -11,19 +11,9 @@
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import os
-import pathlib
-import pytest
+"""Git repository consts."""
+from typing import Final
-@pytest.fixture
-def new_dir(tmp_path):
- """Change to a new temporary directory."""
-
- cwd = pathlib.Path.cwd()
- os.chdir(tmp_path)
-
- yield tmp_path
-
- os.chdir(cwd)
+COMMIT_SHORT_SHA_LEN: Final[int] = 7
diff --git a/craft_application/git/_git_repo.py b/craft_application/git/_git_repo.py
index 9c4dec12..c87dd870 100644
--- a/craft_application/git/_git_repo.py
+++ b/craft_application/git/_git_repo.py
@@ -91,6 +91,35 @@ def get_git_repo_type(path: Path) -> GitType:
return GitType.INVALID
+def parse_describe(describe_str: str) -> str:
+ """Parse git describe string to get a human-readable version.
+
+ Examples (git describe -> parse_describe):
+ 4.1.1-0-gad012482d -> 4.1.1
+ 4.1.1-16-g2d8943dbc -> 4.1.1.post16+git2d8943dbc
+ curl-8_11_0-0-gb1ef0e1 -> curl-8_11_0
+
+ For shallow clones or repositories missing tags:
+ 0ae7c04 -> 0ae7c04
+ """
+ if "-" not in describe_str:
+ return describe_str
+ number_of_expected_elements = 3
+ splitted_describe = describe_str.rsplit(
+ "-",
+ maxsplit=number_of_expected_elements - 1,
+ )
+ if len(splitted_describe) != number_of_expected_elements:
+ logger.warning("Cannot determine version basing on describe result.")
+ return describe_str
+
+ version, distance, commit = splitted_describe
+
+ if distance == "0":
+ return version
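+ # commit has the form 'g<sha>'; strip the leading 'g' added by git describe.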
+ return f"{version}.post{distance}+git{commit[1:]}"
+
+
class GitRepo:
"""Git repository class."""
@@ -353,6 +382,39 @@ def push_url( # noqa: PLR0912 (too-many-branches)
f"for the git repository in {str(self.path)!r}."
)
+ def describe(
+ self,
+ *,
+ committish: str | None = None,
+ abbreviated_size: int | None = None,
+ always_use_long_format: bool | None = None,
+ show_commit_oid_as_fallback: bool | None = None,
+ ) -> str:
+ """Return a human readable name base on an available ref.
+
+ :param committish: Commit-ish object name to describe. If None, HEAD will be
+ described
+ :param abbreviated_size: The same as --abbrev of ``git describe`` command
+ :param always_use_long_format: Always use the long format
+ :param show_commit_oid_as_fallback: Show uniquely abbreviated commit as fallback
+
+ :returns: A string that describes the given object.
+
+ :raises GitError: if the object cannot be described.
+ """
+ logger.debug(f"Trying to describe {committish or 'HEAD'!r}.")
+ try:
+ described: str = self._repo.describe(
+ committish=committish,
+ abbreviated_size=abbreviated_size,
+ always_use_long_format=always_use_long_format,
+ show_commit_oid_as_fallback=show_commit_oid_as_fallback,
+ )
+ except (pygit2.GitError, KeyError) as err:
+ raise GitError("Could not describe given object") from err
+ else:
+ return described
+
def _resolve_ref(self, ref: str) -> str:
"""Get a full reference name for a shorthand ref.
diff --git a/craft_application/git/_models.py b/craft_application/git/_models.py
index 4f9cefc4..8af52271 100644
--- a/craft_application/git/_models.py
+++ b/craft_application/git/_models.py
@@ -16,6 +16,13 @@
from enum import Enum
+from ._consts import COMMIT_SHORT_SHA_LEN
+
+
+def short_commit_sha(commit_sha: str) -> str:
+ """Return shortened version of the commit."""
+ return commit_sha[:COMMIT_SHORT_SHA_LEN]
+
class GitType(Enum):
"""Type of git repository."""
diff --git a/craft_application/grammar.py b/craft_application/grammar.py
index df99fda6..aad6a79d 100644
--- a/craft_application/grammar.py
+++ b/craft_application/grammar.py
@@ -52,7 +52,7 @@ def process_part(
*, part_yaml_data: dict[str, Any], processor: GrammarProcessor
) -> dict[str, Any]:
"""Process grammar for a given part."""
- for key in part_yaml_data:
+ for key, _ in part_yaml_data.items():
unprocessed_grammar = part_yaml_data[key]
# ignore non-grammar keywords
@@ -72,7 +72,8 @@ def process_part(
# all keys in the dictionary must be a string
for item in unprocessed_grammar: # type: ignore[reportUnknownVariableType]
if isinstance(item, dict) and any(
- not isinstance(key, str) for key in item # type: ignore[reportUnknownVariableType]
+ not isinstance(key, str)
+ for key in item # type: ignore[reportUnknownVariableType]
):
continue
@@ -119,7 +120,7 @@ def self_check(value: Any) -> bool: # noqa: ANN401
# TODO: make checker optional in craft-grammar.
processor = GrammarProcessor(arch=arch, target_arch=target_arch, checker=self_check)
- for part_name in parts_yaml_data:
+ for part_name, _ in parts_yaml_data.items():
parts_yaml_data[part_name] = process_part(
part_yaml_data=parts_yaml_data[part_name], processor=processor
)
diff --git a/craft_application/launchpad/__init__.py b/craft_application/launchpad/__init__.py
index 8ff4d4cc..f548d08a 100644
--- a/craft_application/launchpad/__init__.py
+++ b/craft_application/launchpad/__init__.py
@@ -22,6 +22,7 @@
from .errors import LaunchpadError
from .launchpad import Launchpad
from .models import LaunchpadObject, RecipeType, Recipe, SnapRecipe, CharmRecipe
+from .util import Architecture
__all__ = [
"errors",
@@ -32,4 +33,5 @@
"Recipe",
"SnapRecipe",
"CharmRecipe",
+ "Architecture",
]
diff --git a/craft_application/launchpad/models/base.py b/craft_application/launchpad/models/base.py
index d2c03ff9..e63945d3 100644
--- a/craft_application/launchpad/models/base.py
+++ b/craft_application/launchpad/models/base.py
@@ -133,7 +133,9 @@ def __getattr__(self, item: str) -> Any: # noqa: ANN401
if item in annotations:
cls = annotations[item]
- if isinstance(cls, type) and issubclass(cls, LaunchpadObject):
+ if isinstance( # pyright: ignore[reportUnnecessaryIsInstance]
+ cls, type
+ ) and issubclass(cls, LaunchpadObject):
return cls(self._lp, lp_obj)
# We expect that this class can take the object.
return cls(lp_obj) # type: ignore[call-arg]
diff --git a/craft_application/models/base.py b/craft_application/models/base.py
index 08e30438..cd261f77 100644
--- a/craft_application/models/base.py
+++ b/craft_application/models/base.py
@@ -38,6 +38,7 @@ class CraftBaseModel(pydantic.BaseModel):
extra="forbid",
populate_by_name=True,
alias_generator=alias_generator,
+ coerce_numbers_to_str=True,
)
def marshal(self) -> dict[str, str | list[str] | dict[str, Any]]:
diff --git a/craft_application/models/constraints.py b/craft_application/models/constraints.py
index edec9a5f..f7d8b483 100644
--- a/craft_application/models/constraints.py
+++ b/craft_application/models/constraints.py
@@ -91,8 +91,8 @@ def validate(value: str) -> str:
* May not have two hyphens in a row
"""
-_PROJECT_NAME_REGEX = r"^([a-z0-9][a-z0-9-]?)*[a-z]+([a-z0-9-]?[a-z0-9])*$"
-_PROJECT_NAME_COMPILED_REGEX = re.compile(_PROJECT_NAME_REGEX)
+PROJECT_NAME_REGEX = r"^([a-z0-9][a-z0-9-]?)*[a-z]+([a-z0-9-]?[a-z0-9])*$"
+PROJECT_NAME_COMPILED_REGEX = re.compile(PROJECT_NAME_REGEX)
MESSAGE_INVALID_NAME = (
"invalid name: Names can only use ASCII lowercase letters, numbers, and hyphens. "
"They must have at least one letter, may not start or end with a hyphen, "
@@ -102,13 +102,13 @@ def validate(value: str) -> str:
ProjectName = Annotated[
str,
pydantic.BeforeValidator(
- get_validator_by_regex(_PROJECT_NAME_COMPILED_REGEX, MESSAGE_INVALID_NAME)
+ get_validator_by_regex(PROJECT_NAME_COMPILED_REGEX, MESSAGE_INVALID_NAME)
),
pydantic.Field(
min_length=1,
max_length=40,
strict=True,
- pattern=_PROJECT_NAME_REGEX,
+ pattern=PROJECT_NAME_REGEX,
description=_PROJECT_NAME_DESCRIPTION,
title="Project Name",
examples=[
diff --git a/craft_application/models/manifest.py b/craft_application/models/manifest.py
new file mode 100644
index 00000000..5aa01bfa
--- /dev/null
+++ b/craft_application/models/manifest.py
@@ -0,0 +1,182 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see .
+"""Models representing manifests for projects and fetch-service assets."""
+import hashlib
+import pathlib
+from datetime import datetime, timezone
+from typing import Any, Literal
+
+from pydantic import Field
+from typing_extensions import Self, override
+
+from craft_application import models
+from craft_application.models import CraftBaseModel
+
+
+class Hashes(CraftBaseModel):
+ """Digests identifying an artifact/asset."""
+
+ sha1: str
+ sha256: str
+
+ @classmethod
+ def from_path(cls, path: pathlib.Path) -> Self:
+ """Compute digests for a given path."""
+ read_bytes = path.read_bytes()
+
+ return cls(
+ sha1=hashlib.sha1( # noqa: S324 (insecure hash function)
+ read_bytes
+ ).hexdigest(),
+ sha256=hashlib.sha256(read_bytes).hexdigest(),
+ )
+
+
+class ComponentID(CraftBaseModel):
+ """Unique identifications for an artifact/asset."""
+
+ hashes: Hashes
+
+
+class BaseManifestModel(CraftBaseModel):
+ """Common properties shared between project and fetch-service manifests."""
+
+ component_name: str
+ component_version: str
+ component_description: str
+ component_id: ComponentID
+ architecture: str
+
+
+class ProjectManifest(BaseManifestModel):
+ """Model for the project-specific properties of the craft manifest."""
+
+ license: str | None = None
+ comment: str | None = None
+ metadata_generator: Literal["Craft Application"] = "Craft Application"
+ creation_timestamp: str
+
+ @override
+ def marshal(self) -> dict[str, str | list[str] | dict[str, Any]]:
+ """Overridden to include the metadata_generator constant field."""
+ return self.model_dump(
+ mode="json",
+ by_alias=True,
+ exclude_none=True,
+ exclude_defaults=False, # to include 'metadata_generator'
+ )
+
+ @classmethod
+ def from_packed_artifact(
+ cls,
+ project: models.Project,
+ build_info: models.BuildInfo,
+ artifact: pathlib.Path,
+ ) -> Self:
+ """Create the project manifest for a packed artifact."""
+ hashes = Hashes.from_path(artifact)
+
+ now = datetime.now(timezone.utc)
+
+ return cls.unmarshal(
+ {
+ "component-name": project.name,
+ "component-version": project.version,
+ "component-description": project.summary,
+ "component-id": {"hashes": hashes.marshal()},
+ "architecture": build_info.build_for,
+ "license": project.license,
+ "creation_timestamp": now.isoformat(),
+ }
+ )
+
+
+class SessionArtifactManifest(BaseManifestModel):
+ """Model for an artifact downloaded during the fetch-service session."""
+
+ component_type: str = Field(alias="type")
+ component_author: str
+ component_vendor: str
+ size: int
+ url: list[str]
+ rejected: bool = Field(exclude=True)
+ rejection_reasons: list[str] = Field(exclude=True)
+
+ @classmethod
+ def from_session_report(cls, report: dict[str, Any]) -> list[Self]:
+ """Create session manifests from a fetch-session report."""
+ artifacts: list[Self] = []
+
+ for artifact in report["artefacts"]:
+ # Figure out if the artifact was rejected, and for which reasons
+ rejected = artifact.get("result") == "Rejected"
+ reasons: set[str] = set()
+ if rejected:
+ reasons.update(_get_reasons(artifact.get("request-inspection", {})))
+ reasons.update(_get_reasons(artifact.get("response-inspection", {})))
+
+ metadata = artifact["metadata"]
+ data = {
+ "type": metadata["type"],
+ "component-name": metadata["name"],
+ "component-version": metadata["version"],
+ "component-description": metadata["description"],
+ # "architecture" is only present on the metadata if applicable.
+ "architecture": metadata.get("architecture", ""),
+ "component-id": {
+ "hashes": {"sha1": metadata["sha1"], "sha256": metadata["sha256"]}
+ },
+ "component-author": metadata["author"],
+ "component-vendor": metadata["vendor"],
+ "size": metadata["size"],
+ "url": [d["url"] for d in artifact["downloads"]],
+ "rejected": rejected,
+ "rejection-reasons": sorted(reasons),
+ }
+ artifacts.append(cls.unmarshal(data))
+
+ return artifacts
+
+
+class CraftManifest(ProjectManifest):
+ """Full manifest for a generated artifact.
+
+ Includes project metadata and information on assets downloaded through a
+ fetch-service session.
+ """
+
+ dependencies: list[SessionArtifactManifest]
+
+ @classmethod
+ def create_craft_manifest(
+ cls, project_manifest_path: pathlib.Path, session_report: dict[str, Any]
+ ) -> Self:
+ """Create the full Craft manifest from a project and session report."""
+ project = ProjectManifest.from_yaml_file(project_manifest_path)
+ session_deps = SessionArtifactManifest.from_session_report(session_report)
+
+ data = {**project.marshal(), "dependencies": session_deps}
+ return cls.model_validate(data)
+
+
+def _get_reasons(inspections: dict[str, Any]) -> set[str]:
+ reasons: set[str] = set()
+
+ for inspection in inspections.values():
+ if inspection.get("opinion") in ("Rejected", "Unknown"):
+ reasons.add(inspection["reason"])
+
+ return reasons
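
A minimal sketch of how these models fit together, assuming a project manifest YAML file and a fetch-service session report are already on disk (both file names are hypothetical):

    import json
    import pathlib

    from craft_application.models.manifest import CraftManifest

    project_manifest_path = pathlib.Path("craft-project-manifest.yaml")
    report = json.loads(pathlib.Path("session-report.json").read_text())

    # Merge the project metadata with the session's downloaded assets.
    manifest = CraftManifest.create_craft_manifest(project_manifest_path, report)
    pathlib.Path("my-app_1.0_amd64.json").write_text(
        json.dumps(manifest.marshal(), indent=2)
    )
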
diff --git a/craft_application/models/project.py b/craft_application/models/project.py
index 550f00e1..b7b92286 100644
--- a/craft_application/models/project.py
+++ b/craft_application/models/project.py
@@ -19,15 +19,18 @@
"""
import abc
import dataclasses
+import warnings
from collections.abc import Mapping
-from typing import Annotated, Any
+from typing import Annotated, Any, cast
import craft_parts
+import craft_platforms
import craft_providers.bases
import pydantic
from craft_cli import emit
from craft_providers import bases
from craft_providers.errors import BaseConfigurationError
+from typing_extensions import Self
from craft_application import errors
from craft_application.models import base
@@ -86,6 +89,30 @@ class BuildInfo:
base: craft_providers.bases.BaseName
"""The base to build on."""
+ def __post_init__(self) -> None:
+ warnings.warn(
+ "BuildInfo is pending deprecation and will be replaced with craft_platforms.BuildInfo.",
+ PendingDeprecationWarning,
+ stacklevel=2,
+ )
+
+ @classmethod
+ def from_platforms(cls, info: craft_platforms.BuildInfo) -> Self:
+ """Convert a craft-platforms BuildInfo to a craft-application BuildInfo."""
+ build_for = (
+ "all"
+ if info.build_for == "all"
+ else craft_platforms.DebianArchitecture(info.build_for)
+ )
+ return cls(
+ platform=info.platform,
+ build_on=craft_platforms.DebianArchitecture(info.build_on),
+ build_for=build_for,
+ base=craft_providers.bases.BaseName(
+ name=info.build_base.distribution, version=info.build_base.series
+ ),
+ )
+
class Platform(base.CraftBaseModel):
"""Project platform definition."""
@@ -93,6 +120,14 @@ class Platform(base.CraftBaseModel):
build_on: UniqueList[str] | None = pydantic.Field(min_length=1)
build_for: SingleEntryList[str] | None = None
+ @pydantic.field_validator("build_on", "build_for", mode="before")
+ @classmethod
+ def _vectorise_architectures(cls, values: str | list[str]) -> list[str]:
+ """Convert string build-on and build-for to lists."""
+ if isinstance(values, str):
+ return [values]
+ return values
+
@pydantic.field_validator("build_on", "build_for", mode="after")
@classmethod
def _validate_architectures(cls, values: list[str]) -> list[str]:
@@ -119,6 +154,18 @@ def _validate_platform_set(
return values
+ @classmethod
+ def from_platforms(cls, platforms: craft_platforms.Platforms) -> dict[str, Self]:
+ """Create a dictionary ofthese objects from craft_platforms PlatformDicts."""
+ result: dict[str, Self] = {}
+ for key, value in platforms.items():
+ name = str(key)
+ platform = (
+ {"build-on": [name], "build-for": [name]} if value is None else value
+ )
+ result[name] = cls.model_validate(platform)
+ return result
+
def _populate_platforms(platforms: dict[str, Any]) -> dict[str, Any]:
"""Populate empty platform entries.
@@ -152,6 +199,15 @@ class BuildPlanner(base.CraftBaseModel, metaclass=abc.ABCMeta):
base: str | None = None
build_base: str | None = None
+ @pydantic.model_validator(mode="after")
+ def _warn_deprecation(self) -> Self:
+ warnings.warn(
+ "The craft-application BuildPlanner is pending deprecation in favour of functions that create build plans in craft-platforms.",
+ PendingDeprecationWarning,
+ stacklevel=2,
+ )
+ return self
+
@pydantic.field_validator("platforms", mode="before")
@classmethod
def _populate_platforms(cls, platforms: dict[str, Any]) -> dict[str, Any]:
@@ -206,21 +262,22 @@ def effective_base(self) -> bases.BaseName:
def get_build_plan(self) -> list[BuildInfo]:
"""Obtain the list of architectures and bases from the Project."""
- build_infos: list[BuildInfo] = []
-
- for platform_label, platform in self.platforms.items():
- for build_for in platform.build_for or [platform_label]:
- for build_on in platform.build_on or [platform_label]:
- build_infos.append(
- BuildInfo(
- platform=platform_label,
- build_on=build_on,
- build_for=build_for,
- base=self.effective_base,
- )
- )
-
- return build_infos
+ effective_base = self.effective_base
+ base = craft_platforms.DistroBase(
+ distribution=effective_base.name, series=effective_base.version
+ )
+ platforms = cast(
+ craft_platforms.Platforms,
+ {key: value.marshal() for key, value in self.platforms.items()},
+ )
+
+ return [
+ BuildInfo.from_platforms(info)
+ for info in craft_platforms.get_platforms_build_plan(
+ base=base,
+ platforms=platforms,
+ )
+ ]
def _validate_package_repository(repository: dict[str, Any]) -> dict[str, Any]:
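
For illustration, a small sketch of the new Platform.from_platforms() helper, which expands a shorthand craft-platforms mapping into validated Platform models (the platform name is an example):

    from craft_application.models.project import Platform

    platforms = Platform.from_platforms({"amd64": None})
    # A None entry is expanded to build-on == build-for == the platform name.
    print(platforms["amd64"].build_on, platforms["amd64"].build_for)
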
diff --git a/craft_application/remote/errors.py b/craft_application/remote/errors.py
index a9fc22dc..c12e902f 100644
--- a/craft_application/remote/errors.py
+++ b/craft_application/remote/errors.py
@@ -14,63 +14,41 @@
"""Remote build errors."""
-from dataclasses import dataclass
+import craft_cli.errors
+from craft_application.util import humanize_list
-@dataclass(repr=True)
-class RemoteBuildError(Exception):
- """Unexpected remote build error.
- :param brief: Brief description of error.
- :param details: Detailed information.
- """
-
- brief: str
- details: str | None = None
-
- def __str__(self) -> str:
- """Return the string representation of the error."""
- components = [self.brief]
-
- if self.details:
- components.append(self.details)
-
- return "\n".join(components)
+class RemoteBuildError(craft_cli.errors.CraftError):
+ """Error for remote builds."""
class RemoteBuildGitError(RemoteBuildError):
- """Git repository cannot be prepared correctly."""
+ """Git repository cannot be prepared correctly.
- def __init__(self, message: str) -> None:
- self.message = message
- brief = "Git operation failed"
- details = message
+ :param message: Git error message.
+ """
- super().__init__(brief=brief, details=details)
+ def __init__(self, message: str) -> None:
+ message = f"Git operation failed with: {message}"
+ super().__init__(message=message)
class UnsupportedArchitectureError(RemoteBuildError):
- """Unsupported architecture error."""
+ """Unsupported architecture error.
+
+ :param architectures: List of unsupported architectures.
+ """
def __init__(self, architectures: list[str]) -> None:
- brief = "Architecture not supported by the remote builder."
- details = (
+ message = (
"The following architectures are not supported by the remote builder: "
- f"{architectures}.\nPlease remove them from the "
- "architecture list and try again."
+ f"{humanize_list(architectures, 'and')}."
)
+ resolution = "Remove them from the architecture list and try again."
- super().__init__(brief=brief, details=details)
+ super().__init__(message=message, resolution=resolution)
class RemoteBuildInvalidGitRepoError(RemoteBuildError):
- """The Git repository is invalid for remote build.
-
- :param brief: Brief description of error.
- :param details: Detailed information.
- """
-
- def __init__(self, details: str) -> None:
- brief = "The Git repository is invalid for remote build."
-
- super().__init__(brief=brief, details=details)
+ """The Git repository is invalid for remote build."""
diff --git a/craft_application/remote/git.py b/craft_application/remote/git.py
index 60bf15f7..9559d887 100644
--- a/craft_application/remote/git.py
+++ b/craft_application/remote/git.py
@@ -32,10 +32,12 @@ def check_git_repo_for_remote_build(path: Path) -> None:
if git_type == GitType.INVALID:
raise RemoteBuildInvalidGitRepoError(
- f"Could not find a git repository in {str(path)!r}"
+ message=f"Could not find a git repository in {str(path)!r}",
+ resolution="Initialize a git repository in the project directory",
)
if git_type == GitType.SHALLOW:
raise RemoteBuildInvalidGitRepoError(
- "Remote build for shallow cloned git repos are no longer supported"
+ message="Remote builds for shallow cloned git repos are not supported",
+ resolution="Make a non-shallow clone of the repository",
)
diff --git a/craft_application/services/__init__.py b/craft_application/services/__init__.py
index 331c56fe..e9c066c2 100644
--- a/craft_application/services/__init__.py
+++ b/craft_application/services/__init__.py
@@ -16,7 +16,10 @@
"""Service classes for the business logic of various categories of command."""
from craft_application.services.base import AppService, ProjectService
+from craft_application.services.config import ConfigService
+from craft_application.services.fetch import FetchService
from craft_application.services.lifecycle import LifecycleService
+from craft_application.services.init import InitService
from craft_application.services.package import PackageService
from craft_application.services.provider import ProviderService
from craft_application.services.remotebuild import RemoteBuildService
@@ -25,8 +28,11 @@
__all__ = [
"AppService",
+ "FetchService",
"ProjectService",
+ "ConfigService",
"LifecycleService",
+ "InitService",
"PackageService",
"ProviderService",
"RemoteBuildService",
diff --git a/craft_application/services/config.py b/craft_application/services/config.py
new file mode 100644
index 00000000..6b8c4e48
--- /dev/null
+++ b/craft_application/services/config.py
@@ -0,0 +1,210 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see .
+"""Configuration service."""
+from __future__ import annotations
+
+import abc
+import contextlib
+import enum
+import os
+from collections.abc import Iterable
+from typing import TYPE_CHECKING, Any, TypeVar, cast, final
+
+import pydantic
+import pydantic_core
+import snaphelpers
+from craft_cli import emit
+from typing_extensions import override
+
+from craft_application import _config, application, util
+from craft_application.services import base
+
+if TYPE_CHECKING:
+ from craft_application.services.service_factory import ServiceFactory
+
+
+T = TypeVar("T")
+
+
+class ConfigHandler(abc.ABC):
+ """An abstract class for configuration handlers."""
+
+ def __init__(self, app: application.AppMetadata) -> None:
+ self._app = app
+
+ @abc.abstractmethod
+ def get_raw(self, item: str) -> Any: # noqa: ANN401
+ """Get the string value for a configuration item.
+
+ :param item: the name of the configuration item.
+ :returns: The raw value of the item.
+ :raises KeyError: if the item cannot be found.
+ """
+
+
+@final
+class AppEnvironmentHandler(ConfigHandler):
+ """Configuration handler to get values from app-specific environment variables."""
+
+ def __init__(self, app: application.AppMetadata) -> None:
+ super().__init__(app)
+ self._environ_prefix = f"{app.name.upper()}"
+
+ @override
+ def get_raw(self, item: str) -> str:
+ return os.environ[f"{self._environ_prefix}_{item.upper()}"]
+
+
+@final
+class CraftEnvironmentHandler(ConfigHandler):
+ """Configuration handler to get values from CRAFT environment variables."""
+
+ def __init__(self, app: application.AppMetadata) -> None:
+ super().__init__(app)
+ self._fields = _config.ConfigModel.model_fields
+
+ @override
+ def get_raw(self, item: str) -> str:
+ # Ensure that CRAFT_* env vars can only be used for configuration items
+ # known to craft-application.
+ if item not in self._fields:
+ raise KeyError(f"{item!r} not a general craft-application config item.")
+
+ return os.environ[f"CRAFT_{item.upper()}"]
+
+
+class SnapConfigHandler(ConfigHandler):
+ """Configuration handler that gets values from snap."""
+
+ def __init__(self, app: application.AppMetadata) -> None:
+ super().__init__(app)
+ if not snaphelpers.is_snap():
+ raise OSError("Not running as a snap.")
+ try:
+ self._snap = snaphelpers.SnapConfig()
+ except KeyError:
+ raise OSError("Not running as a snap.")
+ except snaphelpers.SnapCtlError:
+ # Most likely to happen in a container that has the snap environment set.
+ # See: https://github.com/canonical/snapcraft/issues/5079
+ emit.progress(
+ "Snap environment is set, but cannot connect to snapd. "
+ "Snap configuration is unavailable.",
+ permanent=True,
+ )
+ raise OSError("Not running as a snap or with snapd disabled.")
+
+ @override
+ def get_raw(self, item: str) -> Any:
+ snap_item = item.replace("_", "-")
+ try:
+ return self._snap.get(snap_item)
+ except snaphelpers.UnknownConfigKey as exc:
+ raise KeyError(f"unknown snap config item: {item!r}") from exc
+
+
+@final
+class DefaultConfigHandler(ConfigHandler):
+ """Configuration handler for getting default values."""
+
+ def __init__(self, app: application.AppMetadata) -> None:
+ super().__init__(app)
+ self._config_model = app.ConfigModel
+ self._cache: dict[str, str] = {}
+
+ @override
+ def get_raw(self, item: str) -> Any:
+ if item in self._cache:
+ return self._cache[item]
+
+ field = self._config_model.model_fields[item]
+ if field.default is not pydantic_core.PydanticUndefined:
+ self._cache[item] = field.default
+ return field.default
+ if field.default_factory is not None:
+ # TODO: remove the type ignore after pydantic/pydantic#10945 is fixed
+ default = field.default_factory() # type: ignore[call-arg]
+ self._cache[item] = default
+ return default
+
+ raise KeyError(f"config item {item!r} has no default value.")
+
+
+class ConfigService(base.AppService):
+ """Application-wide configuration access."""
+
+ _handlers: list[ConfigHandler]
+
+ def __init__(
+ self,
+ app: application.AppMetadata,
+ services: ServiceFactory,
+ *,
+ extra_handlers: Iterable[type[ConfigHandler]] = (),
+ ) -> None:
+ super().__init__(app, services)
+ self._extra_handlers = extra_handlers
+ self._default_handler = DefaultConfigHandler(self._app)
+
+ @override
+ def setup(self) -> None:
+ super().setup()
+ self._handlers = [
+ AppEnvironmentHandler(self._app),
+ CraftEnvironmentHandler(self._app),
+ *(handler(self._app) for handler in self._extra_handlers),
+ ]
+ try:
+ snap_handler = SnapConfigHandler(self._app)
+ except OSError:
+ emit.debug(
+ "App is not running as a snap - snap config handler not created."
+ )
+ else:
+ self._handlers.append(snap_handler)
+
+ def get(self, item: str) -> Any: # noqa: ANN401
+ """Get the given configuration item."""
+ if item not in self._app.ConfigModel.model_fields:
+ raise KeyError(r"unknown config item: {item!r}")
+ field_info = self._app.ConfigModel.model_fields[item]
+
+ for handler in self._handlers:
+ try:
+ value = handler.get_raw(item)
+ except KeyError:
+ continue
+ else:
+ break
+ else:
+ return self._default_handler.get_raw(item)
+
+ return self._convert_type(value, field_info.annotation) # type: ignore[arg-type,return-value]
+
+ def _convert_type(self, value: str, field_type: type[T]) -> T:
+ """Convert the value to the appropriate type."""
+ if isinstance(field_type, type): # pyright: ignore[reportUnnecessaryIsInstance]
+ if issubclass(field_type, str):
+ return cast(T, field_type(value))
+ if issubclass(field_type, bool):
+ return cast(T, util.strtobool(value))
+ if issubclass(field_type, enum.Enum):
+ with contextlib.suppress(KeyError):
+ return cast(T, field_type[value])
+ with contextlib.suppress(KeyError):
+ return cast(T, field_type[value.upper()])
+ field_adapter = pydantic.TypeAdapter(field_type)
+ return field_adapter.validate_strings(value)
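
A sketch of the resulting lookup order for a hypothetical application named "testcraft" whose ServiceFactory instance is available as `services`:

    import os

    # The app-specific variable takes precedence...
    os.environ["TESTCRAFT_LXD_REMOTE"] = "my-remote"
    # ...over the generic CRAFT_* variable...
    os.environ["CRAFT_LXD_REMOTE"] = "local"

    # ...so this returns "my-remote"; with neither set, any extra handlers,
    # the snap configuration, and finally the ConfigModel default are consulted.
    print(services.config.get("lxd_remote"))
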
diff --git a/craft_application/services/fetch.py b/craft_application/services/fetch.py
new file mode 100644
index 00000000..f170576b
--- /dev/null
+++ b/craft_application/services/fetch.py
@@ -0,0 +1,199 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see .
+"""Service class to communicate with the fetch-service."""
+from __future__ import annotations
+
+import json
+import pathlib
+import subprocess
+import typing
+from functools import partial
+
+import craft_providers
+from craft_cli import emit
+from typing_extensions import override
+
+from craft_application import fetch, models, services, util
+from craft_application.models.manifest import CraftManifest, ProjectManifest
+
+if typing.TYPE_CHECKING:
+ from craft_application.application import AppMetadata
+
+
+_PROJECT_MANIFEST_MANAGED_PATH = pathlib.Path(
+ "/tmp/craft-project-manifest.yaml" # noqa: S108 (possibly insecure)
+)
+
+
+class FetchService(services.ProjectService):
+ """Service class that handles communication with the fetch-service.
+
+ This Service is able to spawn a fetch-service instance and create sessions
+ to be used in managed runs. The general usage flow is this:
+
+ - Initialise a fetch-service via setup() (done automatically by the service
+ factory);
+ - For each managed execution:
+ - Create a new session with create_session(), passing the new managed
+ instance;
+ - Teardown/close the session with teardown_session();
+ - Stop the fetch-service via shutdown().
+ """
+
+ _fetch_process: subprocess.Popen[str] | None
+ _session_data: fetch.SessionData | None
+ _instance: craft_providers.Executor | None
+
+ def __init__(
+ self,
+ app: AppMetadata,
+ services: services.ServiceFactory,
+ *,
+ project: models.Project,
+ build_plan: list[models.BuildInfo],
+ session_policy: str,
+ ) -> None:
+ """Create a new FetchService.
+
+ :param session_policy: Whether the created fetch-service sessions should
+ be "strict" or "permissive".
+ """
+ super().__init__(app, services, project=project)
+ self._fetch_process = None
+ self._session_data = None
+ self._build_plan = build_plan
+ self._session_policy = session_policy
+ self._instance = None
+
+ @override
+ def setup(self) -> None:
+ """Start the fetch-service process with proper arguments."""
+ super().setup()
+
+ if not self._services.ProviderClass.is_managed():
+ # Early fail if the fetch-service is not installed.
+ fetch.verify_installed()
+
+ # Emit a warning, but only on the host-side.
+ logpath = fetch.get_log_filepath()
+ emit.message(
+ "Warning: the fetch-service integration is experimental. "
+ f"Logging output to {str(logpath)!r}."
+ )
+
+ self._fetch_process = fetch.start_service()
+
+ def create_session(self, instance: craft_providers.Executor) -> dict[str, str]:
+ """Create a new session.
+
+ :return: The environment variables that must be used by any process
+ that will use the new session.
+ """
+ if self._session_data is not None:
+ raise ValueError(
+ "create_session() called but there's already a live fetch-service session."
+ )
+
+ strict_session = self._session_policy == "strict"
+ self._session_data = fetch.create_session(strict=strict_session)
+ self._instance = instance
+ emit.progress("Configuring fetch-service integration")
+ return fetch.configure_instance(instance, self._session_data)
+
+ def teardown_session(self) -> dict[str, typing.Any]:
+ """Teardown and cleanup a previously-created session."""
+ if self._session_data is None or self._instance is None:
+ raise ValueError(
+ "teardown_session() called with no live fetch-service session."
+ )
+ report = fetch.teardown_session(self._session_data)
+
+ instance = self._instance
+ instance_path = _PROJECT_MANIFEST_MANAGED_PATH
+ with instance.temporarily_pull_file(source=instance_path, missing_ok=True) as f:
+ if f is not None:
+ # Project manifest was generated; we can create the full manifest
+ self._create_craft_manifest(f, report)
+ else:
+ emit.debug("Project manifest file missing in managed instance.")
+
+ self._session_data = None
+ self._instance = None
+
+ return report
+
+ def shutdown(self, *, force: bool = False) -> None:
+ """Stop the fetch-service.
+
+ The default behavior is a no-op; the Application never shuts down the
+ fetch-service so that it stays up and ready to serve other craft
+ applications.
+
+ :param force: Whether the fetch-service should be, in fact, stopped.
+ """
+ if force and self._fetch_process:
+ fetch.stop_service(self._fetch_process)
+
+ def create_project_manifest(self, artifacts: list[pathlib.Path]) -> None:
+ """Create the project manifest for the artifact in ``artifacts``.
+
+ Only supports a single generated artifact, and only in managed runs.
+ """
+ if not self._services.ProviderClass.is_managed():
+ emit.debug("Unable to generate the project manifest on the host.")
+ return
+
+ emit.debug(f"Generating project manifest at {_PROJECT_MANIFEST_MANAGED_PATH}")
+ project_manifest = ProjectManifest.from_packed_artifact(
+ self._project, self._build_plan[0], artifacts[0]
+ )
+ project_manifest.to_yaml_file(_PROJECT_MANIFEST_MANAGED_PATH)
+
+ def _create_craft_manifest(
+ self, project_manifest: pathlib.Path, session_report: dict[str, typing.Any]
+ ) -> None:
+ name = self._project.name
+ version = self._project.version
+ platform = self._build_plan[0].platform
+
+ manifest_path = pathlib.Path(f"{name}_{version}_{platform}.json")
+ emit.debug(f"Generating craft manifest at {manifest_path}")
+
+ craft_manifest = CraftManifest.create_craft_manifest(
+ project_manifest, session_report
+ )
+ data = craft_manifest.marshal()
+
+ with manifest_path.open("w") as f:
+ json.dump(data, f, ensure_ascii=False, indent=2)
+
+ deps = craft_manifest.dependencies
+ rejections = [dep for dep in deps if dep.rejected]
+
+ if rejections:
+ display = partial(emit.progress, permanent=True)
+ items: list[dict[str, typing.Any]] = []
+ for rejection in rejections:
+ url = rejection.url[0] if len(rejection.url) == 1 else rejection.url
+ items.append({"url": url, "reasons": rejection.rejection_reasons})
+ text = util.dump_yaml(items)
+
+ display(
+ "The following artifacts were marked as rejected by the fetch-service:"
+ )
+ for line in text.splitlines():
+ display(line)
+ display("This build will fail on 'strict' fetch-service sessions.")
diff --git a/craft_application/services/init.py b/craft_application/services/init.py
new file mode 100644
index 00000000..dfdd5eae
--- /dev/null
+++ b/craft_application/services/init.py
@@ -0,0 +1,271 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see .
+
+"""Service for initializing a project."""
+from __future__ import annotations
+
+import os
+import pathlib
+import shutil
+import typing
+from re import Pattern
+from typing import Any
+
+import jinja2
+from craft_cli import emit
+
+from craft_application.errors import InitError
+from craft_application.git import GitError, GitRepo, is_repo, parse_describe
+
+from ..models.constraints import MESSAGE_INVALID_NAME, PROJECT_NAME_COMPILED_REGEX
+from . import base
+
+if typing.TYPE_CHECKING: # pragma: no cover
+ from craft_application.application import AppMetadata
+ from craft_application.services import ServiceFactory
+
+
+class InitService(base.AppService):
+ """Service class for initializing a project."""
+
+ def __init__(
+ self,
+ app: AppMetadata,
+ services: ServiceFactory,
+ *,
+ default_name: str = "my-project",
+ name_regex: Pattern[str] = PROJECT_NAME_COMPILED_REGEX,
+ invalid_name_message: str = MESSAGE_INVALID_NAME,
+ ) -> None:
+ super().__init__(app, services)
+ self._default_name = default_name
+ self._name_regex = name_regex
+ self._invalid_name_message = invalid_name_message
+
+ def validate_project_name(self, name: str, *, use_default: bool = False) -> str:
+ """Validate that ``name`` is valid as a project name.
+
+ If ``name`` is invalid and ``use_default`` is False, then an InitError
+ is raised. If ``use_default`` is True, the default project name provided
+ to the service's constructor is returned.
+
+ If ``name`` is valid, it is returned.
+ """
+ if not self._name_regex.match(name):
+ if use_default:
+ return self._default_name
+ raise InitError(self._invalid_name_message)
+
+ return name
+
+ def initialise_project(
+ self,
+ *,
+ project_dir: pathlib.Path,
+ project_name: str,
+ template_dir: pathlib.Path,
+ ) -> None:
+ """Initialise a new project from a template.
+
+ If a file already exists in the project directory, it is not overwritten.
+ Use `check_for_existing_files()` to see if this will occur before initialising
+ the project.
+
+ :param project_dir: The directory to initialise the project in.
+ :param project_name: The name of the project.
+ :param template_dir: The directory containing the templates.
+ """
+ emit.debug(
+ f"Initialising project {project_name!r} in {str(project_dir)!r} from "
+ f"template in {str(template_dir)!r}."
+ )
+ environment = self._get_templates_environment(template_dir)
+ self._create_project_dir(project_dir=project_dir)
+ context = self._get_context(name=project_name, project_dir=project_dir)
+ self._render_project(environment, project_dir, template_dir, context)
+
+ def check_for_existing_files(
+ self,
+ *,
+ project_dir: pathlib.Path,
+ template_dir: pathlib.Path,
+ ) -> None:
+ """Check if there are any existing files in the project directory that would be overwritten.
+
+ :param project_dir: The directory to initialise the project in.
+ :param template_dir: The directory containing the templates.
+
+ :raises InitError: If there are files in the project directory that would be overwritten.
+ """
+ template_files = self._get_template_files(template_dir)
+ existing_files = [
+ template_file
+ for template_file in template_files
+ if (project_dir / template_file).exists()
+ ]
+
+ if existing_files:
+ existing_files_formatted = "\n - ".join(existing_files)
+ raise InitError(
+ message=(
+ f"Cannot initialise project in {str(project_dir)!r} because it "
+ "would overwrite existing files.\nExisting files are:\n - "
+ f"{existing_files_formatted}"
+ ),
+ resolution=(
+ "Initialise the project in an empty directory or remove the existing files."
+ ),
+ retcode=os.EX_CANTCREAT,
+ )
+
+ @property
+ def default_version(self) -> str:
+ """Return default version that should be used for the InitService context."""
+ return "0.1"
+
+ def _copy_template_file(
+ self,
+ template_name: str,
+ template_dir: pathlib.Path,
+ project_dir: pathlib.Path,
+ ) -> None:
+ """Copy the non-ninja template from template_dir to project_dir.
+
+ If the file already exists in the project directory, copying is skipped.
+
+ :param project_dir: The directory to render the files into.
+ :param template_dir: The directory where templates are stored.
+ :param template_name: Name of the template to copy.
+ """
+ emit.debug(f"Copying file {template_name} to {project_dir}")
+ template_file = template_dir / template_name
+ destination_file = project_dir / template_name
+ if destination_file.exists():
+ emit.debug(f"Skipping file {template_name} because it is already present.")
+ return
+ destination_file.parent.mkdir(parents=True, exist_ok=True)
+ shutil.copy2(template_file, destination_file, follow_symlinks=False)
+
+ def _render_project(
+ self,
+ environment: jinja2.Environment,
+ project_dir: pathlib.Path,
+ template_dir: pathlib.Path,
+ context: dict[str, Any],
+ ) -> None:
+ """Render files for a project from a template.
+
+ :param environment: The Jinja environment to use.
+ :param project_dir: The directory to render the files into.
+ :param template_dir: The directory where templates are stored.
+ :param context: The context to render the templates with.
+ """
+ emit.progress("Rendering project.")
+ for template_name in environment.list_templates():
+ if not template_name.endswith(".j2"):
+ self._copy_template_file(template_name, template_dir, project_dir)
+ continue
+ template = environment.get_template(template_name)
+
+ # trim off `.j2`
+ rendered_template_name = template_name[:-3]
+ emit.debug(f"Rendering {template_name} to {rendered_template_name}")
+
+ path = project_dir / rendered_template_name
+ if path.exists():
+ emit.trace(f"Skipping file {template_name} as it is already present")
+ continue
+ path.parent.mkdir(parents=True, exist_ok=True)
+ with path.open("wt", encoding="utf8") as file:
+ file.write(template.render(context))
+ shutil.copystat((template_dir / template_name), path)
+ emit.progress("Rendered project.")
+
+ def _get_context(self, name: str, *, project_dir: pathlib.Path) -> dict[str, Any]:
+ """Get context to render templates with.
+
+ :returns: A dict of context variables.
+ """
+ emit.debug(f"Set project name to '{name}'")
+
+ version = self._get_version(project_dir=project_dir)
+ if version is not None:
+ emit.debug(f"Discovered project version: {version!r}")
+
+ return {"name": name, "version": version or self.default_version}
+
+ @staticmethod
+ def _create_project_dir(project_dir: pathlib.Path) -> None:
+ """Create the project directory if it does not already exist."""
+ emit.debug(f"Creating project directory {str(project_dir)!r}.")
+ project_dir.mkdir(parents=True, exist_ok=True)
+
+ def _get_version(self, *, project_dir: pathlib.Path) -> str | None:
+ """Try to determine version if project is the git repository."""
+ try:
+ if is_repo(project_dir):
+ git_repo = GitRepo(project_dir)
+ described = git_repo.describe(
+ always_use_long_format=True,
+ show_commit_oid_as_fallback=True,
+ )
+ return parse_describe(described)
+ except GitError as error:
+ emit.debug(f"cannot determine project version: {error.details}")
+
+ return None
+
+ def _get_loader(self, template_dir: pathlib.Path) -> jinja2.BaseLoader:
+ """Return a Jinja loader for the given template directory.
+
+ :param template_dir: The directory containing the templates.
+
+ :returns: A Jinja loader.
+ """
+ return jinja2.PackageLoader(self._app.name, str(template_dir))
+
+ def _get_templates_environment(
+ self, template_dir: pathlib.Path
+ ) -> jinja2.Environment:
+ """Create and return a Jinja environment to deal with the templates.
+
+ :param template_dir: The directory containing the templates.
+
+ :returns: A Jinja environment.
+ """
+ return jinja2.Environment(
+ loader=self._get_loader(template_dir),
+ autoescape=False, # noqa: S701 (jinja2-autoescape-false)
+ keep_trailing_newline=True, # they're not text files if they don't end in newline!
+ optimized=False, # optimization doesn't make sense for one-offs
+ undefined=jinja2.StrictUndefined,
+ ) # fail on undefined
+
+ def _get_template_files(self, template_dir: pathlib.Path) -> list[str]:
+ """Return a list of files that would be created from a template directory.
+
+ Note that the '.j2' suffix is removed from templates.
+
+ :param template_dir: The directory containing the templates.
+
+ :returns: A list of filenames that would be created.
+ """
+ templates = self._get_templates_environment(template_dir).list_templates()
+
+ return [
+ template[:-3] if template.endswith(".j2") else template
+ for template in templates
+ ]
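
A sketch of the typical initialisation flow, assuming `init_service` is the InitService obtained from the service factory and the template directory name is application-specific:

    import pathlib

    name = init_service.validate_project_name("my-project")
    target = pathlib.Path("my-project")

    init_service.check_for_existing_files(
        project_dir=target, template_dir=pathlib.Path("templates/simple")
    )
    init_service.initialise_project(
        project_dir=target,
        project_name=name,
        template_dir=pathlib.Path("templates/simple"),
    )
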
diff --git a/craft_application/services/lifecycle.py b/craft_application/services/lifecycle.py
index 6e959b0e..50024c1c 100644
--- a/craft_application/services/lifecycle.py
+++ b/craft_application/services/lifecycle.py
@@ -17,7 +17,6 @@
from __future__ import annotations
import contextlib
-import os
import types
from typing import TYPE_CHECKING, Any
@@ -165,7 +164,7 @@ def _get_build_for(self) -> str:
# something else like clean() is called).
# We also use the host arch if the build-for is 'all'
if self._build_plan and self._build_plan[0].build_for != "all":
- return self._build_plan[0].build_for
+ return str(self._build_plan[0].build_for)
return util.get_host_architecture()
def _init_lifecycle_manager(self) -> LifecycleManager:
@@ -200,7 +199,7 @@ def _init_lifecycle_manager(self) -> LifecycleManager:
cache_dir=self._cache_dir,
work_dir=self._work_dir,
ignore_local_sources=self._app.source_ignore_patterns,
- parallel_build_count=self._get_parallel_build_count(),
+ parallel_build_count=util.get_parallel_build_count(self._app.name),
project_vars_part_name=self._project.adopt_info,
project_vars=self._project_vars,
track_stage_packages=True,
@@ -324,92 +323,6 @@ def __repr__(self) -> str:
f"{work_dir=}, {cache_dir=}, {plan=}, **{self._manager_kwargs!r})"
)
- def _verify_parallel_build_count(
- self, env_name: str, parallel_build_count: int | str
- ) -> int:
- """Verify the parallel build count is valid.
-
- :param env_name: The name of the environment variable being checked.
- :param parallel_build_count: The value of the variable.
- :return: The parallel build count as an integer.
- """
- try:
- parallel_build_count = int(parallel_build_count)
- except ValueError as err:
- raise errors.InvalidParameterError(
- env_name, str(os.environ[env_name])
- ) from err
-
- # Ensure the value is valid positive integer
- if parallel_build_count < 1:
- raise errors.InvalidParameterError(env_name, str(parallel_build_count))
-
- return parallel_build_count
-
- def _get_parallel_build_count(self) -> int:
- """Get the number of parallel builds to run.
-
- The parallel build count is determined by the first available of the
- following environment variables in the order:
-
- - _PARALLEL_BUILD_COUNT
- - CRAFT_PARALLEL_BUILD_COUNT
- - _MAX_PARALLEL_BUILD_COUNT
- - CRAFT_MAX_PARALLEL_BUILD_COUNT
-
- where the MAX_PARALLEL_BUILD_COUNT variables are dynamically compared to
- the number of CPUs, and the smaller of the two is used.
-
- If no environment variable is set, the CPU count is used.
- If the CPU count is not available for some reason, 1 is used as a fallback.
- """
- parallel_build_count = None
-
- # fixed parallel build count environment variable
- for env_name in [
- (self._app.name + "_PARALLEL_BUILD_COUNT").upper(),
- "CRAFT_PARALLEL_BUILD_COUNT",
- ]:
- if os.environ.get(env_name):
- parallel_build_count = self._verify_parallel_build_count(
- env_name, os.environ[env_name]
- )
- emit.debug(
- f"Using parallel build count of {parallel_build_count} "
- f"from environment variable {env_name!r}"
- )
- break
-
- # CPU count related max parallel build count environment variable
- if parallel_build_count is None:
- cpu_count = os.cpu_count() or 1
- for env_name in [
- (self._app.name + "_MAX_PARALLEL_BUILD_COUNT").upper(),
- "CRAFT_MAX_PARALLEL_BUILD_COUNT",
- ]:
- if os.environ.get(env_name):
- parallel_build_count = min(
- cpu_count,
- self._verify_parallel_build_count(
- env_name, os.environ[env_name]
- ),
- )
- emit.debug(
- f"Using parallel build count of {parallel_build_count} "
- f"from environment variable {env_name!r}"
- )
- break
-
- # Default to CPU count if no max environment variable is set
- if parallel_build_count is None:
- parallel_build_count = cpu_count
- emit.debug(
- f"Using parallel build count of {parallel_build_count} "
- "from CPU count"
- )
-
- return parallel_build_count
-
def _get_local_keys_path(self) -> Path | None:
"""Return a directory with public keys for package-repositories.
diff --git a/craft_application/services/provider.py b/craft_application/services/provider.py
index 7ca9e51c..efadd82f 100644
--- a/craft_application/services/provider.py
+++ b/craft_application/services/provider.py
@@ -22,6 +22,8 @@
import pathlib
import pkgutil
import sys
+import urllib.request
+from collections.abc import Generator, Iterable
from pathlib import Path
from typing import TYPE_CHECKING
@@ -36,8 +38,6 @@
from craft_application.util import platforms, snap_config
if TYPE_CHECKING: # pragma: no cover
- from collections.abc import Generator
-
import craft_providers
from craft_application import models
@@ -45,7 +45,7 @@
from craft_application.services import ServiceFactory
-DEFAULT_FORWARD_ENVIRONMENT_VARIABLES = ("http_proxy", "https_proxy", "no_proxy")
+DEFAULT_FORWARD_ENVIRONMENT_VARIABLES: Iterable[str] = ()
class ProviderService(base.ProjectService):
@@ -93,13 +93,31 @@ def setup(self) -> None:
if name in os.environ:
self.environment[name] = os.getenv(name)
+ for scheme, value in urllib.request.getproxies().items():
+ self.environment[f"{scheme.lower()}_proxy"] = value
+ self.environment[f"{scheme.upper()}_PROXY"] = value
+
if self._install_snap:
- channel = (
- None
- if util.is_running_from_snap(self._app.name)
- else os.getenv("CRAFT_SNAP_CHANNEL", "latest/stable")
- )
- self.snaps.append(Snap(name=self._app.name, channel=channel, classic=True))
+ if util.is_running_from_snap(self._app.name):
+ # use the aliased name of the snap when injecting
+ name = os.getenv("SNAP_INSTANCE_NAME", self._app.name)
+ channel = None
+ emit.debug(
+ f"Setting {self._app.name} to be injected from the "
+ "host into the build environment because it is running "
+ "as a snap."
+ )
+ else:
+ # use the snap name when installing from the store
+ name = self._app.name
+ channel = os.getenv("CRAFT_SNAP_CHANNEL", "latest/stable")
+ emit.debug(
+ f"Setting {self._app.name} to be installed from the {channel} "
+ "channel in the build environment because it is not running "
+ "as a snap."
+ )
+
+ self.snaps.append(Snap(name=name, channel=channel, classic=True))
@contextlib.contextmanager
def instance(
@@ -108,6 +126,7 @@ def instance(
*,
work_dir: pathlib.Path,
allow_unstable: bool = True,
+ clean_existing: bool = False,
**kwargs: bool | str | None,
) -> Generator[craft_providers.Executor, None, None]:
"""Context manager for getting a provider instance.
@@ -115,6 +134,8 @@ def instance(
:param build_info: Build information for the instance.
:param work_dir: Local path to mount inside the provider instance.
:param allow_unstable: Whether to allow the use of unstable images.
+ :param clean_existing: Whether pre-existing instances should be wiped
+ and re-created.
:returns: a context manager of the provider instance.
"""
instance_name = self._get_instance_name(work_dir, build_info)
@@ -125,6 +146,9 @@ def instance(
provider.ensure_provider_is_available()
+ if clean_existing:
+ self._clean_instance(provider, work_dir, build_info)
+
emit.progress(f"Launching managed {base_name[0]} {base_name[1]} instance...")
with provider.launched_environment(
project_name=self._project.name,
@@ -206,12 +230,12 @@ def get_provider(self, name: str | None = None) -> craft_providers.Provider:
emit.debug(f"Using provider {name!r} passed as an argument.")
chosen_provider: str = name
- # (2) get the provider from the environment (CRAFT_BUILD_ENVIRONMENT),
- elif env_provider := os.getenv("CRAFT_BUILD_ENVIRONMENT"):
- emit.debug(f"Using provider {env_provider!r} from environment.")
- chosen_provider = env_provider
+ # (2) get the provider from build_environment
+ elif provider := self._services.config.get("build_environment"):
+ emit.debug(f"Using provider {provider!r} from system configuration.")
+ chosen_provider = provider
- # (3) use provider specified with snap configuration,
+ # (3) use provider specified in snap configuration
elif snap_provider := self._get_provider_from_snap_config():
emit.debug(f"Using provider {snap_provider!r} from snap config.")
chosen_provider = snap_provider
@@ -259,9 +283,7 @@ def clean_instances(self) -> None:
emit.progress(f"Cleaning build {target}")
for info in build_plan:
- instance_name = self._get_instance_name(self._work_dir, info)
- emit.debug(f"Cleaning instance {instance_name}")
- provider.clean_project_environments(instance_name=instance_name)
+ self._clean_instance(provider, self._work_dir, info)
def _get_instance_name(
self, work_dir: pathlib.Path, build_info: models.BuildInfo
@@ -285,7 +307,7 @@ def _get_provider_by_name(self, name: str) -> craft_providers.Provider:
def _get_lxd_provider(self) -> LXDProvider:
"""Get the LXD provider for this manager."""
- lxd_remote = os.getenv("CRAFT_LXD_REMOTE", "local")
+ lxd_remote = self._services.config.get("lxd_remote")
return LXDProvider(lxd_project=self._app.name, lxd_remote=lxd_remote)
def _get_multipass_provider(self) -> MultipassProvider:
@@ -324,3 +346,14 @@ def _setup_instance_bashrc(self, instance: craft_providers.Executor) -> None:
content=io.BytesIO(bashrc),
file_mode="644",
)
+
+ def _clean_instance(
+ self,
+ provider: craft_providers.Provider,
+ work_dir: pathlib.Path,
+ info: models.BuildInfo,
+ ) -> None:
+ """Clean an instance, if it exists."""
+ instance_name = self._get_instance_name(work_dir, info)
+ emit.debug(f"Cleaning instance {instance_name}")
+ provider.clean_project_environments(instance_name=instance_name)
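
A sketch of the new clean_existing flag, assuming `provider_service` is a set-up ProviderService and `build_info` is one entry from the build plan:

    import pathlib

    with provider_service.instance(
        build_info,
        work_dir=pathlib.Path.cwd(),
        clean_existing=True,  # wipe any stale instance before launching
    ) as instance:
        instance.execute_run(["echo", "hello"])
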
diff --git a/craft_application/services/remotebuild.py b/craft_application/services/remotebuild.py
index a58b7dee..e7645ac8 100644
--- a/craft_application/services/remotebuild.py
+++ b/craft_application/services/remotebuild.py
@@ -47,10 +47,6 @@
DEFAULT_POLL_INTERVAL = 30
-def _get_launchpad_instance(default: str = "production") -> str:
- return os.getenv("CRAFT_LAUNCHPAD_INSTANCE", default)
-
-
class RemoteBuildService(base.AppService):
"""Abstract service for performing remote builds."""
@@ -254,7 +250,7 @@ def _get_lp_client(self) -> launchpad.Launchpad:
with craft_cli.emit.pause():
return launchpad.Launchpad.login(
f"{self._app.name}/{self._app.version}",
- root=_get_launchpad_instance(),
+ root=self._services.config.get("launchpad_instance"),
credentials_file=credentials_filepath,
)
diff --git a/craft_application/services/service_factory.py b/craft_application/services/service_factory.py
index d00e4be6..51416e52 100644
--- a/craft_application/services/service_factory.py
+++ b/craft_application/services/service_factory.py
@@ -15,6 +15,7 @@
from __future__ import annotations
import dataclasses
+import warnings
from typing import TYPE_CHECKING, Any
from craft_application import models, services
@@ -41,6 +42,9 @@ class ServiceFactory:
ProviderClass: type[services.ProviderService] = services.ProviderService
RemoteBuildClass: type[services.RemoteBuildService] = services.RemoteBuildService
RequestClass: type[services.RequestService] = services.RequestService
+ ConfigClass: type[services.ConfigService] = services.ConfigService
+ FetchClass: type[services.FetchService] = services.FetchService
+ InitClass: type[services.InitService] = services.InitService
project: models.Project | None = None
@@ -52,6 +56,9 @@ class ServiceFactory:
provider: services.ProviderService = None # type: ignore[assignment]
remote_build: services.RemoteBuildService = None # type: ignore[assignment]
request: services.RequestService = None # type: ignore[assignment]
+ config: services.ConfigService = None # type: ignore[assignment]
+ fetch: services.FetchService = None # type: ignore[assignment]
+ init: services.InitService = None # type: ignore[assignment]
def __post_init__(self) -> None:
self._service_kwargs: dict[str, dict[str, Any]] = {}
@@ -61,9 +68,32 @@ def set_kwargs(
service: str,
**kwargs: Any, # noqa: ANN401 this is intentionally duck-typed.
) -> None:
- """Set up the keyword arguments to pass to a particular service class."""
+ """Set up the keyword arguments to pass to a particular service class.
+
+ PENDING DEPRECATION: use update_kwargs instead
+ """
+ warnings.warn(
+ PendingDeprecationWarning(
+ "ServiceFactory.set_kwargs is pending deprecation. Use update_kwargs instead."
+ ),
+ stacklevel=2,
+ )
self._service_kwargs[service] = kwargs
+ def update_kwargs(
+ self,
+ service: str,
+ **kwargs: Any, # noqa: ANN401 this is intentionally duck-typed.
+ ) -> None:
+ """Update the keyword arguments to pass to a particular service class.
+
+ This works like ``dict.update()``, overwriting already-set values.
+
+ :param service: the name of the service (e.g. "lifecycle")
+ :param kwargs: keyword arguments to set.
+ """
+ self._service_kwargs.setdefault(service, {}).update(kwargs)
+
def __getattr__(self, service: str) -> services.AppService:
"""Instantiate a service class.
diff --git a/craft_application/util/__init__.py b/craft_application/util/__init__.py
index 86a92151..14c379db 100644
--- a/craft_application/util/__init__.py
+++ b/craft_application/util/__init__.py
@@ -32,6 +32,7 @@
is_running_from_snap,
)
from craft_application.util.string import humanize_list, strtobool
+from craft_application.util.system import get_parallel_build_count
from craft_application.util.yaml import dump_yaml, safe_yaml_load
from craft_application.util.pro_services import ProServices, ValidatorOptions
@@ -53,6 +54,7 @@
"dump_yaml",
"safe_yaml_load",
"retry",
+ "get_parallel_build_count",
"ProServices",
"ValidatorOptions",
]
diff --git a/craft_application/util/pro_services.py b/craft_application/util/pro_services.py
index 70bec90a..5ba3a754 100644
--- a/craft_application/util/pro_services.py
+++ b/craft_application/util/pro_services.py
@@ -14,6 +14,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see .
"""Handling of Ubuntu Pro Services."""
+
from __future__ import annotations
import json
@@ -72,6 +73,8 @@ class ProServices(set[str]):
# placeholder for empty sets
empty_placeholder = "none"
+ managed_mode = False
+
supported_services: set[str] = {
"esm-apps",
"esm-infra",
@@ -84,7 +87,6 @@ class ProServices(set[str]):
pro_executable: Path | None = next(
(path for path in PRO_CLIENT_PATHS if path.exists()), None
)
- # locations to check for pro client
def __str__(self) -> str:
"""Convert to string for display to user."""
@@ -165,7 +167,7 @@ def is_pro_attached(cls) -> bool:
return response["data"]["attributes"]["is_attached"] # type: ignore [no-any-return]
@classmethod
- def get_pro_services(cls) -> ProServices:
+ def _get_pro_services(cls) -> set[str]:
"""Return set of enabled Ubuntu Pro services in the environment.
The returned set only includes services relevant to lifecycle commands.
@@ -176,9 +178,15 @@ def get_pro_services(cls) -> ProServices:
service_names = {service["name"] for service in enabled_services}
# remove any services that aren't relevant to build services
- service_names = service_names.intersection(cls.supported_services)
+ return service_names.intersection(cls.supported_services)
- return cls(service_names)
+ @classmethod
+ def get_pro_services(cls) -> ProServices:
+ """Return a class of enabled Ubuntu Pro services in the environment.
+
+ The returned set only includes services relevant to lifecycle commands.
+ """
+ return cls(cls._get_pro_services())
def validate(
self,
@@ -198,22 +206,28 @@ def validate(
if self.is_pro_attached() != bool(self):
if ValidatorOptions._ATTACHED in options and self: # type: ignore [reportPrivateUsage]
- # Ubuntu Pro is requested but not attached
+ # Pro rock is requested but the host is not attached
raise UbuntuProDetachedError
- if ValidatorOptions._DETACHED in options and not self: # type: ignore [reportPrivateUsage]
- # Ubuntu Pro is not requested but attached
+ if (
+ ValidatorOptions._DETACHED in options # type: ignore [reportPrivateUsage]
+ and not self
+ and not self.managed_mode
+ ):
+ # Pro rock is not requested but the host is attached
raise UbuntuProAttachedError
# second, check that the set of enabled pro services in the environment matches
# the services specified in this set
- if ValidatorOptions.ENABLEMENT in options and (
- (available_services := self.get_pro_services()) != self
+ available_services = self._get_pro_services()
+ if (
+ ValidatorOptions.ENABLEMENT in options
+ and str(self) != self.empty_placeholder
+ and not self.issubset(available_services)
):
- raise InvalidUbuntuProStatusError(self, available_services)
+ raise InvalidUbuntuProStatusError(self)
except UbuntuProClientNotFoundError:
-
# If The pro client was not found, we may be on a non Ubuntu
# system, but if Pro services were requested, re-raise error
if self and not self.pro_client_exists():
diff --git a/craft_application/util/system.py b/craft_application/util/system.py
new file mode 100644
index 00000000..f59d89ed
--- /dev/null
+++ b/craft_application/util/system.py
@@ -0,0 +1,105 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+"""System-level util functions."""
+from __future__ import annotations
+
+import os
+
+from craft_cli import emit
+
+from craft_application.errors import InvalidParameterError
+
+
+def _verify_parallel_build_count(env_name: str, parallel_build_count: int | str) -> int:
+ """Verify the parallel build count is valid.
+
+ :param env_name: The name of the environment variable being checked.
+ :param parallel_build_count: The value of the variable.
+ :return: The parallel build count as an integer.
+ """
+ try:
+ parallel_build_count = int(parallel_build_count)
+ except ValueError as err:
+ raise InvalidParameterError(env_name, str(os.environ[env_name])) from err
+
+ # Ensure the value is a valid positive integer
+ if parallel_build_count < 1:
+ raise InvalidParameterError(env_name, str(parallel_build_count))
+
+ return parallel_build_count
+
+
+def get_parallel_build_count(app_name: str) -> int:
+ """Get the number of parallel builds to run.
+
+ The parallel build count is determined by the first of the following
+ environment variables that is set, in order:
+
+ - <app_name>_PARALLEL_BUILD_COUNT
+ - CRAFT_PARALLEL_BUILD_COUNT
+ - <app_name>_MAX_PARALLEL_BUILD_COUNT
+ - CRAFT_MAX_PARALLEL_BUILD_COUNT
+
+ where the MAX_PARALLEL_BUILD_COUNT variables are dynamically compared to
+ the number of CPUs, and the smaller of the two is used.
+
+ If no environment variable is set, the CPU count is used.
+ If the CPU count is not available for some reason, 1 is used as a fallback.
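+
+ For example (illustrative only), with TESTCRAFT_PARALLEL_BUILD_COUNT=4 set in
+ the environment, get_parallel_build_count("testcraft") returns 4.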
+ """
+ parallel_build_count = None
+
+ # fixed parallel build count environment variable
+ for env_name in [
+ (app_name + "_PARALLEL_BUILD_COUNT").upper(),
+ "CRAFT_PARALLEL_BUILD_COUNT",
+ ]:
+ if os.environ.get(env_name):
+ parallel_build_count = _verify_parallel_build_count(
+ env_name, os.environ[env_name]
+ )
+ emit.debug(
+ f"Using parallel build count of {parallel_build_count} "
+ f"from environment variable {env_name!r}"
+ )
+ break
+
+ # CPU count related max parallel build count environment variable
+ if parallel_build_count is None:
+ cpu_count = os.cpu_count() or 1
+ for env_name in [
+ (app_name + "_MAX_PARALLEL_BUILD_COUNT").upper(),
+ "CRAFT_MAX_PARALLEL_BUILD_COUNT",
+ ]:
+ if os.environ.get(env_name):
+ parallel_build_count = min(
+ cpu_count,
+ _verify_parallel_build_count(env_name, os.environ[env_name]),
+ )
+ emit.debug(
+ f"Using parallel build count of {parallel_build_count} "
+ f"from environment variable {env_name!r}"
+ )
+ break
+
+ # Default to CPU count if no max environment variable is set
+ if parallel_build_count is None:
+ parallel_build_count = cpu_count
+ emit.debug(
+ f"Using parallel build count of {parallel_build_count} "
+ "from CPU count"
+ )
+
+ return parallel_build_count
diff --git a/craft_application/util/yaml.py b/craft_application/util/yaml.py
index 2beac5eb..4a7f18e8 100644
--- a/craft_application/util/yaml.py
+++ b/craft_application/util/yaml.py
@@ -113,15 +113,15 @@ def safe_yaml_load(stream: TextIO) -> Any: # noqa: ANN401 - The YAML could be a
@overload
def dump_yaml(
- data: Any, stream: TextIO, **kwargs: Any # noqa: ANN401 Any gets passed to pyyaml
+ data: Any, stream: TextIO, **kwargs: Any # noqa: ANN401 # Any gets passed to pyyaml
) -> None: ... # pragma: no cover
@overload
def dump_yaml(
- data: Any, # noqa: ANN401 Any gets passed to pyyaml
+ data: Any, # noqa: ANN401 # Any gets passed to pyyaml
stream: None = None,
- **kwargs: Any, # noqa: ANN401 Any gets passed to pyyaml
+ **kwargs: Any, # noqa: ANN401 # Any gets passed to pyyaml
) -> str: ... # pragma: no cover
diff --git a/docs/howto/partitions.rst b/docs/howto/partitions.rst
index f3dd4d02..72d4a9d6 100644
--- a/docs/howto/partitions.rst
+++ b/docs/howto/partitions.rst
@@ -45,45 +45,57 @@ Required application changes
To add partition support to an application, two basic changes are needed:
-#. Enable the feature
+#. Enable the feature.
- Use the :class:`Features ` class to specify that the
- application will use partitions:
+ In your Application subclass, override the following method and instantiate the
+ :class:`Features ` class:
.. code-block:: python
from craft_parts import Features
- Features.reset()
- Features(enable_partitions=True)
+ class ExampleApplication(Application):
- .. NOTE::
- The ``craft-application`` class :class:`AppFeatures
- ` has a similar name and serves a similar
- purpose to ``craft-parts``'s :class:`Features `,
- but partitions cannot be enabled via :class:`AppFeatures
- `!
+ ...
-#. Define the list of partitions
+ @override
+ def _enable_craft_parts_features(self) -> None:
+ Features(enable_partitions=True)
- We need to tell the :class:`LifecycleManager `
- class about our partitions, but applications do not usually directly
- instantiate the LifecycleManager.
+ You can only enable partitions with the :class:`Features
+ ` class from craft-parts. In craft-application
+ there's a similarly-named :class:`AppFeatures
+ ` class which serves a similar purpose,
+ but it can't enable partitions.
- Instead, override your :class:`Application
- `'s ``_setup_partitions`` method, and return
- a list of the partitions, which will eventually be passed to the
- :class:`LifecycleManager `:
+ .. Tip::
+ In unit tests, the :class:`Features ` global
+ singleton may raise exceptions when successive tests repeatedly try to
+ enable partitions.
+
+ To prevent these errors, reset the features at the start of each test:
+
+ .. code-block:: python
+
+ Features.reset()
+
+
+
+#. Define the list of partitions.
+
+ Override the ``_setup_partitions`` method of your :class:`Application
+ ` class and return the list of the
+ partitions.
.. code-block:: python
- class SnackcraftApplication(Application):
+ class ExampleApplication(Application):
- ...
+ ...
- @override
- def _setup_partitions(self, yaml_data: dict[str, Any]) -> list[str] | None:
- return ["default", "kernel", "component/bar-baz"]
+ @override
+ def _setup_partitions(self, yaml_data: dict[str, Any]) -> list[str] | None:
+ return ["default", "kernel", "component/bar-baz"]
Using the partitions
====================
diff --git a/docs/index.rst b/docs/index.rst
index 02c4cfc9..7caea660 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -12,26 +12,16 @@ Craft-Application
reference/index
explanation/index
-.. grid:: 1 1 2 2
-
- .. grid-item-card:: :ref:`Tutorial `
-
- **Get started** with a hands-on introduction to Craft-Application
-
- .. grid-item-card:: :ref:`How-to guides `
-
- **Step-by-step guides** covering key operations and common tasks
-
-.. grid:: 1 1 2 2
- :reverse:
-
- .. grid-item-card:: :ref:`Reference `
-
- **Technical information** about Craft-Application
-
- .. grid-item-card:: :ref:`Explanation `
-
- **Discussion and clarification** of key topics
+.. list-table::
+
+ * - | :ref:`Tutorial `
+ | **Get started** with a hands-on introduction to craft-application
+ - | :ref:`How-to guides `
+ | **Step-by-step guides** covering key operations and common tasks
+ * - | :ref:`Reference `
+ | **Technical information** about craft-application
+ - | :ref:`Explanation `
+ | **Discussion and clarification** of key topics
Project and community
=====================
diff --git a/docs/reference/changelog.rst b/docs/reference/changelog.rst
index 474d5fb8..ee6c0aa4 100644
--- a/docs/reference/changelog.rst
+++ b/docs/reference/changelog.rst
@@ -1,7 +1,215 @@
+:tocdepth: 2
+
*********
Changelog
*********
+4.5.0 (2024-Nov-28)
+-------------------
+
+Application
+===========
+
+- The fetch-service integration now assumes that the fetch-service snap is
+ tracking the ``latest/candidate`` channel.
+- Fix an issue where the fetch-service output was not correctly logged when
+ running in a snapped craft tool.
+
+Commands
+========
+
+- Provide a documentation link in help messages.
+- Updates to the ``init`` command:
+
+ - If the ``--name`` argument is provided, the command now checks if the value
+ is a valid project name, and returns an error if it isn't.
+ - If the ``--name`` argument is *not* provided, the command now checks whether
+ the project directory name is a valid project name. If it isn't, the command sets
+ the project name to ``my-project``.
+
+Services
+========
+
+- Add version to the template generation context of ``InitService``.
+
+..
+ For a complete list of commits, check out the `4.5.0`_ release on GitHub.
+
+4.4.0 (2024-Nov-08)
+-------------------
+
+Application
+===========
+
+- ``AppCommand`` subclasses will now always receive a valid ``app_config``
+ dict.
+- Fixes a bug where the fetch-service integration would try to spawn the
+ fetch-service process when running in managed mode.
+- Cleans up the output from the fetch-service integration.
+
+Commands
+========
+
+- Adds an ``init`` command for initialising new projects.
+- Lifecycle commands are ordered in the sequence they run rather than
+ alphabetically in help messages.
+- Preserves order of ``CommandGroups`` defined by the application.
+- Applications can override commands defined by Craft Application in the
+ same ``CommandGroup``.
+
+Services
+========
+
+- Adds an ``InitService`` for initialising new projects.
+
+For a complete list of commits, check out the `4.4.0`_ release on GitHub.
+
+4.3.0 (2024-Oct-11)
+-------------------
+
+Application
+===========
+
+- Added compatibility methods for craft-platforms models.
+
+Commands
+========
+
+- The ``clean`` command now supports the ``--platform`` argument to filter
+ which build environments to clean.
+
+Services
+========
+
+- Added an experimental integration with the fetch-service, to generate
+ manifests listing assets that were downloaded during the build.
+
+For a complete list of commits, check out the `4.3.0`_ release on GitHub.
+
+4.2.7 (2024-Oct-08)
+-------------------
+
+- Don't depend on requests >= 2.32.0.
+- Fix: set CRAFT_PARALLEL_BUILD_COUNT correctly in ``override-`` scripts.
+
+For a complete list of commits, check out the `4.2.7`_ release on GitHub.
+
+4.2.6 (2024-Oct-04)
+-------------------
+
+- Remove the ``requests<2.32.0`` constraint to resolve CVE-2024-35195.
+
+For a complete list of commits, check out the `4.2.6`_ release on GitHub.
+
+4.2.5 (2024-Oct-04)
+-------------------
+
+Services
+========
+
+- The config service handles snap issues better.
+
+For a complete list of commits, check out the `4.2.5`_ release on GitHub.
+
+4.2.4 (2024-Sep-19)
+-------------------
+
+Remote build
+============
+
+- Remote build errors are now a subclass of ``CraftError``.
+
+For a complete list of commits, check out the `4.2.4`_ release on GitHub.
+
+4.2.3 (2024-Sep-18)
+-------------------
+
+Application
+===========
+
+- ``get_arg_or_config`` now correctly checks the config service if the passed
+ namespace has ``None`` as the value of the requested item.
+
+For a complete list of commits, check out the `4.2.3`_ release on GitHub.
+
+4.2.2 (2024-Sep-13)
+-------------------
+
+Application
+===========
+
+- Add a ``_run_inner`` method to override or wrap the core run logic.
+
+For a complete list of commits, check out the `4.2.2`_ release on GitHub.
+
+4.2.1 (2024-Sep-13)
+-------------------
+
+Models
+======
+
+- Fix a regression where numeric part properties could not be parsed.
+
+For a complete list of commits, check out the `4.2.1`_ release on GitHub.
+
+4.2.0 (2024-Sep-12)
+-------------------
+
+Application
+===========
+
+- Add a configuration service to unify handling of command line arguments,
+ environment variables, snap configurations, and so on.
+- Use the standard library to retrieve the host's proxies.
+
+Commands
+========
+
+- Properly support ``--shell``, ``--shell-after`` and ``--debug`` on the
+ ``pack`` command.
+
+For a complete list of commits, check out the `4.2.0`_ release on GitHub.
+
+
+4.1.3 (2024-Sep-12)
+-------------------
+
+Models
+======
+
+- Fix a regression where numeric part properties could not be parsed.
+
+For a complete list of commits, check out the `4.1.3`_ release on GitHub.
+
+
+4.1.2 (2024-Sep-05)
+-------------------
+
+Application
+===========
+
+- Managed runs now fail if the build plan is empty.
+- Error message tweaks for invalid YAML files.
+
+Models
+======
+
+- Platform models now correctly accept non-vectorised architectures.
+
+For a complete list of commits, check out the `4.1.2`_ release on GitHub.
+
+4.1.1 (2024-Aug-27)
+-------------------
+
+Application
+===========
+
+- When a build fails due to matching multiple platforms, those matching
+ platforms will be specified in the error message.
+- Show nicer error messages for invalid YAML files.
+
+For a complete list of commits, check out the `4.1.1`_ release on GitHub.
+
4.1.0 (2024-Aug-14)
-------------------
@@ -195,3 +403,17 @@ For a complete list of commits, check out the `2.7.0`_ release on GitHub.
.. _3.2.0: https://github.com/canonical/craft-application/releases/tag/3.2.0
.. _4.0.0: https://github.com/canonical/craft-application/releases/tag/4.0.0
.. _4.1.0: https://github.com/canonical/craft-application/releases/tag/4.1.0
+.. _4.1.1: https://github.com/canonical/craft-application/releases/tag/4.1.1
+.. _4.1.2: https://github.com/canonical/craft-application/releases/tag/4.1.2
+.. _4.1.3: https://github.com/canonical/craft-application/releases/tag/4.1.3
+.. _4.2.0: https://github.com/canonical/craft-application/releases/tag/4.2.0
+.. _4.2.1: https://github.com/canonical/craft-application/releases/tag/4.2.1
+.. _4.2.2: https://github.com/canonical/craft-application/releases/tag/4.2.2
+.. _4.2.3: https://github.com/canonical/craft-application/releases/tag/4.2.3
+.. _4.2.4: https://github.com/canonical/craft-application/releases/tag/4.2.4
+.. _4.2.5: https://github.com/canonical/craft-application/releases/tag/4.2.5
+.. _4.2.6: https://github.com/canonical/craft-application/releases/tag/4.2.6
+.. _4.2.7: https://github.com/canonical/craft-application/releases/tag/4.2.7
+.. _4.3.0: https://github.com/canonical/craft-application/releases/tag/4.3.0
+.. _4.4.0: https://github.com/canonical/craft-application/releases/tag/4.4.0
+.. _4.5.0: https://github.com/canonical/craft-application/releases/tag/4.5.0
diff --git a/docs/reference/environment-variables.rst b/docs/reference/environment-variables.rst
new file mode 100644
index 00000000..0fb29d8f
--- /dev/null
+++ b/docs/reference/environment-variables.rst
@@ -0,0 +1,111 @@
+*********************
+Environment variables
+*********************
+
+Applications built on craft-application recognise several environment variables
+that configure their behaviour. The variables and the behaviour they modify are
+listed below.
+
+Variables passed to managed builders
+------------------------------------
+
+Several environment variables from the host environment are passed to the
+managed build environment. An application may change this set by modifying
+the ``environment`` dictionary attached to the ``ProviderService``; by default,
+craft-application forwards the ``http_proxy``, ``https_proxy``
+and ``no_proxy`` environment variables from the host.
+
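+For example, an application could forward an additional variable by extending
+this dictionary when setting up its provider. This is an illustrative sketch:
+``MY_MIRROR`` is a made-up variable, and ``provider_service`` stands for the
+application's ``ProviderService`` instance.
+
+.. code-block:: python
+
+    import os
+
+    # Illustrative: forward MY_MIRROR from the host into managed builds.
+    provider_service.environment["MY_MIRROR"] = os.environ.get("MY_MIRROR", "")
+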
+Supported variables
+-------------------
+
+These variables are explicitly supported for user configuration.
+
+.. _env-var-craft-build-environment:
+
+``CRAFT_BUILD_ENVIRONMENT``
+===========================
+
+If set to ``host``, the craft application runs directly on the host rather
+than in managed mode. This method is
+roughly equivalent to using ``--destructive-mode``, but is designed for
+configurations where the application is already being run in an appropriate
+container or VM, such as
+`Snapcraft rocks `_ or
+when controlled by a CI system such as `Launchpad `_.
+
+**CAUTION**: Setting the build environment is only recommended if you are
+aware of the exact packages needed to reproduce the build containers created
+by the app.
+
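+For example, a CI wrapper that already runs inside a suitable container might
+invoke the tool with the variable set (an illustrative sketch; ``testcraft``
+stands in for a real craft tool):
+
+.. code-block:: python
+
+    import os
+    import subprocess
+
+    env = dict(os.environ, CRAFT_BUILD_ENVIRONMENT="host")
+    subprocess.run(["testcraft", "pack"], env=env, check=True)
+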
+``CRAFT_BUILD_FOR``
+===================
+
+Sets the default architecture to build for. Overridden by ``--build-for`` in
+lifecycle commands.
+
+``CRAFT_PLATFORM``
+==================
+
+Sets the default platform to build. Overridden by ``--platform`` in lifecycle
+commands.
+
+``CRAFT_SNAP_CHANNEL``
+======================
+
+Overrides the default channel that a craft application's snap is installed from
+if the manager instance is not running as a snap. If unset, the application
+will be installed from the ``latest/stable`` channel. If the application is
+running from a snap, this variable is ignored and the same snap used on
+the host system is injected into the managed builder.
+
+``CRAFT_VERBOSITY_LEVEL``
+=========================
+
+Sets the verbosity level for the application. Valid values are: ``quiet``,
+``brief``, ``verbose``, ``debug`` and ``trace``. This is overridden by the
+``--quiet``, ``--verbose`` or ``--verbosity={value}`` global command options.
+
+Development variables
+---------------------
+
+The following variables help developers who are building applications with
+craft-application debug their code:
+
+``CRAFT_DEBUG``
+===============
+
+Controls whether the application is in debug mode. If this variable is set to
+``1``, general exceptions will not be caught, instead showing a traceback on
+the command line. This is normally only useful for developers working on
+craft-application or an app that uses the framework, as a traceback is always
+written to the log file as well.
+
+``CRAFT_LAUNCHPAD_INSTANCE``
+============================
+
+For remote builds, allows the user to set an alternative Launchpad instance.
+Accepts any string that can be used as the ``service_root`` value in
+`Launchpadlib `_.
+
+Unsupported variables
+---------------------
+
+The following variables cause behaviour changes in craft-application, but
+should not be set except by craft-application itself.
+
+``CRAFT_LXD_REMOTE``
+====================
+
+If using LXD, the application will start containers in the configured remote
+rather than ``local``.
+
+**CAUTION:** Using non-default remotes is experimental and not recommended at
+this time.
+
+``CRAFT_MANAGED_MODE``
+======================
+
+Alerts the application that it is running in managed mode. This should only be
+set by craft-application when creating a provider. Systems designed to wrap
+craft applications may use the :ref:`env-var-craft-build-environment`
+environment variable to make the app run on the host.
diff --git a/docs/reference/index.rst b/docs/reference/index.rst
index b188a0d4..1e7796c6 100644
--- a/docs/reference/index.rst
+++ b/docs/reference/index.rst
@@ -7,6 +7,7 @@ Reference
:maxdepth: 1
changelog
+ environment-variables
platforms
Indices and tables
diff --git a/pyproject.toml b/pyproject.toml
index 0f2868a0..bf8d3186 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,10 +4,12 @@ description = "A framework for *craft applications."
dynamic = ["version", "readme"]
dependencies = [
"craft-archives>=2.0.0",
- "craft-cli>=2.6.0",
+ "craft-cli>=2.10.1",
"craft-grammar>=2.0.0",
"craft-parts>=2.0.0",
- "craft-providers>=2.0.0",
+ "craft-platforms>=0.3.1",
+ "craft-providers>=2.0.4",
+ "Jinja2~=3.1",
"snap-helpers>=0.4.2",
"platformdirs>=3.10",
"pydantic~=2.0",
@@ -22,8 +24,7 @@ dependencies = [
# Pygit2 changelog: https://github.com/libgit2/pygit2/blob/master/CHANGELOG.md
"pygit2>=1.13.0,<1.15.0",
"PyYaml>=6.0",
- # see https://github.com/psf/requests/issues/6707
- "requests<2.32",
+ "requests",
"typing_extensions>=4.4.0",
]
classifiers = [
@@ -48,17 +49,17 @@ remote = [
"launchpadlib>=1.10.16",
]
dev = [
- "coverage[toml]==7.6.1",
+ "coverage[toml]==7.6.3",
"hypothesis>=6.0",
"pyfakefs~=5.3",
- "pytest==8.3.2",
- "pytest-check==2.3.1",
+ "pytest==8.3.3",
+ "pytest-check==2.4.1",
"pytest-cov==5.0.0",
+ "pytest-freezegun==0.4.2",
"pytest-mock==3.14.0",
"pytest-rerunfailures==14.0",
+ "pytest-subprocess~=1.5.2",
"pytest-time>=0.3.1",
- # Pin requests because of https://github.com/msabramo/requests-unixsocket/issues/73
- "requests<2.32.0",
"responses~=0.25.0",
"craft-application[remote]"
]
@@ -68,23 +69,23 @@ lint = [
"yamllint==1.35.1"
]
types = [
- "mypy[reports]==1.9.0",
- "pyright==1.1.376",
+ "mypy[reports]==1.12.0",
+ "pyright==1.1.385",
"types-requests",
"types-urllib3",
]
docs = [
- "canonical-sphinx~=0.1",
- "sphinx-autobuild==2024.4.16",
- "sphinx-lint==0.9.1",
+ "canonical-sphinx~=0.2.0",
+ "sphinx-autobuild==2024.10.3",
+ "sphinx-lint==1.0.0",
]
apt = [
- "python-apt>=2.4.0;sys_platform=='linux'"
+ "python-apt>=2.4.0;sys_platform=='linux'",
]
[build-system]
requires = [
- "setuptools==72.2.0",
+ "setuptools==75.2.0",
"setuptools_scm[toml]>=8.1"
]
build-backend = "setuptools.build_meta"
@@ -141,6 +142,10 @@ xfail_strict = true
markers = [
"enable_features: Tests that require specific features",
]
+filterwarnings = [
+ "ignore:The craft-application BuildPlanner:PendingDeprecationWarning",
+ "ignore:BuildInfo:PendingDeprecationWarning",
+]
[tool.coverage.run]
branch = true
@@ -283,6 +288,7 @@ lint.ignore = [
"ANN", # Ignore type annotations in tests
"S101", # Allow assertions in tests
"S103", # Allow `os.chmod` setting a permissive mask `0o555` on file or directory
+ "S105", "S106", "S107", # Allow hardcoded "passwords" in test files.
"S108", # Allow Probable insecure usage of temporary file or directory
"PLR0913", # Allow many arguments for test functions
"PT004", # Allow fixtures that don't return anything to not start with underscores
diff --git a/tests/conftest.py b/tests/conftest.py
index a656604d..05e9d160 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -19,15 +19,21 @@
import os
import pathlib
import shutil
+import subprocess
+from dataclasses import dataclass
from importlib import metadata
from typing import TYPE_CHECKING, Any
import craft_application
import craft_parts
+import jinja2
+import pydantic
import pytest
-from craft_application import application, models, services, util
+from craft_application import application, git, launchpad, models, services, util
from craft_cli import EmitterMode, emit
from craft_providers import bases
+from jinja2 import FileSystemLoader
+from typing_extensions import override
if TYPE_CHECKING: # pragma: no cover
from collections.abc import Iterator
@@ -58,19 +64,39 @@ def test_with_build_secrets(...)
return features
+class FakeConfigModel(craft_application.ConfigModel):
+
+ my_str: str
+ my_int: int
+ my_bool: bool
+ my_default_str: str = "default"
+ my_default_int: int = -1
+ my_default_bool: bool = True
+ my_default_factory: dict[str, str] = pydantic.Field(
+ default_factory=lambda: {"dict": "yes"}
+ )
+ my_arch: launchpad.Architecture
+
+
+@pytest.fixture(scope="session")
+def fake_config_model() -> type[FakeConfigModel]:
+ return FakeConfigModel
+
+
@pytest.fixture(scope="session")
-def default_app_metadata() -> craft_application.AppMetadata:
+def default_app_metadata(fake_config_model) -> craft_application.AppMetadata:
with pytest.MonkeyPatch.context() as m:
m.setattr(metadata, "version", lambda _: "3.14159")
return craft_application.AppMetadata(
"testcraft",
"A fake app for testing craft-application",
source_ignore_patterns=["*.snap", "*.charm", "*.starcraft"],
+ ConfigModel=fake_config_model,
)
@pytest.fixture
-def app_metadata(features) -> craft_application.AppMetadata:
+def app_metadata(features, fake_config_model) -> craft_application.AppMetadata:
with pytest.MonkeyPatch.context() as m:
m.setattr(metadata, "version", lambda _: "3.14159")
return craft_application.AppMetadata(
@@ -78,7 +104,8 @@ def app_metadata(features) -> craft_application.AppMetadata:
"A fake app for testing craft-application",
source_ignore_patterns=["*.snap", "*.charm", "*.starcraft"],
features=craft_application.AppFeatures(**features),
- docs_url="www.craft-app.com/docs/{version}",
+ docs_url="www.testcraft.example/docs/{version}",
+ ConfigModel=fake_config_model,
)
@@ -89,7 +116,7 @@ def app_metadata_docs(features) -> craft_application.AppMetadata:
return craft_application.AppMetadata(
"testcraft",
"A fake app for testing craft-application",
- docs_url="http://craft-app.com",
+ docs_url="http://testcraft.example",
source_ignore_patterns=["*.snap", "*.charm", "*.starcraft"],
features=craft_application.AppFeatures(**features),
)
@@ -272,15 +299,29 @@ def __init__(
return FakeLifecycleService
+@pytest.fixture
+def fake_init_service_class(tmp_path):
+ class FakeInitService(services.InitService):
+ def _get_loader(self, template_dir: pathlib.Path) -> jinja2.BaseLoader:
+ return FileSystemLoader(tmp_path / "templates" / template_dir)
+
+ return FakeInitService
+
+
@pytest.fixture
def fake_services(
- app_metadata, fake_project, fake_lifecycle_service_class, fake_package_service_class
+ app_metadata,
+ fake_project,
+ fake_lifecycle_service_class,
+ fake_package_service_class,
+ fake_init_service_class,
):
return services.ServiceFactory(
app_metadata,
project=fake_project,
PackageClass=fake_package_service_class,
LifecycleClass=fake_lifecycle_service_class,
+ InitClass=fake_init_service_class,
)
@@ -323,3 +364,121 @@ def mock_pro_api_call(endpoint: str):
)
return set_is_attached, set_enabled_services
+
+
+class FakeApplication(application.Application):
+ """An application class explicitly for testing. Adds some convenient test hooks."""
+
+ platform: str = "unknown-platform"
+ build_on: str = "unknown-build-on"
+ build_for: str | None = "unknown-build-for"
+
+ def set_project(self, project):
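+ # Inject a project by assigning to Application's name-mangled private attribute.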
+ self._Application__project = project
+
+ @override
+ def _extra_yaml_transform(
+ self,
+ yaml_data: dict[str, Any],
+ *,
+ build_on: str,
+ build_for: str | None,
+ ) -> dict[str, Any]:
+ self.build_on = build_on
+ self.build_for = build_for
+
+ return yaml_data
+
+
+@pytest.fixture
+def app(app_metadata, fake_services):
+ return FakeApplication(app_metadata, fake_services)
+
+
+@pytest.fixture
+def manifest_data_dir():
+ return pathlib.Path(__file__).parent / "data/manifest"
+
+
+@pytest.fixture
+def new_dir(tmp_path):
+ """Change to a new temporary directory."""
+ cwd = pathlib.Path.cwd()
+ os.chdir(tmp_path)
+
+ yield tmp_path
+
+ os.chdir(cwd)
+
+
+@pytest.fixture
+def empty_working_directory(
+ tmp_path: pathlib.Path,
+ monkeypatch: pytest.MonkeyPatch,
+) -> pathlib.Path:
+ repo_dir = pathlib.Path(tmp_path, "test-repo")
+ repo_dir.mkdir()
+ monkeypatch.chdir(repo_dir)
+ return repo_dir
+
+
+@pytest.fixture
+def empty_repository(empty_working_directory: pathlib.Path) -> pathlib.Path:
+ subprocess.run(["git", "init"], check=True)
+ return empty_working_directory
+
+
+@dataclass
+class RepositoryDefinition:
+ repository_path: pathlib.Path
+ commit: str
+ tag: str | None = None
+
+ @property
+ def short_commit(self) -> str:
+ """Return abbreviated commit."""
+ return git.short_commit_sha(self.commit)
+
+
+@pytest.fixture
+def repository_with_commit(empty_repository: pathlib.Path) -> RepositoryDefinition:
+ repo = git.GitRepo(empty_repository)
+ (empty_repository / "Some file").touch()
+ repo.add_all()
+ commit_sha = repo.commit("1")
+ return RepositoryDefinition(
+ repository_path=empty_repository,
+ commit=commit_sha,
+ )
+
+
+@pytest.fixture
+def repository_with_annotated_tag(
+ repository_with_commit: RepositoryDefinition,
+) -> RepositoryDefinition:
+ test_tag = "v3.2.1"
+ subprocess.run(
+ ["git", "config", "--local", "user.name", "Testcraft", test_tag], check=True
+ )
+ subprocess.run(
+ ["git", "config", "--local", "user.email", "testcraft@canonical.com", test_tag],
+ check=True,
+ )
+ subprocess.run(["git", "tag", "-a", "-m", "testcraft tag", test_tag], check=True)
+ repository_with_commit.tag = test_tag
+ return repository_with_commit
+
+
+@pytest.fixture
+def repository_with_unannotated_tag(
+ repository_with_commit: RepositoryDefinition,
+) -> RepositoryDefinition:
+ subprocess.run(["git", "config", "--local", "user.name", "Testcraft"], check=True)
+ subprocess.run(
+ ["git", "config", "--local", "user.email", "testcraft@canonical.com"],
+ check=True,
+ )
+ test_tag = "non-annotated"
+ subprocess.run(["git", "tag", test_tag], check=True)
+ repository_with_commit.tag = test_tag
+ return repository_with_commit
diff --git a/tests/data/manifest/craft-manifest-expected.json b/tests/data/manifest/craft-manifest-expected.json
new file mode 100644
index 00000000..c508a551
--- /dev/null
+++ b/tests/data/manifest/craft-manifest-expected.json
@@ -0,0 +1,112 @@
+{
+ "component-name": "full-project",
+ "component-version": "1.0.0.post64+git12345678",
+ "component-description": "A fully-defined craft-application project.",
+ "component-id": {
+ "hashes": {
+ "sha1": "27d3150b433071ec1e2bd5bf04bc6de92b8b12b5",
+ "sha256": "9d7f74856a64282de8cb743fafdba600f18eef2a6f6049746b7cb842e47a3123"
+ }
+ },
+ "architecture": "amd64",
+ "license": "LGPLv3",
+ "metadata-generator": "Craft Application",
+ "creation-timestamp": "2024-09-16T01:02:03.456789+00:00",
+ "dependencies": [
+ {
+ "component-name": "Translation",
+ "component-version": "",
+ "component-description": "",
+ "component-id": {
+ "hashes": {
+ "sha1": "af5834abfa1a537fd383d41f1be33cea47c7b6a7",
+ "sha256": "38cbbf5467682ada956d14168e301a383d96aaa2f1f694cbaa47cee38b47847d"
+ }
+ },
+ "architecture": "",
+ "type": "application/x.apt.translation",
+ "component-author": "Ubuntu",
+ "component-vendor": "Ubuntu",
+ "size": 111544,
+ "url": [
+ "http://archive.ubuntu.com/ubuntu/dists/jammy/multiverse/i18n/by-hash/SHA256/38cbbf5467682ada956d14168e301a383d96aaa2f1f694cbaa47cee38b47847d"
+ ]
+ },
+ {
+ "component-name": "Packages.xz",
+ "component-version": "jammy",
+ "component-description": "jammy main Packages file",
+ "component-id": {
+ "hashes": {
+ "sha1": "370c66437d49460dbc16be011209c4de9977212d",
+ "sha256": "37cb57f1554cbfa71c5a29ee9ffee18a9a8c1782bb0568e0874b7ff4ce8f9c11"
+ }
+ },
+ "architecture": "amd64",
+ "type": "application/x.apt.packages",
+ "component-author": "Ubuntu",
+ "component-vendor": "Ubuntu",
+ "size": 1394768,
+ "url": [
+ "http://archive.ubuntu.com/ubuntu/dists/jammy/main/binary-amd64/by-hash/SHA256/37cb57f1554cbfa71c5a29ee9ffee18a9a8c1782bb0568e0874b7ff4ce8f9c11"
+ ]
+ },
+ {
+ "component-name": "go",
+ "component-version": "10660",
+ "component-description": "The Go programming language",
+ "component-id": {
+ "hashes": {
+ "sha1": "376506001849698af3f9e07a236a47ee8cddded0",
+ "sha256": "cf7e02ebfdaa898107d2dbf84cf1231cee6c244dd5646580d09cfd6f6cf12577"
+ }
+ },
+ "architecture": "amd64",
+ "type": "application/x.canonical.snap-package",
+ "component-author": "",
+ "component-vendor": "Canonical",
+ "size": 64892928,
+ "url": [
+ "https://canonical-bos01.cdn.snapcraftcontent.com:443/download-origin/canonical-lgw01/Md1HBASHzP4i0bniScAjXGnOII9cEK6e_10660.snap?interactive=1&token=1720738800_68d3c27ac109407168ed776e46653c7883b8ef40"
+ ]
+ },
+ {
+ "component-name": "",
+ "component-version": "",
+ "component-description": "",
+ "component-id": {
+ "hashes": {
+ "sha1": "e98024bd74a166549e5dad777a48bb25eb6163a4",
+ "sha256": "d0bb57a19474b186870c05a22a29bda1d51e5e01e3d46a0bef27987ec2354896"
+ }
+ },
+ "architecture": "",
+ "type": "application/x.git.upload-pack-result.fetch",
+ "component-author": "",
+ "component-vendor": "",
+ "size": 473833,
+ "url": [
+ "https://github.com:443/canonical/sphinx-docs-starter-pack.git/git-upload-pack"
+ ]
+ },
+ {
+ "component-name": "",
+ "component-version": "",
+ "component-description": "",
+ "component-id": {
+ "hashes": {
+ "sha1": "4a212f314a41bf21ceac5015da94bb82c39cad60",
+ "sha256": "84256a52948edee146161970dca15a3440c7011ce53ed7363bd0b4aaa8fe07c4"
+ }
+ },
+ "architecture": "",
+ "type": "text/plain; charset=utf-8",
+ "component-author": "",
+ "component-vendor": "",
+ "size": 73,
+ "url": [
+ "https://proxy.golang.org:443/github.com/go-mmap/mmap/@v/v0.7.0.mod"
+ ]
+ }
+ ]
+}
diff --git a/tests/data/manifest/project-expected.yaml b/tests/data/manifest/project-expected.yaml
new file mode 100644
index 00000000..6cb1cdc2
--- /dev/null
+++ b/tests/data/manifest/project-expected.yaml
@@ -0,0 +1,11 @@
+component-name: full-project
+component-version: 1.0.0.post64+git12345678
+component-description: A fully-defined craft-application project.
+component-id:
+ hashes:
+ sha1: 27d3150b433071ec1e2bd5bf04bc6de92b8b12b5
+ sha256: 9d7f74856a64282de8cb743fafdba600f18eef2a6f6049746b7cb842e47a3123
+architecture: amd64
+license: LGPLv3
+metadata-generator: Craft Application
+creation-timestamp: '2024-09-16T01:02:03.456789+00:00'
diff --git a/tests/data/manifest/session-manifest-expected.yaml b/tests/data/manifest/session-manifest-expected.yaml
new file mode 100644
index 00000000..d5228b76
--- /dev/null
+++ b/tests/data/manifest/session-manifest-expected.yaml
@@ -0,0 +1,70 @@
+- component-name: Translation
+ component-version: ''
+ component-description: ''
+ component-id:
+ hashes:
+ sha1: af5834abfa1a537fd383d41f1be33cea47c7b6a7
+ sha256: 38cbbf5467682ada956d14168e301a383d96aaa2f1f694cbaa47cee38b47847d
+ architecture: ''
+ type: application/x.apt.translation
+ component-author: Ubuntu
+ component-vendor: Ubuntu
+ size: 111544
+ url:
+ - http://archive.ubuntu.com/ubuntu/dists/jammy/multiverse/i18n/by-hash/SHA256/38cbbf5467682ada956d14168e301a383d96aaa2f1f694cbaa47cee38b47847d
+- component-name: Packages.xz
+ component-version: jammy
+ component-description: jammy main Packages file
+ component-id:
+ hashes:
+ sha1: 370c66437d49460dbc16be011209c4de9977212d
+ sha256: 37cb57f1554cbfa71c5a29ee9ffee18a9a8c1782bb0568e0874b7ff4ce8f9c11
+ architecture: amd64
+ type: application/x.apt.packages
+ component-author: Ubuntu
+ component-vendor: Ubuntu
+ size: 1394768
+ url:
+ - http://archive.ubuntu.com/ubuntu/dists/jammy/main/binary-amd64/by-hash/SHA256/37cb57f1554cbfa71c5a29ee9ffee18a9a8c1782bb0568e0874b7ff4ce8f9c11
+- component-name: go
+ component-version: '10660'
+ component-description: The Go programming language
+ component-id:
+ hashes:
+ sha1: 376506001849698af3f9e07a236a47ee8cddded0
+ sha256: cf7e02ebfdaa898107d2dbf84cf1231cee6c244dd5646580d09cfd6f6cf12577
+ architecture: amd64
+ type: application/x.canonical.snap-package
+ component-author: ''
+ component-vendor: Canonical
+ size: 64892928
+ url:
+ - https://canonical-bos01.cdn.snapcraftcontent.com:443/download-origin/canonical-lgw01/Md1HBASHzP4i0bniScAjXGnOII9cEK6e_10660.snap?interactive=1&token=1720738800_68d3c27ac109407168ed776e46653c7883b8ef40
+- component-name: ''
+ component-version: ''
+ component-description: ''
+ component-id:
+ hashes:
+ sha1: e98024bd74a166549e5dad777a48bb25eb6163a4
+ sha256: d0bb57a19474b186870c05a22a29bda1d51e5e01e3d46a0bef27987ec2354896
+ architecture: ''
+ type: application/x.git.upload-pack-result.fetch
+ component-author: ''
+ component-vendor: ''
+ size: 473833
+ url:
+ - https://github.com:443/canonical/sphinx-docs-starter-pack.git/git-upload-pack
+- component-name: ''
+ component-version: ''
+ component-description: ''
+ component-id:
+ hashes:
+ sha1: 4a212f314a41bf21ceac5015da94bb82c39cad60
+ sha256: 84256a52948edee146161970dca15a3440c7011ce53ed7363bd0b4aaa8fe07c4
+ architecture: ''
+ type: text/plain; charset=utf-8
+ component-author: ''
+ component-vendor: ''
+ size: 73
+ url:
+ - https://proxy.golang.org:443/github.com/go-mmap/mmap/@v/v0.7.0.mod
diff --git a/tests/data/manifest/session-report.json b/tests/data/manifest/session-report.json
new file mode 100644
index 00000000..bd559f1c
--- /dev/null
+++ b/tests/data/manifest/session-report.json
@@ -0,0 +1,143 @@
+{
+ "THIS IS A STRIPPED DOWN SESSION REPORT FOR TESTING PURPOSES": 1,
+ "comment": "Metadata format is unstable and may change without prior notice.",
+ "session-id": "f17e28e952c84d7c955a1eb5277de201",
+ "policy": "",
+ "artefacts": [
+ {
+ "metadata": {
+ "type": "application/x.apt.translation",
+ "sha1": "af5834abfa1a537fd383d41f1be33cea47c7b6a7",
+ "sha256": "38cbbf5467682ada956d14168e301a383d96aaa2f1f694cbaa47cee38b47847d",
+ "size": 111544,
+ "name": "Translation",
+ "version": "",
+ "vendor": "Ubuntu",
+ "description": "",
+ "author": "Ubuntu",
+ "license": ""
+ },
+ "downloads": [
+ {
+ "url": "http://archive.ubuntu.com/ubuntu/dists/jammy/multiverse/i18n/by-hash/SHA256/38cbbf5467682ada956d14168e301a383d96aaa2f1f694cbaa47cee38b47847d"
+ }
+ ]
+ },
+ {
+ "metadata": {
+ "type": "application/x.apt.packages",
+ "sha1": "370c66437d49460dbc16be011209c4de9977212d",
+ "sha256": "37cb57f1554cbfa71c5a29ee9ffee18a9a8c1782bb0568e0874b7ff4ce8f9c11",
+ "size": 1394768,
+ "name": "Packages.xz",
+ "version": "jammy",
+ "vendor": "Ubuntu",
+ "description": "jammy main Packages file",
+ "author": "Ubuntu",
+ "architecture": "amd64",
+ "license": ""
+ },
+ "downloads": [
+ {
+ "url": "http://archive.ubuntu.com/ubuntu/dists/jammy/main/binary-amd64/by-hash/SHA256/37cb57f1554cbfa71c5a29ee9ffee18a9a8c1782bb0568e0874b7ff4ce8f9c11"
+ }
+ ]
+ },
+ {
+ "metadata": {
+ "type": "application/x.canonical.snap-package",
+ "sha1": "376506001849698af3f9e07a236a47ee8cddded0",
+ "sha256": "cf7e02ebfdaa898107d2dbf84cf1231cee6c244dd5646580d09cfd6f6cf12577",
+ "size": 64892928,
+ "name": "go",
+ "version": "10660",
+ "vendor": "Canonical",
+ "description": "The Go programming language",
+ "author": "",
+ "architecture": "amd64",
+ "license": ""
+ },
+ "downloads": [
+ {
+ "url": "https://canonical-bos01.cdn.snapcraftcontent.com:443/download-origin/canonical-lgw01/Md1HBASHzP4i0bniScAjXGnOII9cEK6e_10660.snap?interactive=1&token=1720738800_68d3c27ac109407168ed776e46653c7883b8ef40"
+ }
+ ]
+ },
+ {
+ "A NON-SHALLOW GIT CLONE": true,
+ "artefact-metadata-version": "0.1",
+ "request-inspection": {
+ "git.upload-pack": {
+ "opinion": "Rejected",
+ "reason": "fetch is only allowed with depth 1"
+ },
+ "go.module.git": {
+ "opinion": "Pending",
+ "reason": "valid URL for go module download"
+ }
+ },
+ "response-inspection": {
+ "git.upload-pack": {
+ "opinion": "Rejected",
+ "reason": "fetch is allowed only on a single ref"
+ },
+ "go.module.git": {
+ "opinion": "Unknown",
+ "reason": "git repository does not contain a go.mod file"
+ }
+ },
+ "result": "Rejected",
+ "metadata": {
+ "type": "application/x.git.upload-pack-result.fetch",
+ "sha1": "e98024bd74a166549e5dad777a48bb25eb6163a4",
+ "sha256": "d0bb57a19474b186870c05a22a29bda1d51e5e01e3d46a0bef27987ec2354896",
+ "size": 473833,
+ "name": "",
+ "version": "",
+ "vendor": "",
+ "description": "",
+ "author": "",
+ "license": ""
+ },
+ "downloads": [
+ {
+ "url": "https://github.com:443/canonical/sphinx-docs-starter-pack.git/git-upload-pack"
+ }
+ ]
+ },
+ {
+ "A GO BUILD WITHOUT GOPROXY=DIRECT": true,
+ "artefact-metadata-version": "0.1",
+ "request-inspection": {
+ "default": {
+ "opinion": "Unknown",
+ "reason": "the request was not recognized by any format inspector"
+ }
+ },
+ "response-inspection": {
+ "default": {
+ "opinion": "Unknown",
+ "reason": "the artefact format is unknown"
+ }
+ },
+ "result": "Rejected",
+ "metadata": {
+ "type": "text/plain; charset=utf-8",
+ "sha1": "4a212f314a41bf21ceac5015da94bb82c39cad60",
+ "sha256": "84256a52948edee146161970dca15a3440c7011ce53ed7363bd0b4aaa8fe07c4",
+ "size": 73,
+ "name": "",
+ "version": "",
+ "vendor": "",
+ "description": "",
+ "author": "",
+ "license": ""
+ },
+ "downloads": [
+ {
+ "url": "https://proxy.golang.org:443/github.com/go-mmap/mmap/@v/v0.7.0.mod"
+ }
+ ]
+ }
+ ]
+}
diff --git a/tests/integration/commands/__init__.py b/tests/integration/commands/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/tests/integration/commands/test_init.py b/tests/integration/commands/test_init.py
new file mode 100644
index 00000000..111502ba
--- /dev/null
+++ b/tests/integration/commands/test_init.py
@@ -0,0 +1,182 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Tests for init command."""
+import os
+import pathlib
+import textwrap
+
+import pytest
+from craft_application.commands import InitCommand
+
+# init operates in the current working directory
+pytestmark = pytest.mark.usefixtures("new_dir")
+
+
+@pytest.fixture(autouse=True)
+def mock_parent_template_dir(tmp_path, mocker):
+ """Mock the parent template directory."""
+ mocker.patch.object(
+ InitCommand,
+ "parent_template_dir",
+ pathlib.Path(tmp_path) / "templates",
+ )
+
+
+@pytest.fixture
+def fake_template_dirs(tmp_path):
+ """Set up a fake template directories with two templates.
+
+ These templates are very simple because the InitService tests focus on the
+ templates themselves.
+ """
+ parent_template_dir = tmp_path / "templates"
+
+ simple_template_file = parent_template_dir / "simple" / "simple-file.j2"
+ simple_template_file.parent.mkdir(parents=True)
+ simple_template_file.write_text("name={{ name }}")
+
+ other_template_file = parent_template_dir / "other-template" / "other-file.j2"
+ other_template_file.parent.mkdir(parents=True)
+ other_template_file.write_text("name={{ name }}")
+
+
+@pytest.mark.parametrize(
+ ("profile", "expected_file"),
+ [
+ (None, pathlib.Path("simple-file")),
+ ("simple", pathlib.Path("simple-file")),
+ ("other-template", pathlib.Path("other-file")),
+ ],
+)
+@pytest.mark.parametrize("project_dir", [None, "project-dir"])
+@pytest.mark.usefixtures("fake_template_dirs")
+def test_init(
+ app,
+ capsys,
+ monkeypatch,
+ profile,
+ expected_file,
+ project_dir,
+ empty_working_directory,
+):
+ """Initialise a project."""
+ monkeypatch.chdir(empty_working_directory)
+ expected_output = "Successfully initialised project"
+ command = ["testcraft", "init"]
+ if profile:
+ command.extend(["--profile", profile])
+ if project_dir:
+ command.append(project_dir)
+ expected_file = pathlib.Path(project_dir) / expected_file
+ monkeypatch.setattr("sys.argv", command)
+
+ return_code = app.run()
+ stdout, _ = capsys.readouterr()
+
+ assert return_code == os.EX_OK
+ assert expected_output in stdout
+ assert expected_file.is_file()
+ # name is not provided, so use the project directory name
+ assert f"name={expected_file.resolve().parent.name}" == expected_file.read_text()
+
+
+@pytest.mark.usefixtures("fake_template_dirs")
+@pytest.mark.parametrize(
+ ("project_dir", "expected_file"),
+ [
+ (None, pathlib.Path("simple-file")),
+ ("project-dir", pathlib.Path("project-dir") / "simple-file"),
+ ],
+)
+def test_init_name(app, capsys, monkeypatch, project_dir, expected_file):
+ """Initialise a project with a name."""
+ expected_output = "Successfully initialised project"
+ command = ["testcraft", "init", "--name", "test-project-name"]
+ if project_dir:
+ command.append(project_dir)
+ monkeypatch.setattr("sys.argv", command)
+
+ return_code = app.run()
+ stdout, _ = capsys.readouterr()
+
+ assert return_code == os.EX_OK
+ assert expected_output in stdout
+ assert expected_file.is_file()
+ assert expected_file.read_text() == "name=test-project-name"
+
+
+@pytest.mark.usefixtures("fake_template_dirs")
+def test_init_invalid_profile(app, capsys, monkeypatch):
+ """Give a helpful error message for invalid profiles."""
+ expected_error = "Error: argument --profile: invalid choice: 'bad' (choose from 'other-template', 'simple')"
+ monkeypatch.setattr("sys.argv", ["testcraft", "init", "--profile", "bad"])
+
+ return_code = app.run()
+ _, stderr = capsys.readouterr()
+
+ assert return_code == os.EX_USAGE
+ assert expected_error in stderr
+
+
+@pytest.mark.usefixtures("fake_template_dirs")
+def test_init_overlapping_file(app, capsys, monkeypatch, tmp_path):
+ """Give a helpful error message if a file would be overwritten."""
+ pathlib.Path("simple-file").touch()
+ expected_error = textwrap.dedent(
+ f"""
+ Cannot initialise project in {str(tmp_path)!r} because it would overwrite existing files.
+ Existing files are:
+ - simple-file
+ Recommended resolution: Initialise the project in an empty directory or remove the existing files."""
+ )
+ monkeypatch.setattr("sys.argv", ["testcraft", "init", "--profile", "simple"])
+
+ return_code = app.run()
+ _, stderr = capsys.readouterr()
+
+ assert return_code == os.EX_CANTCREAT
+ assert expected_error in stderr
+
+
+@pytest.mark.usefixtures("fake_template_dirs")
+def test_init_nonoverlapping_file(app, capsys, monkeypatch):
+ """Files can exist in the project directory if they won't be overwritten."""
+ expected_output = "Successfully initialised project"
+ pathlib.Path("unrelated-file").touch()
+ monkeypatch.setattr("sys.argv", ["testcraft", "init", "--profile", "simple"])
+
+ return_code = app.run()
+ stdout, _ = capsys.readouterr()
+
+ assert return_code == os.EX_OK
+ assert expected_output in stdout
+ assert pathlib.Path("simple-file").is_file()
+
+
+@pytest.mark.usefixtures("fake_template_dirs")
+def test_init_invalid_directory(app, monkeypatch, tmp_path):
+ """A default name is used if the project dir is not a valid project name."""
+ invalid_dir = tmp_path / "invalid--name"
+ invalid_dir.mkdir()
+ monkeypatch.chdir(invalid_dir)
+
+ monkeypatch.setattr("sys.argv", ["testcraft", "init", "--profile", "simple"])
+ return_code = app.run()
+
+ assert return_code == os.EX_OK
+ expected_file = invalid_dir / "simple-file"
+ assert expected_file.read_text() == "name=my-project"
diff --git a/tests/integration/data/build-secrets/testcraft.yaml b/tests/integration/data/build-secrets/testcraft.yaml
index ba649b06..5650369b 100644
--- a/tests/integration/data/build-secrets/testcraft.yaml
+++ b/tests/integration/data/build-secrets/testcraft.yaml
@@ -7,7 +7,6 @@ platforms:
arm64:
armhf:
i386:
- powerpc:
ppc64el:
riscv64:
s390x:
diff --git a/tests/integration/data/invalid_projects/build-error/testcraft.yaml b/tests/integration/data/invalid_projects/build-error/testcraft.yaml
index 33e45d52..0d3f9bd3 100644
--- a/tests/integration/data/invalid_projects/build-error/testcraft.yaml
+++ b/tests/integration/data/invalid_projects/build-error/testcraft.yaml
@@ -7,7 +7,6 @@ platforms:
arm64:
armhf:
i386:
- powerpc:
ppc64el:
riscv64:
s390x:
diff --git a/tests/integration/data/valid_projects/adoption/testcraft.yaml b/tests/integration/data/valid_projects/adoption/testcraft.yaml
index 4cbf1d76..1f147afb 100644
--- a/tests/integration/data/valid_projects/adoption/testcraft.yaml
+++ b/tests/integration/data/valid_projects/adoption/testcraft.yaml
@@ -6,7 +6,6 @@ platforms:
arm64:
armhf:
i386:
- powerpc:
ppc64el:
riscv64:
s390x:
diff --git a/tests/integration/data/valid_projects/basic/testcraft.yaml b/tests/integration/data/valid_projects/basic/testcraft.yaml
index 88da18b9..8ed5b9cd 100644
--- a/tests/integration/data/valid_projects/basic/testcraft.yaml
+++ b/tests/integration/data/valid_projects/basic/testcraft.yaml
@@ -7,7 +7,6 @@ platforms:
arm64:
armhf:
i386:
- powerpc:
ppc64el:
riscv64:
s390x:
diff --git a/tests/integration/data/valid_projects/build-for-all/testcraft.yaml b/tests/integration/data/valid_projects/build-for-all/testcraft.yaml
index a774850b..222be5cb 100644
--- a/tests/integration/data/valid_projects/build-for-all/testcraft.yaml
+++ b/tests/integration/data/valid_projects/build-for-all/testcraft.yaml
@@ -4,7 +4,7 @@ version: 1.0
base: "ubuntu@22.04"
platforms:
platform1:
- build-on: [amd64, arm64, armhf, i386, powerpc, ppc64el, riscv64, s390x]
+ build-on: [amd64, arm64, armhf, i386, ppc64el, riscv64, s390x]
build-for: [all]
parts:
diff --git a/tests/integration/data/valid_projects/environment/testcraft.yaml b/tests/integration/data/valid_projects/environment/testcraft.yaml
index 0e7474e2..a0e995db 100644
--- a/tests/integration/data/valid_projects/environment/testcraft.yaml
+++ b/tests/integration/data/valid_projects/environment/testcraft.yaml
@@ -7,7 +7,6 @@ platforms:
arm64:
armhf:
i386:
- powerpc:
ppc64el:
riscv64:
s390x:
@@ -23,5 +22,6 @@ parts:
echo "project_version: \"${CRAFT_PROJECT_VERSION}\"" >> $target_file
echo "arch_build_for: \"${CRAFT_ARCH_BUILD_FOR}\"" >> $target_file
echo "arch_triplet_build_for: \"${CRAFT_ARCH_TRIPLET_BUILD_FOR}\"" >> $target_file
- echo "arch_build_on: \"${CRAFT_ARCH_BUILD_ON}\"" >> $target_file
- echo "arch_triplet_build_on: \"${CRAFT_ARCH_TRIPLET_BUILD_ON}\"" >> $target_file
+ echo "arch_build_on: \"${CRAFT_ARCH_BUILD_ON}\"" >> $target_file
+ echo "arch_triplet_build_on: \"${CRAFT_ARCH_TRIPLET_BUILD_ON}\"" >> $target_file
+ echo "parallel_build_count: \"${CRAFT_PARALLEL_BUILD_COUNT}\"" >> $target_file
diff --git a/tests/integration/data/valid_projects/grammar/testcraft.yaml b/tests/integration/data/valid_projects/grammar/testcraft.yaml
index 2e4be47e..574d00f2 100644
--- a/tests/integration/data/valid_projects/grammar/testcraft.yaml
+++ b/tests/integration/data/valid_projects/grammar/testcraft.yaml
@@ -7,7 +7,6 @@ platforms:
arm64:
armhf:
i386:
- powerpc:
ppc64el:
riscv64:
s390x:
diff --git a/tests/integration/services/test_fetch.py b/tests/integration/services/test_fetch.py
new file mode 100644
index 00000000..92b2028f
--- /dev/null
+++ b/tests/integration/services/test_fetch.py
@@ -0,0 +1,366 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""Tests for FetchService."""
+import contextlib
+import io
+import json
+import pathlib
+import shutil
+import socket
+import textwrap
+from functools import cache
+from unittest import mock
+
+import craft_providers
+import pytest
+from craft_application import errors, fetch, services, util
+from craft_application.application import DEFAULT_CLI_LOGGERS
+from craft_application.models import BuildInfo
+from craft_application.services.fetch import _PROJECT_MANIFEST_MANAGED_PATH
+from craft_cli import EmitterMode, emit
+from craft_providers import bases
+
+
+@cache
+def _get_fake_certificate_dir():
+ base_dir = fetch._get_service_base_dir()
+
+ return base_dir / "test-craft-app/fetch-certificate"
+
+
+@pytest.fixture(autouse=True, scope="session")
+def _set_test_certificate_dir():
+ """A session-scoped fixture so that we generate the certificate only once"""
+ cert_dir = _get_fake_certificate_dir()
+ if cert_dir.is_dir():
+ shutil.rmtree(cert_dir)
+
+ with mock.patch.object(fetch, "_get_certificate_dir", return_value=cert_dir):
+ fetch._obtain_certificate()
+
+
+@pytest.fixture(autouse=True)
+def _set_test_base_dirs(mocker):
+ original = fetch._get_service_base_dir()
+ test_dir = original / "test"
+ if test_dir.exists():
+ shutil.rmtree(test_dir)
+ test_dir.mkdir()
+ mocker.patch.object(fetch, "_get_service_base_dir", return_value=test_dir)
+
+ cert_dir = _get_fake_certificate_dir()
+ mocker.patch.object(fetch, "_get_certificate_dir", return_value=cert_dir)
+
+
+@pytest.fixture
+def mock_instance():
+ @contextlib.contextmanager
+ def temporarily_pull_file(*, source, missing_ok): # noqa: ARG001 (unused arguments)
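+ # Behave as if the requested file does not exist in the instance.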
+ yield None
+
+ instance = mock.Mock(spec=craft_providers.Executor)
+ instance.temporarily_pull_file = temporarily_pull_file
+
+ return instance
+
+
+@pytest.fixture
+def app_service(app_metadata, fake_services, fake_project, fake_build_plan):
+ fetch_service = services.FetchService(
+ app_metadata,
+ fake_services,
+ project=fake_project,
+ build_plan=fake_build_plan,
+ session_policy="permissive",
+ )
+ yield fetch_service
+ fetch_service.shutdown(force=True)
+
+
+def test_start_service(app_service):
+ assert not fetch.is_service_online()
+ app_service.setup()
+ assert fetch.is_service_online()
+
+
+def test_start_service_already_up(app_service, request):
+ # Create a fetch-service "manually"
+ fetch_process = fetch.start_service()
+ assert fetch.is_service_online()
+ # Ensure it's cleaned up when the test is done
+ if fetch_process is not None:
+ request.addfinalizer(lambda: fetch.stop_service(fetch_process))
+
+ app_service.setup()
+ assert fetch.is_service_online()
+
+
+@pytest.mark.parametrize(
+ "port",
+ [
+ pytest.param(
+ fetch._DEFAULT_CONFIG.control,
+ marks=pytest.mark.xfail(
+ reason="Needs https://github.com/canonical/fetch-service/issues/208 fixed",
+ strict=True,
+ ),
+ ),
+ fetch._DEFAULT_CONFIG.proxy,
+ ],
+)
+def test_start_service_port_taken(app_service, request, port):
+ # "Occupy" one of the necessary ports manually.
+ soc = socket.create_server(("localhost", port), reuse_port=True)
+ request.addfinalizer(soc.close)
+
+ assert not fetch.is_service_online()
+
+ proxy = fetch._DEFAULT_CONFIG.proxy
+ control = fetch._DEFAULT_CONFIG.control
+
+ expected = f"fetch-service ports {proxy} and {control} are already in use."
+ with pytest.raises(errors.FetchServiceError, match=expected):
+ app_service.setup()
+
+
+def test_shutdown_service(app_service):
+ assert not fetch.is_service_online()
+
+ app_service.setup()
+ assert fetch.is_service_online()
+
+ # By default, shutdown() without parameters doesn't actually stop the
+ # fetch-service.
+ app_service.shutdown()
+ assert fetch.is_service_online()
+
+ # shutdown(force=True) must stop the fetch-service.
+ app_service.shutdown(force=True)
+ assert not fetch.is_service_online()
+
+
+def test_create_teardown_session(
+ app_service, mocker, tmp_path, monkeypatch, mock_instance
+):
+ monkeypatch.chdir(tmp_path)
+ mocker.patch.object(fetch, "_get_gateway", return_value="127.0.0.1")
+ app_service.setup()
+
+ assert len(fetch.get_service_status()["active-sessions"]) == 0
+
+ app_service.create_session(instance=mock_instance)
+ assert len(fetch.get_service_status()["active-sessions"]) == 1
+
+ report = app_service.teardown_session()
+ assert len(fetch.get_service_status()["active-sessions"]) == 0
+
+ assert "artefacts" in report
+
+
+def test_service_logging(app_service, mocker, tmp_path, monkeypatch, mock_instance):
+ monkeypatch.chdir(tmp_path)
+ mocker.patch.object(fetch, "_get_gateway", return_value="127.0.0.1")
+
+ # Mock get_log_filepath() so that the test doesn't interfere with whatever
+ # fetch-service is running on the system.
+ original_logpath = fetch.get_log_filepath()
+ mocker.patch.object(
+ fetch,
+ "get_log_filepath",
+ return_value=original_logpath.with_name("fetch-testcraft.log"),
+ )
+
+ logfile = fetch.get_log_filepath()
+ logfile.unlink(missing_ok=True)
+
+ app_service.setup()
+
+ # Create and teardown two sessions
+ app_service.create_session(mock_instance)
+ app_service.teardown_session()
+ app_service.create_session(mock_instance)
+ app_service.teardown_session()
+
+ # Check the logfile for the creation/deletion of the two sessions
+ expected = 2
+ assert logfile.is_file()
+ lines = logfile.read_text().splitlines()
+ create = discard = 0
+ for line in lines:
+ if "creating session" in line:
+ create += 1
+ if "discarding session" in line:
+ discard += 1
+ assert create == discard == expected
+
+
+# Bash script to set up the build instance before the actual testing.
+setup_environment = (
+ textwrap.dedent(
+ """
+ #! /bin/bash
+ set -euo pipefail
+
+ apt install -y python3.10-venv
+ python3 -m venv venv
+ venv/bin/pip install requests
+"""
+ )
+ .strip()
+ .encode("ascii")
+)
+
+wheel_url = (
+ "https://files.pythonhosted.org/packages/0f/ec/"
+ "a9b769274512ea65d8484c2beb8c3d2686d1323b450ce9ee6d09452ac430/"
+ "craft_application-3.0.0-py3-none-any.whl"
+)
+# Bash script to fetch the craft-application wheel.
+check_requests = (
+ textwrap.dedent(
+ f"""
+ #! /bin/bash
+ set -euo pipefail
+
+ venv/bin/python -c "import requests; requests.get('{wheel_url}').raise_for_status()"
+"""
+ )
+ .strip()
+ .encode("ascii")
+)
+
+
+@pytest.fixture
+def lxd_instance(snap_safe_tmp_path, provider_service):
+ provider_service.get_provider("lxd")
+
+ arch = util.get_host_architecture()
+ build_info = BuildInfo("foo", arch, arch, bases.BaseName("ubuntu", "22.04"))
+ instance = provider_service.instance(build_info, work_dir=snap_safe_tmp_path)
+
+ with instance as executor:
+ executor.push_file_io(
+ destination=pathlib.Path("/root/setup-environment.sh"),
+ content=io.BytesIO(setup_environment),
+ file_mode="0644",
+ )
+ executor.execute_run(
+ ["bash", "/root/setup-environment.sh"],
+ check=True,
+ capture_output=True,
+ )
+ yield executor
+
+ if executor is not None:
+ with contextlib.suppress(craft_providers.ProviderError):
+ executor.delete()
+
+
+def test_build_instance_integration(
+ app_service,
+ lxd_instance,
+ tmp_path,
+ monkeypatch,
+ fake_project,
+ manifest_data_dir,
+ capsys,
+):
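+    """Install a deb and fetch a wheel through the fetch-service, then check the manifest."""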
+ emit.init(EmitterMode.BRIEF, "testcraft", "hi", streaming_brief=True)
+ util.setup_loggers(*DEFAULT_CLI_LOGGERS)
+ monkeypatch.chdir(tmp_path)
+
+ app_service.setup()
+
+ env = app_service.create_session(lxd_instance)
+
+ try:
+ # Install the hello Ubuntu package.
+ lxd_instance.execute_run(
+ ["apt", "install", "-y", "hello"], check=True, env=env, capture_output=True
+ )
+
+ # Download the craft-application wheel.
+ lxd_instance.push_file_io(
+ destination=pathlib.Path("/root/check-requests.sh"),
+ content=io.BytesIO(check_requests),
+ file_mode="0644",
+ )
+ lxd_instance.execute_run(
+ ["bash", "/root/check-requests.sh"],
+ check=True,
+ env=env,
+ capture_output=True,
+ )
+
+ # Write the "project" manifest inside the instance, as if a regular
+ # packing was taking place
+ lxd_instance.push_file(
+ source=manifest_data_dir / "project-expected.yaml",
+ destination=_PROJECT_MANIFEST_MANAGED_PATH,
+ )
+
+ finally:
+ report = app_service.teardown_session()
+
+ artefacts_and_types: list[tuple[str, str]] = []
+
+ for artefact in report["artefacts"]:
+ metadata_name = artefact["metadata"]["name"]
+ metadata_type = artefact["metadata"]["type"]
+
+ artefacts_and_types.append((metadata_name, metadata_type))
+
+ # Check that the installation of the "hello" deb went through the inspector.
+ assert ("hello", "application/vnd.debian.binary-package") in artefacts_and_types
+
+ # Check that the fetching of the "craft-application" wheel went through the inspector.
+ assert ("craft-application", "application/x.python.wheel") in artefacts_and_types
+
+ manifest_path = tmp_path / f"{fake_project.name}_{fake_project.version}_foo.json"
+ assert manifest_path.is_file()
+
+ with manifest_path.open("r") as f:
+ manifest_data = json.load(f)
+
+ # Check metadata of the "artifact"
+ assert manifest_data["component-name"] == fake_project.name
+ assert manifest_data["component-version"] == fake_project.version
+ assert manifest_data["architecture"] == "amd64"
+
+ dependencies = {}
+ for dep in manifest_data["dependencies"]:
+ dependencies[dep["component-name"]] = dep
+
+ # Check some of the dependencies
+ assert dependencies["hello"]["type"] == "application/vnd.debian.binary-package"
+ assert dependencies["craft-application"]["type"] == "application/x.python.wheel"
+ assert dependencies["craft-application"]["component-version"] == "3.0.0"
+
+ # Note: the messages don't show up as
+ # 'Configuring fetch-service integration :: Installing certificate' noqa: ERA001 (commented-out code)
+ # because streaming-brief is disabled in non-terminal runs.
+ expected_err = textwrap.dedent(
+ """\
+ Configuring fetch-service integration
+ Installing certificate
+ Configuring pip
+ Configuring snapd
+ Configuring Apt
+ Refreshing Apt package listings
+ """
+ )
+ _, captured_err = capsys.readouterr()
+ assert expected_err in captured_err
diff --git a/tests/integration/services/test_init.py b/tests/integration/services/test_init.py
new file mode 100644
index 00000000..6ed4d72b
--- /dev/null
+++ b/tests/integration/services/test_init.py
@@ -0,0 +1,560 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Tests for init service."""
+
+import pathlib
+import subprocess
+import sys
+import textwrap
+
+import pytest
+from craft_application import errors
+from craft_application.models.project import Project
+from craft_application.services import InitService
+
+from tests.conftest import RepositoryDefinition
+
+# init operates in the current working directory
+pytestmark = pytest.mark.usefixtures("new_dir")
+
+
+@pytest.fixture
+def init_service(fake_init_service_class, app_metadata, fake_services):
+ _init_service = fake_init_service_class(app_metadata, fake_services)
+ _init_service.setup()
+
+ return _init_service
+
+
+@pytest.fixture
+def fake_empty_template_dir(tmp_path) -> pathlib.Path:
+ empty_template_dir_path = pathlib.Path(tmp_path / "templates")
+ empty_template_dir_path.mkdir(parents=True)
+ return empty_template_dir_path
+
+
+@pytest.fixture
+def project_yaml_filename() -> str:
+ return "testcraft.yaml"
+
+
+def get_testcraft_yaml(*, version: str = "git") -> str:
+ return textwrap.dedent(
+ """
+ # This file configures testcraft.
+
+ # (Required)
+ name: {{ name }}
+
+ # (Required)
+ # The source package version
+ version: <>
+
+ # (Required)
+ # Version of the build base OS
+ base: ubuntu@24.04
+
+ # (Recommended)
+ title: Testcraft Template Package
+
+ # (Required)
+ summary: A very short one-line summary of the package.
+
+ # (Required)
+ description: |
+ A single sentence that says what the source is, concisely and memorably.
+
+ A paragraph of one to three short sentences, that describe what the package does.
+
+ A third paragraph that explains what need the package meets.
+
+ Finally, a paragraph that describes whom the package is useful for.
+
+
+ parts:
+ {{ name }}:
+ plugin: nil
+ source: .
+ platforms:
+ amd64:
+ """.replace(
+ "<>", version
+ )
+ )
+
+
+@pytest.fixture
+def template_dir_with_testcraft_yaml_j2(
+ fake_empty_template_dir: pathlib.Path,
+ project_yaml_filename: str,
+) -> pathlib.Path:
+ """Creates the same testcraft.yaml file in the top-level and nested directories.
+
+ Normally a project would only have one testcraft.yaml file, but two are created for testing.
+ """
+ template_text = get_testcraft_yaml()
+ top_level_template = fake_empty_template_dir / f"{project_yaml_filename}.j2"
+ top_level_template.write_text(template_text)
+ nested_template = fake_empty_template_dir / "nested" / f"{project_yaml_filename}.j2"
+ nested_template.parent.mkdir()
+ nested_template.write_text(template_text)
+
+ return fake_empty_template_dir
+
+
+@pytest.fixture
+def template_dir_with_versioned_testcraft_yaml_j2(
+ fake_empty_template_dir: pathlib.Path,
+ project_yaml_filename: str,
+) -> pathlib.Path:
+    """Creates the testcraft.yaml with a {{ version }} marker."""
+ template_text = get_testcraft_yaml(version="{{ version }}")
+ top_level_template = fake_empty_template_dir / f"{project_yaml_filename}.j2"
+ top_level_template.write_text(template_text)
+
+ return fake_empty_template_dir
+
+
+@pytest.fixture
+def template_dir_with_multiple_non_jinja_files(
+ fake_empty_template_dir: pathlib.Path,
+) -> pathlib.Path:
+ file_1 = fake_empty_template_dir / "file1.txt"
+ file_1.write_text("Content of file1.txt")
+ file_2 = fake_empty_template_dir / "nested" / "file2.txt"
+ file_2.parent.mkdir()
+ file_2.write_text("Content of the nested file")
+ return fake_empty_template_dir
+
+
+@pytest.fixture
+def template_dir_with_symlinks(
+ template_dir_with_testcraft_yaml_j2: pathlib.Path,
+) -> pathlib.Path:
+ symlink_to_python_executable = template_dir_with_testcraft_yaml_j2 / "py3_symlink"
+ symlink_to_python_executable.symlink_to(sys.executable)
+ return template_dir_with_testcraft_yaml_j2
+
+
+@pytest.fixture
+def template_dir_with_executables(
+ fake_empty_template_dir: pathlib.Path,
+) -> pathlib.Path:
+ """Create executable templated and non-templated files."""
+ for filename in [
+ "file.sh",
+ "nested/file.sh",
+ "template.sh.j2",
+ "nested/template.sh.j2",
+ ]:
+ filepath = fake_empty_template_dir / filename
+ filepath.parent.mkdir(exist_ok=True)
+ with filepath.open("wt", encoding="utf8") as file:
+ file.write("#!/bin/bash\necho 'Hello, world!'")
+ filepath.chmod(0o755)
+
+ return fake_empty_template_dir
+
+
+@pytest.fixture
+def fake_empty_project_dir(tmp_path) -> pathlib.Path:
+ empty_project_dir_path = pathlib.Path(tmp_path / "fake-project-dir")
+ empty_project_dir_path.mkdir()
+ return empty_project_dir_path
+
+
+@pytest.fixture
+def non_empty_project_dir(tmp_path) -> pathlib.Path:
+ non_empty_project_dir_path = pathlib.Path(tmp_path / "fake-non-empty-project-dir")
+ non_empty_project_dir_path.mkdir()
+ (non_empty_project_dir_path / "some_project_file").touch()
+ return non_empty_project_dir_path
+
+
+@pytest.mark.usefixtures("fake_empty_template_dir")
+def test_init_works_with_empty_templates_dir(
+ init_service: InitService,
+ fake_empty_project_dir: pathlib.Path,
+ fake_empty_template_dir: pathlib.Path,
+ emitter,
+ check,
+):
+ """Initialise a project with an empty templates directory."""
+ init_service.initialise_project(
+ project_dir=fake_empty_project_dir,
+ project_name="fake-project-dir",
+ template_dir=fake_empty_template_dir,
+ )
+
+ with check:
+ assert emitter.assert_progress("Rendered project.")
+ with check:
+ assert not list(
+ fake_empty_project_dir.iterdir()
+ ), "Project dir should be initialised empty"
+
+
+def test_init_works_with_simple_template(
+ init_service: InitService,
+ fake_empty_project_dir: pathlib.Path,
+ template_dir_with_testcraft_yaml_j2: pathlib.Path,
+ project_yaml_filename: str,
+ emitter,
+ check,
+):
+ """Initialise a project with a simple project template."""
+ init_service.initialise_project(
+ project_dir=fake_empty_project_dir,
+ project_name="fake-project-dir",
+ template_dir=template_dir_with_testcraft_yaml_j2,
+ )
+
+ with check:
+ assert emitter.assert_progress("Rendered project.")
+
+ project_yaml_paths = [
+ fake_empty_project_dir / project_yaml_filename,
+ fake_empty_project_dir / "nested" / project_yaml_filename,
+ ]
+
+ for project_yaml_path in project_yaml_paths:
+ with check:
+ assert (
+ project_yaml_path.exists()
+ ), "Project should be initialised with template"
+ project = Project.from_yaml_file(project_yaml_path)
+ assert project.name == fake_empty_project_dir.name
+
+
+def test_init_works_with_single_template_and_custom_name(
+ init_service: InitService,
+ fake_empty_project_dir: pathlib.Path,
+ template_dir_with_testcraft_yaml_j2: pathlib.Path,
+ project_yaml_filename: str,
+ emitter,
+ check,
+):
+ """Initialise a project with a single template file and custom name."""
+ name = "some-other-test-project"
+ init_service.initialise_project(
+ project_dir=fake_empty_project_dir,
+ project_name=name,
+ template_dir=template_dir_with_testcraft_yaml_j2,
+ )
+
+ with check:
+ assert emitter.assert_progress("Rendered project.")
+
+ project_yaml_path = pathlib.Path(fake_empty_project_dir, project_yaml_filename)
+
+ with check:
+ assert project_yaml_path.exists(), "Project should be initialised with template"
+ project = Project.from_yaml_file(project_yaml_path)
+ with check:
+ assert project.name == name
+
+
+def check_file_existence_and_content(
+ check, file_path: pathlib.Path, content: str
+) -> None:
+ """Helper function to ensure a file exists and has the correct content."""
+ with check:
+ assert file_path.exists(), f"{file_path.name} should be created"
+
+ with check:
+ assert file_path.read_text() == content, f"{file_path.name} incorrect content"
+
+
+def test_init_works_with_non_jinja2_templates(
+ init_service: InitService,
+ fake_empty_project_dir: pathlib.Path,
+    template_dir_with_multiple_non_jinja_files: pathlib.Path,
+ emitter,
+ check,
+):
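+    """Initialise a project from plain (non-Jinja2) template files."""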
+ init_service.initialise_project(
+ project_dir=fake_empty_project_dir,
+ project_name="fake-project-dir",
+        template_dir=template_dir_with_multiple_non_jinja_files,
+ )
+
+ with check:
+ assert emitter.assert_progress("Rendered project.")
+
+ check_file_existence_and_content(
+ check, fake_empty_project_dir / "file1.txt", "Content of file1.txt"
+ )
+ check_file_existence_and_content(
+ check,
+ fake_empty_project_dir / "nested" / "file2.txt",
+ "Content of the nested file",
+ )
+
+
+def test_init_does_not_follow_symlinks_but_copies_them_as_is(
+ init_service: InitService,
+ fake_empty_project_dir: pathlib.Path,
+ template_dir_with_symlinks: pathlib.Path,
+ project_yaml_filename: str,
+ check,
+):
+ init_service.initialise_project(
+ project_dir=fake_empty_project_dir,
+ project_name="fake-project-dir",
+ template_dir=template_dir_with_symlinks,
+ )
+
+ project = Project.from_yaml_file(fake_empty_project_dir / project_yaml_filename)
+ with check:
+ assert project.name == fake_empty_project_dir.name
+ with check:
+ assert (
+ fake_empty_project_dir / "py3_symlink"
+ ).is_symlink(), "Symlink should be left intact."
+
+
+def test_init_copies_executables(
+ init_service: InitService,
+ fake_empty_project_dir: pathlib.Path,
+ template_dir_with_executables: pathlib.Path,
+ check,
+):
+ """Executability of template files should be preserved."""
+ init_service.initialise_project(
+ project_dir=fake_empty_project_dir,
+ project_name="fake-project-dir",
+ template_dir=template_dir_with_executables,
+ )
+
+ for filename in ["file.sh", "nested/file.sh", "template.sh", "nested/template.sh"]:
+ with check:
+ assert (
+ subprocess.check_output(
+ [str(fake_empty_project_dir / filename)], text=True
+ )
+ == "Hello, world!\n"
+ )
+
+
+def test_init_does_not_fail_on_non_empty_dir(
+ init_service: InitService,
+ non_empty_project_dir: pathlib.Path,
+ template_dir_with_testcraft_yaml_j2: pathlib.Path,
+ project_yaml_filename: str,
+ emitter,
+ check,
+):
+ init_service.initialise_project(
+ project_dir=non_empty_project_dir,
+ project_name="fake-non-empty-project-dir",
+ template_dir=template_dir_with_testcraft_yaml_j2,
+ )
+
+ with check:
+ assert emitter.assert_progress("Rendered project.")
+
+ project_yaml_path = non_empty_project_dir / project_yaml_filename
+
+ with check:
+ assert project_yaml_path.exists(), "Project should be initialised with template"
+ project = Project.from_yaml_file(project_yaml_path)
+ with check:
+ assert project.name == non_empty_project_dir.name
+
+
+def test_init_does_not_override_existing_craft_yaml(
+ init_service: InitService,
+ non_empty_project_dir: pathlib.Path,
+ template_dir_with_testcraft_yaml_j2,
+ project_yaml_filename: str,
+ fake_project: Project,
+ emitter,
+ check,
+):
+ fake_project.to_yaml_file(non_empty_project_dir / project_yaml_filename)
+
+ init_service.initialise_project(
+ project_dir=non_empty_project_dir,
+ project_name="fake-project-dir",
+ template_dir=template_dir_with_testcraft_yaml_j2,
+ )
+
+ with check:
+ assert emitter.assert_progress("Rendered project.")
+
+ project_yaml_path = non_empty_project_dir / project_yaml_filename
+
+ with check:
+ assert project_yaml_path.exists(), "Project should be initialised with template"
+ project = Project.from_yaml_file(project_yaml_path)
+ with check:
+ assert project.name == fake_project.name
+
+
+def test_init_with_different_name_and_directory(
+ init_service: InitService,
+ fake_empty_project_dir: pathlib.Path,
+ template_dir_with_testcraft_yaml_j2: pathlib.Path,
+ project_yaml_filename: str,
+ emitter,
+ check,
+):
+ name = "some-custom-project"
+
+ init_service.initialise_project(
+ project_dir=fake_empty_project_dir,
+ project_name=name,
+ template_dir=template_dir_with_testcraft_yaml_j2,
+ )
+
+ with check:
+ assert emitter.assert_progress("Rendered project.")
+
+ project_yaml_path = fake_empty_project_dir / project_yaml_filename
+
+ with check:
+ assert project_yaml_path.exists(), "Project should be initialised with template"
+ project = Project.from_yaml_file(project_yaml_path)
+ with check:
+ assert project.name == name
+
+
+def test_init_with_default_arguments_uses_current_directory(
+ init_service: InitService,
+ fake_empty_project_dir: pathlib.Path,
+ template_dir_with_testcraft_yaml_j2: pathlib.Path,
+ project_yaml_filename: str,
+ emitter,
+ check,
+):
+ expected_project_name = fake_empty_project_dir.name
+
+ init_service.initialise_project(
+ project_dir=fake_empty_project_dir,
+ project_name="fake-project-dir",
+ template_dir=template_dir_with_testcraft_yaml_j2,
+ )
+
+ with check:
+ assert emitter.assert_progress("Rendered project.")
+
+ project_yaml_path = fake_empty_project_dir / project_yaml_filename
+
+ with check:
+ assert project_yaml_path.exists(), "Project should be initialised with template"
+ project = Project.from_yaml_file(project_yaml_path)
+ with check:
+ assert project.name == expected_project_name
+
+
+def test_check_for_existing_files(
+ init_service: InitService,
+ fake_empty_project_dir: pathlib.Path,
+ template_dir_with_testcraft_yaml_j2: pathlib.Path,
+):
+ """No-op if there are no overlapping files."""
+ init_service.check_for_existing_files(
+ project_dir=fake_empty_project_dir,
+ template_dir=template_dir_with_testcraft_yaml_j2,
+ )
+
+
+def test_check_for_existing_files_error(
+ init_service: InitService,
+ fake_empty_project_dir: pathlib.Path,
+ template_dir_with_testcraft_yaml_j2: pathlib.Path,
+):
+    """Error out when template files would overwrite existing project files."""
+ expected_error = textwrap.dedent(
+ f"""\
+ Cannot initialise project in {str(fake_empty_project_dir)!r} because it would overwrite existing files.
+ Existing files are:
+ - nested/testcraft.yaml
+ - testcraft.yaml"""
+ )
+ (fake_empty_project_dir / "testcraft.yaml").touch()
+ (fake_empty_project_dir / "nested").mkdir()
+ (fake_empty_project_dir / "nested" / "testcraft.yaml").touch()
+
+ with pytest.raises(errors.InitError, match=expected_error):
+ init_service.check_for_existing_files(
+ project_dir=fake_empty_project_dir,
+ template_dir=template_dir_with_testcraft_yaml_j2,
+ )
+
+
+def test_init_service_with_version_without_git_repository(
+ init_service: InitService,
+ empty_working_directory: pathlib.Path,
+ template_dir_with_versioned_testcraft_yaml_j2: pathlib.Path,
+ project_yaml_filename: str,
+ check,
+) -> None:
+ project_path = empty_working_directory
+ init_service.initialise_project(
+ project_dir=project_path,
+ project_name=project_path.name,
+ template_dir=template_dir_with_versioned_testcraft_yaml_j2,
+ )
+ project_yaml_path = project_path / project_yaml_filename
+
+ with check:
+ assert project_yaml_path.exists(), "Project should be initialised with template"
+ project = Project.from_yaml_file(project_yaml_path)
+ assert project.version == init_service.default_version
+
+
+def test_init_service_with_version_based_on_commit(
+ init_service: InitService,
+ repository_with_commit: RepositoryDefinition,
+ template_dir_with_versioned_testcraft_yaml_j2: pathlib.Path,
+ project_yaml_filename: str,
+ check,
+) -> None:
+ project_path = repository_with_commit.repository_path
+ init_service.initialise_project(
+ project_dir=project_path,
+ project_name=project_path.name,
+ template_dir=template_dir_with_versioned_testcraft_yaml_j2,
+ )
+ project_yaml_path = project_path / project_yaml_filename
+
+ with check:
+ assert project_yaml_path.exists(), "Project should be initialised with template"
+ project = Project.from_yaml_file(project_yaml_path)
+ assert project.version == repository_with_commit.short_commit
+
+
+def test_init_service_with_version_based_on_tag(
+ init_service: InitService,
+ repository_with_annotated_tag: RepositoryDefinition,
+ template_dir_with_versioned_testcraft_yaml_j2: pathlib.Path,
+ project_yaml_filename: str,
+ check,
+) -> None:
+ project_path = repository_with_annotated_tag.repository_path
+ init_service.initialise_project(
+ project_dir=project_path,
+ project_name=project_path.name,
+ template_dir=template_dir_with_versioned_testcraft_yaml_j2,
+ )
+ project_yaml_path = project_path / project_yaml_filename
+
+ with check:
+ assert project_yaml_path.exists(), "Project should be initialised with template"
+ project = Project.from_yaml_file(project_yaml_path)
+ assert project.version == repository_with_annotated_tag.tag
diff --git a/tests/integration/services/test_provider.py b/tests/integration/services/test_provider.py
index 68f5c85f..65d663e0 100644
--- a/tests/integration/services/test_provider.py
+++ b/tests/integration/services/test_provider.py
@@ -36,6 +36,7 @@
),
pytest.param(("ubuntu", "24.04"), id="ubuntu_lts"),
pytest.param(("ubuntu", "22.04"), id="ubuntu_old_lts"),
+ pytest.param(("almalinux", "9"), id="almalinux_9"),
],
)
@pytest.mark.parametrize(
diff --git a/tests/integration/test_application.py b/tests/integration/test_application.py
index c3c2d01e..f5eedfb9 100644
--- a/tests/integration/test_application.py
+++ b/tests/integration/test_application.py
@@ -72,14 +72,16 @@ def app(create_app):
-V, --version: Show the application version and exit
Starter commands:
+ init: Create an initial project filetree
version: Show the application version and exit
Commands can be classified as follows:
- Lifecycle: build, clean, pack, prime, pull, stage
- Other: version
+ Lifecycle: clean, pull, build, stage, prime, pack
+ Other: init, version
For more information about a command, run 'testcraft help <command name>'.
For a summary of all commands, run 'testcraft help --all'.
+For more information about testcraft, check out: www.testcraft.example/docs/3.14159
"""
INVALID_COMMAND = """\
@@ -98,14 +100,16 @@ def app(create_app):
@pytest.mark.parametrize(
("argv", "stdout", "stderr", "exit_code"),
[
- (["help"], "", BASIC_USAGE, 0),
- (["--help"], "", BASIC_USAGE, 0),
- (["-h"], "", BASIC_USAGE, 0),
- (["--version"], VERSION_INFO, "", 0),
- (["-V"], VERSION_INFO, "", 0),
- (["-q", "--version"], "", "", 0),
- (["--invalid-parameter"], "", BASIC_USAGE, 64),
- (["non-command"], "", INVALID_COMMAND, 64),
+ pytest.param(["help"], "", BASIC_USAGE, 0, id="help"),
+ pytest.param(["--help"], "", BASIC_USAGE, 0, id="--help"),
+ pytest.param(["-h"], "", BASIC_USAGE, 0, id="-h"),
+ pytest.param(["--version"], VERSION_INFO, "", 0, id="--version"),
+ pytest.param(["-V"], VERSION_INFO, "", 0, id="-V"),
+ pytest.param(["-q", "--version"], "", "", 0, id="-q--version"),
+ pytest.param(
+ ["--invalid-parameter"], "", BASIC_USAGE, 64, id="--invalid-parameter"
+ ),
+ pytest.param(["non-command"], "", INVALID_COMMAND, 64, id="non-command"),
],
)
def test_special_inputs(capsys, monkeypatch, app, argv, stdout, stderr, exit_code):
@@ -135,7 +139,7 @@ def test_project_managed(capsys, monkeypatch, tmp_path, project, create_app):
app = create_app()
app._work_dir = tmp_path
- app.run()
+ assert app.run() == 0
assert (tmp_path / "package_1.0.tar.zst").exists()
captured = capsys.readouterr()
@@ -197,6 +201,7 @@ def test_version(capsys, monkeypatch, app):
assert captured.out == "testcraft 3.14159\n"
+@pytest.mark.usefixtures("emitter")
def test_non_lifecycle_command_does_not_require_project(monkeypatch, app):
"""Run a command without having a project instance shall not fail."""
monkeypatch.setattr("sys.argv", ["testcraft", "nothing"])
@@ -244,6 +249,10 @@ def test_get_command_help(monkeypatch, emitter, capsys, app, cmd, help_param):
stdout, stderr = capsys.readouterr()
assert f"testcraft {cmd} [options]" in stderr
+ assert stderr.endswith(
+ "For more information, check out: "
+ f"www.testcraft.example/docs/3.14159/reference/commands/{cmd}\n\n"
+ )
def test_invalid_command_argument(monkeypatch, capsys, app):
@@ -301,6 +310,10 @@ def test_global_environment(
],
)
+ # Check that this odd value makes its way through to the yaml build script
+ build_count = "5"
+ mocker.patch.dict("os.environ", {"TESTCRAFT_PARALLEL_BUILD_COUNT": build_count})
+
# Run in destructive mode
monkeypatch.setattr(
"sys.argv", ["testcraft", "prime", "--destructive-mode", *arguments]
@@ -325,6 +338,7 @@ def test_global_environment(
assert variables["arch_triplet_build_on"].startswith(
util.convert_architecture_deb_to_platform(util.get_host_architecture())
)
+ assert variables["parallel_build_count"] == build_count
@pytest.fixture
@@ -428,7 +442,7 @@ def test_lifecycle_error_logging(monkeypatch, tmp_path, create_app):
assert parts_message in log_contents
-@pytest.mark.usefixtures("pretend_jammy")
+@pytest.mark.usefixtures("pretend_jammy", "emitter")
def test_runtime_error_logging(monkeypatch, tmp_path, create_app, mocker):
monkeypatch.chdir(tmp_path)
shutil.copytree(INVALID_PROJECTS_DIR / "build-error", tmp_path, dirs_exist_ok=True)
@@ -453,3 +467,24 @@ def test_runtime_error_logging(monkeypatch, tmp_path, create_app, mocker):
# Make sure it's identified as the correct error type
parts_message = "Parts processing internal error: An unexpected error"
assert parts_message in log_contents
+
+
+def test_verbosity_greeting(monkeypatch, create_app, capsys):
+ """Test that 'verbose' runs only show the greeting once."""
+
+ # Set the verbosity *both* through the environment variable and the
+ # command line, to ensure that the greeting is only shown once even with
+ # multiple verbosity "settings".
+ monkeypatch.setenv("CRAFT_VERBOSITY_LEVEL", "verbose")
+ monkeypatch.setattr("sys.argv", ["testcraft", "i-dont-exist", "-v"])
+
+ app = create_app()
+ with pytest.raises(SystemExit):
+ app.run()
+
+ _, err = capsys.readouterr()
+ lines = err.splitlines()
+ greetings = [line for line in lines if line.startswith("Starting testcraft")]
+
+ # Exactly one greeting
+ assert len(greetings) == 1
diff --git a/tests/unit/commands/test_base.py b/tests/unit/commands/test_base.py
index 8b94bae0..5a0aea65 100644
--- a/tests/unit/commands/test_base.py
+++ b/tests/unit/commands/test_base.py
@@ -65,11 +65,11 @@ def test_get_managed_cmd(fake_command, verbosity, app_metadata):
def test_without_config(emitter):
"""Test that a command can be initialised without a config.
-
- This is necessary for providing per-command help.
+ This is pending deprecation but still supported.
"""
- command = base.AppCommand(None)
+ with pytest.deprecated_call():
+ command = base.AppCommand(None)
emitter.assert_trace("Not completing command configuration")
assert not hasattr(command, "_app")
diff --git a/tests/unit/commands/test_init.py b/tests/unit/commands/test_init.py
new file mode 100644
index 00000000..57299e5e
--- /dev/null
+++ b/tests/unit/commands/test_init.py
@@ -0,0 +1,150 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Tests for init command."""
+
+import argparse
+import pathlib
+
+import pytest
+from craft_application.commands import InitCommand
+from craft_application.errors import InitError
+
+# init operates in the current working directory
+pytestmark = pytest.mark.usefixtures("new_dir")
+
+
+@pytest.fixture
+def init_command(app_metadata, mock_services, mocker, tmp_path):
+ mocker.patch.object(
+ InitCommand,
+ "parent_template_dir",
+ pathlib.Path(tmp_path) / "templates",
+ )
+ return InitCommand({"app": app_metadata, "services": mock_services})
+
+
+@pytest.fixture
+def fake_template_dirs(tmp_path):
+    """Set up a fake parent template directory with two templates.
+
+ These templates are very simple because tests focused on the templates themselves
+ are in the InitService tests.
+ """
+ parent_template_dir = tmp_path / "templates"
+
+ (parent_template_dir / "simple").mkdir(parents=True)
+ (parent_template_dir / "other-template").mkdir()
+
+ return parent_template_dir
+
+
+@pytest.mark.parametrize("name", [None, "my-project"])
+def test_init_in_cwd(init_command, name, new_dir, mock_services, emitter):
+ """Test the init command in the current working directory."""
+ expected_name = name or new_dir.name
+ parsed_args = argparse.Namespace(
+ project_dir=None,
+ name=name,
+ profile="test-profile",
+ )
+ mock_services.init.validate_project_name.return_value = expected_name
+
+ init_command.run(parsed_args)
+
+ mock_services.init.initialise_project.assert_called_once_with(
+ project_dir=new_dir,
+ project_name=expected_name,
+ template_dir=init_command.parent_template_dir / "test-profile",
+ )
+ emitter.assert_message("Successfully initialised project.")
+
+
+@pytest.mark.parametrize("name", [None, "my-project"])
+def test_init_run_project_dir(init_command, name, mock_services, emitter):
+ """Test the init command in a project directory."""
+ expected_name = name or "test-project-dir"
+ project_dir = pathlib.Path("test-project-dir")
+ parsed_args = argparse.Namespace(
+ project_dir=project_dir,
+ name=name,
+ profile="test-profile",
+ )
+ mock_services.init.validate_project_name.return_value = expected_name
+
+ init_command.run(parsed_args)
+
+ mock_services.init.initialise_project.assert_called_once_with(
+ project_dir=project_dir.expanduser().resolve(),
+ project_name=expected_name,
+ template_dir=init_command.parent_template_dir / "test-profile",
+ )
+ emitter.assert_message("Successfully initialised project.")
+
+
+@pytest.mark.usefixtures("fake_template_dirs")
+def test_profiles(init_command):
+ """Test profile generation."""
+ assert init_command.default_profile == "simple"
+ assert init_command.profiles == ["other-template", "simple"]
+
+
+def test_existing_files(init_command, tmp_path, mock_services):
+ """Error if the check for existing files fails."""
+ mock_services.init.check_for_existing_files.side_effect = InitError("test-error")
+ parsed_args = argparse.Namespace(
+ project_dir=tmp_path,
+ name="test-project-name",
+ profile="test-profile",
+ )
+
+ with pytest.raises(InitError, match="test-error"):
+ init_command.run(parsed_args)
+
+ mock_services.init.initialise_project.assert_not_called()
+
+
+def test_invalid_name(init_command, mock_services):
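+    """An InitError from project-name validation must propagate."""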
+ mock_services.init.validate_project_name.side_effect = InitError("test-error")
+ parsed_args = argparse.Namespace(
+ name="invalid--name",
+ )
+ with pytest.raises(InitError, match="test-error"):
+ init_command.run(parsed_args)
+
+
+def test_invalid_name_directory(init_command, mock_services):
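+    """Fall back to the default project name when the directory name is invalid."""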
+ def _validate_project_name(_name: str, *, use_default: bool = False):
+ if use_default:
+ return "my-project"
+ raise InitError("test-error")
+
+ mock_services.init.validate_project_name = _validate_project_name
+
+ project_dir = pathlib.Path("invalid--name")
+ parsed_args = argparse.Namespace(
+ project_dir=project_dir,
+ name=None,
+ profile="simple",
+ )
+
+ init_command.run(parsed_args)
+
+ mock_services.init.initialise_project.assert_called_once_with(
+ project_dir=project_dir.expanduser().resolve(),
+ project_name="my-project",
+ template_dir=init_command.parent_template_dir / "simple",
+ )
diff --git a/tests/unit/commands/test_lifecycle.py b/tests/unit/commands/test_lifecycle.py
index a69c0adf..ae4a0858 100644
--- a/tests/unit/commands/test_lifecycle.py
+++ b/tests/unit/commands/test_lifecycle.py
@@ -14,6 +14,7 @@
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Tests for lifecycle commands."""
+
import argparse
import pathlib
import subprocess
@@ -73,15 +74,16 @@
]
PRO_SERVICE_CONFIGS = [
- # is_attached, enabled_services, pro_services_args, expected_exception
- (False, [], [], None),
- (True, ["esm-apps"], ["esm-apps"], None),
- (True, ["esm-apps", "fips-updates"],["esm-apps", "fips-updates"], None),
- (True, ["esm-apps"], [], UbuntuProAttachedError),
- (False, [], ["esm-apps"], UbuntuProDetachedError),
- (True, ["esm-apps", "fips-updates"],["fips-updates"], InvalidUbuntuProStatusError),
- (True, ["esm-apps",], ["fips-updates", "fips-updates"], InvalidUbuntuProStatusError),
- (True, ["esm-apps"], ["esm-apps", "invalid-service"], InvalidUbuntuProServiceError),
+ # is_attached, is_managed_mode, enabled_services, pro_services_args, expected_exception
+ (False, True, [], [], None),
+ (True, True, ["esm-apps"], ["esm-apps"], None),
+    (True, True, ["esm-apps", "fips-updates"], ["esm-apps", "fips-updates"], None),
+ (True, True, ["esm-apps"], [], None),
+ (True, False, ["esm-apps"], [], UbuntuProAttachedError),
+ (False, True, [], ["esm-apps"], UbuntuProDetachedError),
+    (True, True, ["esm-apps", "fips-updates"], ["fips-updates"], None),
+    (True, True, ["esm-apps"], ["fips-updates", "fips-updates"], InvalidUbuntuProStatusError),
+ (True, True, ["esm-apps"], ["esm-apps", "invalid-service"], InvalidUbuntuProServiceError),
]
STEP_NAMES = [step.name.lower() for step in craft_parts.Step]
@@ -115,17 +117,23 @@ def run_managed(self, parsed_args: argparse.Namespace) -> bool: # noqa: ARG002
@pytest.mark.parametrize(
- ("is_attached", "enabled_services", "pro_services_args", "expected_exception"),
+ (
+ "is_attached",
+ "is_managed_mode",
+ "enabled_services",
+ "pro_services_args",
+ "expected_exception",
+ ),
PRO_SERVICE_CONFIGS,
)
def test_validate_pro_services(
mock_pro_api_call,
is_attached,
+ is_managed_mode,
enabled_services,
pro_services_args,
expected_exception,
):
-
# configure api state
set_is_attached, set_enabled_service = mock_pro_api_call
set_is_attached(is_attached)
@@ -138,6 +146,7 @@ def test_validate_pro_services(
with exception_context:
# create and validate pro services
pro_services = ProServices(pro_services_args)
+ pro_services.managed_mode = is_managed_mode
pro_services.validate()
@@ -491,6 +500,7 @@ def test_clean_run_managed(
@pytest.mark.parametrize(("pro_service_dict", "pro_service_args"), PRO_SERVICE_COMMANDS)
@pytest.mark.parametrize(("build_env_dict", "build_env_args"), BUILD_ENV_COMMANDS)
+@pytest.mark.parametrize(("shell_dict", "shell_args"), SHELL_PARAMS)
@pytest.mark.parametrize(("debug_dict", "debug_args"), DEBUG_PARAMS)
@pytest.mark.parametrize("output_arg", [".", "/"])
def test_pack_fill_parser(
@@ -500,6 +510,8 @@ def test_pack_fill_parser(
pro_service_args,
build_env_dict,
build_env_args,
+ shell_dict,
+ shell_args,
debug_dict,
debug_args,
output_arg,
@@ -509,6 +521,8 @@ def test_pack_fill_parser(
"platform": None,
"build_for": None,
"output": pathlib.Path(output_arg),
+ "fetch_service_policy": None,
+ **shell_dict,
**debug_dict,
**build_env_dict,
**pro_service_dict,
@@ -519,7 +533,13 @@ def test_pack_fill_parser(
args_dict = vars(
parser.parse_args(
- [*pro_service_args, *build_env_args, *debug_args, f"--output={output_arg}"]
+ [
+ *pro_service_args,
+ *build_env_args,
+ *shell_args,
+ *debug_args,
+ f"--output={output_arg}",
+ ]
)
)
assert args_dict == expected
@@ -541,7 +561,9 @@ def test_pack_run(
emitter, mock_services, app_metadata, parts, tmp_path, packages, message
):
mock_services.package.pack.return_value = packages
- parsed_args = argparse.Namespace(parts=parts, output=tmp_path)
+ parsed_args = argparse.Namespace(
+ parts=parts, output=tmp_path, fetch_service_policy=None
+ )
command = PackCommand(
{
"app": app_metadata,
@@ -559,6 +581,34 @@ def test_pack_run(
emitter.assert_progress(message, permanent=True)
+@pytest.mark.parametrize(
+ ("fetch_service_policy", "expect_create_called"),
+ [("strict", True), ("permissive", True), (None, False)],
+)
+def test_pack_fetch_manifest(
+ mock_services, app_metadata, tmp_path, fetch_service_policy, expect_create_called
+):
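+    """Only create a project manifest when a fetch-service policy is set."""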
+ packages = [pathlib.Path("package.zip")]
+ mock_services.package.pack.return_value = packages
+ parsed_args = argparse.Namespace(
+ output=tmp_path, fetch_service_policy=fetch_service_policy
+ )
+ command = PackCommand(
+ {
+ "app": app_metadata,
+ "services": mock_services,
+ }
+ )
+
+ command.run(parsed_args)
+
+ mock_services.package.pack.assert_called_once_with(
+ mock_services.lifecycle.prime_dir,
+ tmp_path,
+ )
+ assert mock_services.fetch.create_project_manifest.called == expect_create_called
+
+
def test_pack_run_wrong_step(app_metadata, fake_services):
parsed_args = argparse.Namespace(parts=None, output=pathlib.Path())
command = PackCommand(
@@ -613,6 +663,34 @@ def test_shell(
mock_subprocess_run.assert_called_once_with(["bash"], check=False)
+def test_shell_pack(
+ app_metadata,
+ fake_services,
+ mocker,
+ mock_subprocess_run,
+):
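+    """With --shell, pack must open a shell instead of packing."""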
+ parsed_args = argparse.Namespace(shell=True)
+ mock_lifecycle_run = mocker.patch.object(fake_services.lifecycle, "run")
+ mock_pack = mocker.patch.object(fake_services.package, "pack")
+ mocker.patch.object(
+ fake_services.lifecycle.project_info, "execution_finished", return_value=True
+ )
+ command = PackCommand(
+ {
+ "app": app_metadata,
+ "services": fake_services,
+ }
+ )
+ command.run(parsed_args)
+
+ # Must run the lifecycle
+ mock_lifecycle_run.assert_called_once_with(step_name="prime")
+
+ # Must call the shell instead of packing
+ mock_subprocess_run.assert_called_once_with(["bash"], check=False)
+ assert not mock_pack.called
+
+
@pytest.mark.parametrize("command_cls", MANAGED_LIFECYCLE_COMMANDS)
def test_shell_after(
app_metadata, fake_services, mocker, mock_subprocess_run, command_cls
@@ -636,6 +714,35 @@ def test_shell_after(
mock_subprocess_run.assert_called_once_with(["bash"], check=False)
+def test_shell_after_pack(
+ app_metadata,
+ fake_services,
+ mocker,
+ mock_subprocess_run,
+):
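+    """With --shell-after, pack must pack and then open a shell."""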
+ parsed_args = argparse.Namespace(
+ shell_after=True, output=pathlib.Path(), fetch_service_policy=None
+ )
+ mock_lifecycle_run = mocker.patch.object(fake_services.lifecycle, "run")
+ mock_pack = mocker.patch.object(fake_services.package, "pack")
+ mocker.patch.object(
+ fake_services.lifecycle.project_info, "execution_finished", return_value=True
+ )
+ command = PackCommand(
+ {
+ "app": app_metadata,
+ "services": fake_services,
+ }
+ )
+ command.run(parsed_args)
+
+ # Must run the lifecycle
+ mock_lifecycle_run.assert_called_once_with(step_name="prime")
+ # Must pack, and then shell
+ mock_pack.assert_called_once_with(fake_services.lifecycle.prime_dir, pathlib.Path())
+ mock_subprocess_run.assert_called_once_with(["bash"], check=False)
+
+
@pytest.mark.parametrize("command_cls", [*MANAGED_LIFECYCLE_COMMANDS, PackCommand])
def test_debug(app_metadata, fake_services, mocker, mock_subprocess_run, command_cls):
parsed_args = argparse.Namespace(parts=None, debug=True)
@@ -656,3 +763,33 @@ def test_debug(app_metadata, fake_services, mocker, mock_subprocess_run, command
command.run(parsed_args)
mock_subprocess_run.assert_called_once_with(["bash"], check=False)
+
+
+def test_debug_pack(
+ app_metadata,
+ fake_services,
+ mocker,
+ mock_subprocess_run,
+):
+    """Same as test_debug(), but for when the error happens during packing."""
+ parsed_args = argparse.Namespace(debug=True, output=pathlib.Path())
+ error_message = "Packing failed!"
+
+ # Lifecycle.run() should work
+ mocker.patch.object(fake_services.lifecycle, "run")
+ # Package.pack() should fail
+ mocker.patch.object(
+ fake_services.package, "pack", side_effect=RuntimeError(error_message)
+ )
+ mocker.patch.object(fake_services.package, "update_project")
+ command = PackCommand(
+ {
+ "app": app_metadata,
+ "services": fake_services,
+ }
+ )
+
+ with pytest.raises(RuntimeError, match=error_message):
+ command.run(parsed_args)
+
+ mock_subprocess_run.assert_called_once_with(["bash"], check=False)
diff --git a/tests/unit/commands/test_other.py b/tests/unit/commands/test_other.py
index fcfef2b5..95a40b5c 100644
--- a/tests/unit/commands/test_other.py
+++ b/tests/unit/commands/test_other.py
@@ -13,15 +13,14 @@
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
-"""Tests for lifecycle commands."""
+"""Tests for other commands."""
import argparse
import pytest
+from craft_application.commands import InitCommand
from craft_application.commands.other import VersionCommand, get_other_command_group
-OTHER_COMMANDS = {
- VersionCommand,
-}
+OTHER_COMMANDS = {InitCommand, VersionCommand}
@pytest.mark.parametrize("commands", [OTHER_COMMANDS])
diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py
index 9ad49687..7b9b5c98 100644
--- a/tests/unit/conftest.py
+++ b/tests/unit/conftest.py
@@ -50,4 +50,6 @@ def mock_services(app_metadata, fake_project, fake_package_service_class):
factory.package = mock.Mock(spec=services.PackageService)
factory.provider = mock.Mock(spec=services.ProviderService)
factory.remote_build = mock.Mock(spec_set=services.RemoteBuildService)
+ factory.fetch = mock.Mock(spec=services.FetchService)
+ factory.init = mock.Mock(spec=services.InitService)
return factory
diff --git a/tests/unit/git/test_git.py b/tests/unit/git/test_git.py
index f2c0c5a4..cec290c7 100644
--- a/tests/unit/git/test_git.py
+++ b/tests/unit/git/test_git.py
@@ -15,11 +15,8 @@
"""Tests for the pygit2 wrapper class."""
-import os
-import pathlib
import re
import subprocess
-from collections.abc import Iterator
from pathlib import Path
from typing import cast
from unittest.mock import ANY
@@ -27,29 +24,21 @@
import pygit2
import pygit2.enums
import pytest
-from craft_application.git import GitError, GitRepo, GitType, get_git_repo_type, is_repo
+from craft_application.git import (
+ GitError,
+ GitRepo,
+ GitType,
+ get_git_repo_type,
+ is_repo,
+ parse_describe,
+ short_commit_sha,
+)
from craft_application.remote import (
RemoteBuildInvalidGitRepoError,
check_git_repo_for_remote_build,
)
-
-@pytest.fixture
-def empty_working_directory(tmp_path) -> Iterator[Path]:
- cwd = pathlib.Path.cwd()
-
- repo_dir = Path(tmp_path, "test-repo")
- repo_dir.mkdir()
- os.chdir(repo_dir)
- yield repo_dir
-
- os.chdir(cwd)
-
-
-@pytest.fixture
-def empty_repository(empty_working_directory) -> Path:
- subprocess.run(["git", "init"], check=True)
- return cast(Path, empty_working_directory)
+from tests.conftest import RepositoryDefinition
def test_is_repo(empty_working_directory):
@@ -137,6 +126,23 @@ def test_is_repo_error(empty_working_directory, mocker):
)
+@pytest.mark.parametrize(
+ ("describe", "expected"),
+ [
+ ("cdaea14", "cdaea14"),
+ ("4.1.1-0-gad012482d", "4.1.1"),
+ ("4.1.1-16-g2d8943dbc", "4.1.1.post16+git2d8943dbc"),
+ ("curl-8_11_0-6-g0cdde0f", "curl-8_11_0.post6+git0cdde0f"),
+ ("curl-8_11_0-0-gb1ef0e1", "curl-8_11_0"),
+ ("0ae7c04", "0ae7c04"),
+ ("unknown-format", "unknown-format"),
+ ],
+)
+def test_parsing_describe(describe: str, expected: str) -> None:
+ """Check if describe result is correctly parsed."""
+ assert parse_describe(describe) == expected
+
+
def test_init_repo(empty_working_directory):
"""Initialize a GitRepo object."""
repo = GitRepo(empty_working_directory)
@@ -549,7 +555,7 @@ def test_push_url_hide_token(url, expected_url, mocker, empty_working_directory)
repo.push_url(
remote_url=url,
remote_branch="test-branch",
- token="test-token", # noqa: S106
+ token="test-token",
)
# token should be hidden in the log output
@@ -894,6 +900,88 @@ def test_check_git_repo_for_remote_build_shallow(empty_working_directory):
with pytest.raises(
RemoteBuildInvalidGitRepoError,
- match="Remote build for shallow cloned git repos are no longer supported",
+ match="Remote builds for shallow cloned git repos are not supported",
):
check_git_repo_for_remote_build(git_shallow_path)
+
+
+@pytest.mark.parametrize(
+ "always_use_long_format",
+ [True, False, None],
+ ids=lambda p: f"use_long_format={p!s}",
+)
+def test_describing_commit(
+ repository_with_commit: RepositoryDefinition, always_use_long_format: bool | None
+):
+    """Describe a repository that contains a single commit."""
+ repo = GitRepo(repository_with_commit.repository_path)
+
+ assert (
+ repo.describe(
+ show_commit_oid_as_fallback=True,
+ always_use_long_format=always_use_long_format,
+ )
+ == repository_with_commit.short_commit
+ )
+
+
+def test_describing_repo_fails_in_empty_repo(empty_repository: Path):
+ """Cannot describe an empty repository."""
+ repo = GitRepo(empty_repository)
+
+ with pytest.raises(GitError):
+ repo.describe(show_commit_oid_as_fallback=True)
+
+
+def test_describing_tags(repository_with_annotated_tag: RepositoryDefinition):
+ """Describe should be able to handle annotated tags."""
+ repo = GitRepo(repository_with_annotated_tag.repository_path)
+ assert repo.describe() == repository_with_annotated_tag.tag
+
+
+@pytest.fixture(params=[True, False, None], ids=lambda p: f"fallback={p!r}")
+def show_commit_oid_as_fallback(request: pytest.FixtureRequest) -> bool | None:
+ return cast(bool | None, request.param)
+
+
+@pytest.fixture(params=[True, False, None], ids=lambda p: f"long={p!r}")
+def always_use_long_format(request: pytest.FixtureRequest) -> bool | None:
+ return cast(bool | None, request.param)
+
+
+def test_describing_commits_following_tags(
+ repository_with_annotated_tag: RepositoryDefinition,
+ show_commit_oid_as_fallback: bool | None,
+ always_use_long_format: bool | None,
+):
+ """Describe should be able to discover commits after tags."""
+ repo = GitRepo(repository_with_annotated_tag.repository_path)
+ (repository_with_annotated_tag.repository_path / "another_file").touch()
+ tag = repository_with_annotated_tag.tag
+ repo.add_all()
+ new_commit = repo.commit("commit after tag")
+ short_new_commit = short_commit_sha(new_commit)
+ describe_result = repo.describe(
+ show_commit_oid_as_fallback=show_commit_oid_as_fallback,
+ always_use_long_format=always_use_long_format,
+ )
+ assert describe_result == f"{tag}-1-g{short_new_commit}"
+ assert parse_describe(describe_result) == f"{tag}.post1+git{short_new_commit}"
+
+
+def test_describing_unannotated_tags(
+ repository_with_unannotated_tag: RepositoryDefinition,
+):
+    """Describe should error out when the repo has no annotated tags."""
+ repo = GitRepo(repository_with_unannotated_tag.repository_path)
+ with pytest.raises(GitError):
+ repo.describe()
+
+
+def test_describing_fallback_to_commit_for_unannotated_tags(
+ repository_with_unannotated_tag: RepositoryDefinition,
+):
+    """Describe should fall back to the commit when the repo has no annotated tags."""
+ repo = GitRepo(repository_with_unannotated_tag.repository_path)
+ describe_result = repo.describe(show_commit_oid_as_fallback=True)
+ assert describe_result == repository_with_unannotated_tag.short_commit
diff --git a/tests/unit/models/test_base.py b/tests/unit/models/test_base.py
index 2d9390a8..da7067c6 100644
--- a/tests/unit/models/test_base.py
+++ b/tests/unit/models/test_base.py
@@ -19,6 +19,7 @@
import pydantic
import pytest
from craft_application import errors, models
+from hypothesis import given, strategies
from overrides import override
@@ -58,3 +59,22 @@ def test_model_reference_slug_errors():
)
assert str(err.value) == expected
assert err.value.doc_slug == "/mymodel.html"
+
+
+class CoerceModel(models.CraftBaseModel):
+
+ stringy: str
+
+
+@given(
+ strategies.one_of(
+ strategies.integers(),
+ strategies.floats(),
+ strategies.decimals(),
+ strategies.text(),
+ )
+)
+def test_model_coerces_to_strings(value):
+ result = CoerceModel.model_validate({"stringy": value})
+
+ assert result.stringy == str(value)
diff --git a/tests/unit/models/test_manifest.py b/tests/unit/models/test_manifest.py
new file mode 100644
index 00000000..eed4c2b5
--- /dev/null
+++ b/tests/unit/models/test_manifest.py
@@ -0,0 +1,99 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+import json
+from datetime import datetime
+
+import pytest
+from craft_application import util
+from craft_application.models import BuildInfo
+from craft_application.models.manifest import (
+ CraftManifest,
+ ProjectManifest,
+ SessionArtifactManifest,
+)
+from craft_providers import bases
+from freezegun import freeze_time
+
+
+@pytest.fixture
+@freeze_time(datetime.fromisoformat("2024-09-16T01:02:03.456789"))
+def project_manifest(tmp_path, fake_project):
+ project = fake_project
+ build_info = BuildInfo(
+ platform="amd64",
+ build_on="amd64",
+ build_for="amd64",
+ base=bases.BaseName("ubuntu", "24.04"),
+ )
+
+ artifact = tmp_path / "my-artifact.file"
+ artifact.write_text("this is the generated artifact")
+
+ return ProjectManifest.from_packed_artifact(project, build_info, artifact)
+
+
+@pytest.fixture
+def session_report(manifest_data_dir):
+ report_path = manifest_data_dir / "session-report.json"
+ return json.loads(report_path.read_text())
+
+
+def test_from_packed_artifact(project_manifest, manifest_data_dir):
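+    """The manifest from a packed artifact must match the expected YAML fixture."""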
+ expected = (manifest_data_dir / "project-expected.yaml").read_text()
+ obtained = project_manifest.to_yaml_string()
+
+ assert obtained == expected
+
+
+def test_from_session_report(session_report, manifest_data_dir):
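+    """Artefact manifests built from a session report must match the expected YAML."""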
+ deps = SessionArtifactManifest.from_session_report(session_report)
+ obtained = util.dump_yaml([d.marshal() for d in deps])
+
+ expected = (manifest_data_dir / "session-manifest-expected.yaml").read_text()
+ assert obtained == expected
+
+
+def test_create_craft_manifest(
+ tmp_path, project_manifest, session_report, manifest_data_dir
+):
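+    """The merged craft manifest must match the expected JSON fixture."""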
+ project_manifest_path = tmp_path / "project-manifest.yaml"
+ project_manifest.to_yaml_file(project_manifest_path)
+
+ craft_manifest = CraftManifest.create_craft_manifest(
+ project_manifest_path, session_report
+ )
+
+ obtained = json.dumps(craft_manifest.marshal(), indent=2) + "\n"
+ expected = (manifest_data_dir / "craft-manifest-expected.json").read_text()
+
+ assert obtained == expected
+
+
+def test_session_report_rejections(session_report):
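+    """Rejected artefacts in the session report must carry their rejection reasons."""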
+ deps = SessionArtifactManifest.from_session_report(session_report)
+ rejected = [d for d in deps if d.rejected]
+
+ assert len(rejected) == 2 # noqa: PLR2004 (magic value in comparison)
+
+ assert rejected[0].rejection_reasons == [
+ "fetch is allowed only on a single ref",
+ "fetch is only allowed with depth 1",
+ "git repository does not contain a go.mod file",
+ ]
+ assert rejected[1].rejection_reasons == [
+ "the artefact format is unknown",
+ "the request was not recognized by any format inspector",
+ ]
diff --git a/tests/unit/models/test_project.py b/tests/unit/models/test_project.py
index ab0f21b6..c49e8d5d 100644
--- a/tests/unit/models/test_project.py
+++ b/tests/unit/models/test_project.py
@@ -20,6 +20,7 @@
import textwrap
from textwrap import dedent
+import craft_platforms
import craft_providers.bases
import pydantic
import pytest
@@ -30,9 +31,11 @@
DEVEL_BASE_WARNING,
BuildInfo,
BuildPlanner,
+ Platform,
Project,
constraints,
)
+from craft_application.util import platforms
PROJECTS_DIR = pathlib.Path(__file__).parent / "project_models"
PARTS_DICT = {"my-part": {"plugin": "nil"}}
@@ -121,6 +124,115 @@ def full_project_dict():
return copy.deepcopy(FULL_PROJECT_DICT)
+@pytest.mark.parametrize(
+ ("incoming", "expected"),
+ [
+ (
+ craft_platforms.BuildInfo(
+ "my-platform",
+ craft_platforms.DebianArchitecture.RISCV64,
+ "all",
+ craft_platforms.DistroBase("ubuntu", "24.04"),
+ ),
+ BuildInfo(
+ "my-platform",
+ "riscv64",
+ "all",
+ craft_providers.bases.BaseName("ubuntu", "24.04"),
+ ),
+ ),
+ (
+ craft_platforms.BuildInfo(
+ "my-platform",
+ craft_platforms.DebianArchitecture.RISCV64,
+ craft_platforms.DebianArchitecture.AMD64,
+ craft_platforms.DistroBase("almalinux", "9"),
+ ),
+ BuildInfo(
+ "my-platform",
+ "riscv64",
+ "amd64",
+ craft_providers.bases.BaseName("almalinux", "9"),
+ ),
+ ),
+ ],
+)
+def test_build_info_from_platforms(incoming, expected):
+ assert BuildInfo.from_platforms(incoming) == expected
+
+
+@pytest.mark.parametrize(
+ ("incoming", "expected"),
+ [
+ *(
+ pytest.param(
+ {"build-on": arch, "build-for": arch},
+ Platform(build_on=[arch], build_for=[arch]),
+ id=arch,
+ )
+ for arch in platforms._ARCH_TRANSLATIONS_DEB_TO_PLATFORM
+ ),
+ *(
+ pytest.param(
+ {"build-on": arch},
+ Platform(build_on=[arch]),
+ id=f"build-on-only-{arch}",
+ )
+ for arch in platforms._ARCH_TRANSLATIONS_DEB_TO_PLATFORM
+ ),
+ pytest.param(
+ {"build-on": "amd64", "build-for": "riscv64"},
+ Platform(build_on=["amd64"], build_for=["riscv64"]),
+ id="cross-compile",
+ ),
+ ],
+)
+def test_platform_vectorise_architectures(incoming, expected):
+ platform = Platform.model_validate(incoming)
+
+ assert platform == expected
+
+
+@pytest.mark.parametrize(
+ ("incoming", "expected"),
+ [
+ (
+ {"build-on": ["amd64"], "build-for": ["all"]},
+ Platform(build_on=["amd64"], build_for=["all"]),
+ ),
+ ],
+)
+def test_platform_from_platform_dict(incoming, expected):
+ assert Platform.model_validate(incoming) == expected
+
+
+@pytest.mark.parametrize(
+ ("incoming", "expected"),
+ [
+ pytest.param(
+ {
+ craft_platforms.DebianArchitecture.AMD64: None,
+ craft_platforms.DebianArchitecture.ARM64: None,
+ craft_platforms.DebianArchitecture.RISCV64: None,
+ },
+ {
+ "amd64": Platform(build_on=["amd64"], build_for=["amd64"]),
+ "arm64": Platform(build_on=["arm64"], build_for=["arm64"]),
+ "riscv64": Platform(build_on=["riscv64"], build_for=["riscv64"]),
+ },
+ id="architectures",
+ ),
+ pytest.param(
+ {"any string": {"build-on": ["amd64"], "build-for": ["all"]}},
+ {"any string": Platform(build_on=["amd64"], build_for=["all"])},
+ id="stringy",
+ ),
+ ],
+)
+def test_platform_from_platforms(incoming, expected):
+ assert Platform.from_platforms(incoming) == expected
+
+
@pytest.mark.parametrize(
("project_fixture", "project_dict"),
[("basic_project", BASIC_PROJECT_DICT), ("full_project", FULL_PROJECT_DICT)],
diff --git a/tests/unit/remote/test_errors.py b/tests/unit/remote/test_errors.py
index 2ff2f902..a229c223 100644
--- a/tests/unit/remote/test_errors.py
+++ b/tests/unit/remote/test_errors.py
@@ -15,25 +15,21 @@
from craft_application.remote import errors
+def test_git_error():
+ """Test RemoteBuildGitError."""
+ error = errors.RemoteBuildGitError(message="failed to push some refs to 'unknown'")
+
+ assert (
+ str(error) == "Git operation failed with: failed to push some refs to 'unknown'"
+ )
+
+
def test_unsupported_architecture_error():
"""Test UnsupportedArchitectureError."""
error = errors.UnsupportedArchitectureError(architectures=["amd64", "arm64"])
assert str(error) == (
- "Architecture not supported by the remote builder.\nThe following "
- "architectures are not supported by the remote builder: ['amd64', 'arm64'].\n"
- "Please remove them from the architecture list and try again."
- )
- assert repr(error) == (
- "UnsupportedArchitectureError(brief='Architecture not supported by the remote "
- "builder.', details=\"The following architectures are not supported by the "
- "remote builder: ['amd64', 'arm64'].\\nPlease remove them from the "
- 'architecture list and try again.")'
- )
-
- assert error.brief == "Architecture not supported by the remote builder."
- assert error.details == (
"The following architectures are not supported by the remote builder: "
- "['amd64', 'arm64'].\nPlease remove them from the architecture list and "
- "try again."
+ "'amd64' and 'arm64'."
)
+ assert error.resolution == "Remove them from the architecture list and try again."
diff --git a/tests/unit/remote/test_git.py b/tests/unit/remote/test_git.py
new file mode 100644
index 00000000..7bfbc211
--- /dev/null
+++ b/tests/unit/remote/test_git.py
@@ -0,0 +1,57 @@
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License version 3 as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+
+"""Remote-build git tests."""
+
+import pytest
+from craft_application.git import GitType
+from craft_application.remote import errors, git
+
+
+@pytest.fixture
+def mock_get_git_repo_type(mocker):
+ return mocker.patch("craft_application.remote.git.get_git_repo_type")
+
+
+def test_git_normal(tmp_path, mock_get_git_repo_type):
+ """No-op for a normal git repo."""
+ mock_get_git_repo_type.return_value = GitType.NORMAL
+
+ assert git.check_git_repo_for_remote_build(tmp_path) is None
+
+
+def test_git_invalid_error(tmp_path, mock_get_git_repo_type):
+ """Raise an error for invalid git repos."""
+ mock_get_git_repo_type.return_value = GitType.INVALID
+
+ with pytest.raises(errors.RemoteBuildInvalidGitRepoError) as err:
+ git.check_git_repo_for_remote_build(tmp_path)
+
+ assert str(err.value) == f"Could not find a git repository in {str(tmp_path)!r}"
+ assert (
+ err.value.resolution == "Initialize a git repository in the project directory"
+ )
+
+
+def test_git_shallow_clone_error(tmp_path, mock_get_git_repo_type):
+ """Raise an error for shallowly cloned repos."""
+ mock_get_git_repo_type.return_value = GitType.SHALLOW
+
+ with pytest.raises(errors.RemoteBuildInvalidGitRepoError) as err:
+ git.check_git_repo_for_remote_build(tmp_path)
+
+ assert (
+ str(err.value) == "Remote builds for shallow cloned git repos are not supported"
+ )
+ assert err.value.resolution == "Make a non-shallow clone of the repository"
diff --git a/tests/unit/remote/test_utils.py b/tests/unit/remote/test_utils.py
index a8897adb..bf4336cc 100644
--- a/tests/unit/remote/test_utils.py
+++ b/tests/unit/remote/test_utils.py
@@ -25,6 +25,7 @@
validate_architectures,
)
from craft_application.remote.utils import _SUPPORTED_ARCHS
+from craft_application.util import humanize_list
###############################
# validate architecture tests #
@@ -57,7 +58,7 @@ def test_validate_architectures_error(archs, expected_archs):
assert (
"The following architectures are not supported by the remote builder: "
- f"{expected_archs}"
+ f"{humanize_list(expected_archs, 'and')}"
) in str(raised.value)
diff --git a/tests/unit/services/test_config.py b/tests/unit/services/test_config.py
new file mode 100644
index 00000000..2eb78196
--- /dev/null
+++ b/tests/unit/services/test_config.py
@@ -0,0 +1,260 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+"""Unit tests for the configuration service."""
+
+import io
+import itertools
+import json
+import string
+import subprocess
+from collections.abc import Iterator
+from unittest import mock
+
+import craft_application
+import craft_cli
+import pytest
+import pytest_subprocess
+import snaphelpers
+from craft_application import launchpad
+from craft_application.services import config
+from hypothesis import given, strategies
+
+CRAFT_APPLICATION_TEST_ENTRY_VALUES = [
+ *(
+ ("verbosity_level", mode.name.lower(), mode)
+ for mode in craft_cli.messages.EmitterMode
+ ),
+ *(("verbosity_level", mode.name, mode) for mode in craft_cli.messages.EmitterMode),
+ ("debug", "true", True),
+ ("debug", "false", False),
+ ("build_environment", "host", "host"),
+ ("secrets", "Cara likes Butterscotch.", "Cara likes Butterscotch."),
+ ("platform", "laptop", "laptop"),
+ ("platform", "mainframe", "mainframe"),
+ ("build_for", "riscv64", "riscv64"),
+ ("build_for", "s390x", "s390x"),
+ *(("parallel_build_count", str(i), i) for i in range(10)),
+ *(("max_parallel_build_count", str(i), i) for i in range(10)),
+]
+APP_SPECIFIC_TEST_ENTRY_VALUES = [
+ ("my_str", "some string", "some string"),
+ ("my_int", "1", 1),
+ ("my_int", "2", 2),
+ ("my_bool", "true", True),
+ ("my_bool", "false", False),
+ ("my_default_str", "something", "something"),
+ ("my_default_int", "4294967296", 2**32),
+ ("my_bool", "1", True),
+ ("my_arch", "riscv64", launchpad.Architecture.RISCV64),
+]
+TEST_ENTRY_VALUES = CRAFT_APPLICATION_TEST_ENTRY_VALUES + APP_SPECIFIC_TEST_ENTRY_VALUES
+
+
+@pytest.fixture(scope="module")
+def app_environment_handler(default_app_metadata) -> config.AppEnvironmentHandler:
+ return config.AppEnvironmentHandler(default_app_metadata)
+
+
+@pytest.fixture(scope="module")
+def craft_environment_handler(default_app_metadata) -> config.CraftEnvironmentHandler:
+ return config.CraftEnvironmentHandler(default_app_metadata)
+
+
+@pytest.fixture(scope="module")
+def snap_config_handler(default_app_metadata) -> Iterator[config.SnapConfigHandler]:
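+    # Module-scoped fixture, so use MonkeyPatch.context() instead of the
+    # function-scoped monkeypatch fixture.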
+ with pytest.MonkeyPatch.context() as monkeypatch:
+ monkeypatch.setenv("SNAP", "/snap/testcraft/x1")
+ monkeypatch.setenv("SNAP_COMMON", "/")
+ monkeypatch.setenv("SNAP_DATA", "/")
+ monkeypatch.setenv("SNAP_REAL_HOME", "/")
+ monkeypatch.setenv("SNAP_USER_COMMON", "/")
+ monkeypatch.setenv("SNAP_USER_DATA", "/")
+ monkeypatch.setenv("SNAP_INSTANCE_NAME", "testcraft")
+ monkeypatch.setenv("SNAP_INSTANCE_KEY", "")
+ yield config.SnapConfigHandler(default_app_metadata)
+
+
+@pytest.fixture(scope="module")
+def default_config_handler(default_app_metadata) -> config.DefaultConfigHandler:
+ return config.DefaultConfigHandler(default_app_metadata)
+
+
+@given(
+ item=strategies.text(alphabet=string.ascii_letters + "_", min_size=1),
+ content=strategies.text(
+ alphabet=strategies.characters(categories=["L", "M", "N", "P", "S", "Z"])
+ ),
+)
+def test_app_environment_handler(app_environment_handler, item: str, content: str):
+ with pytest.MonkeyPatch.context() as monkeypatch:
+ monkeypatch.setenv(f"TESTCRAFT_{item.upper()}", content)
+
+ assert app_environment_handler.get_raw(item) == content
+
+
+@given(
+ item=strategies.sampled_from(list(craft_application.ConfigModel.model_fields)),
+ content=strategies.text(
+ alphabet=strategies.characters(categories=["L", "M", "N", "P", "S", "Z"])
+ ),
+)
+def test_craft_environment_handler(craft_environment_handler, item: str, content: str):
+ with pytest.MonkeyPatch.context() as monkeypatch:
+ monkeypatch.setenv(f"CRAFT_{item.upper()}", content)
+
+ assert craft_environment_handler.get_raw(item) == content
+
+
+@pytest.mark.parametrize(("item", "content", "_"), CRAFT_APPLICATION_TEST_ENTRY_VALUES)
+@pytest.mark.usefixtures("_")
+def test_craft_environment_handler_success(
+ monkeypatch, craft_environment_handler, item: str, content: str
+):
+ monkeypatch.setenv(f"CRAFT_{item.upper()}", content)
+
+ assert craft_environment_handler.get_raw(item) == content
+
+
+@pytest.mark.parametrize(("item", "content", "_"), APP_SPECIFIC_TEST_ENTRY_VALUES)
+@pytest.mark.usefixtures("_")
+def test_craft_environment_handler_error(
+ monkeypatch, craft_environment_handler, item: str, content: str
+):
+ monkeypatch.setenv(f"CRAFT_{item.upper()}", content)
+
+ with pytest.raises(KeyError):
+ assert craft_environment_handler.get_raw(item) == content
+
+
+@pytest.mark.parametrize(
+ "error",
+ [
+ KeyError("SNAP_INSTANCE_NAME something or other"),
+ snaphelpers.SnapCtlError(
+ mock.Mock(returncode=1, stderr=io.BytesIO(b"snapd socket asplode"))
+ ),
+ ],
+)
+def test_snap_config_handler_create_error(mocker, default_app_metadata, error):
+ mocker.patch("snaphelpers.is_snap", return_value=True)
+ mock_snap_config = mocker.patch(
+ "snaphelpers.SnapConfig",
+ side_effect=error,
+ )
+ with pytest.raises(OSError, match="Not running as a snap."):
+ config.SnapConfigHandler(default_app_metadata)
+
+ mock_snap_config.assert_called_once_with()
+
+
+def test_snap_config_handler_not_snap(mocker, default_app_metadata):
+ mock_is_snap = mocker.patch("snaphelpers.is_snap", return_value=False)
+
+ with pytest.raises(OSError, match="Not running as a snap."):
+ config.SnapConfigHandler(default_app_metadata)
+
+    mock_is_snap.assert_called_once_with()
+
+
+@given(
+ item=strategies.text(alphabet=string.ascii_letters + "_", min_size=1),
+ content=strategies.text(
+ alphabet=strategies.characters(categories=["L", "M", "N", "P", "S", "Z"])
+ ),
+)
+def test_snap_config_handler(snap_config_handler, item: str, content: str):
+ snap_item = item.replace("_", "-")
+ with pytest_subprocess.FakeProcess.context() as fp, pytest.MonkeyPatch.context() as mp:
+ mp.setattr("snaphelpers._ctl.Popen", subprocess.Popen)
+ fp.register(
+ ["/usr/bin/snapctl", "get", "-d", snap_item],
+ stdout=json.dumps({snap_item: content}),
+ )
+ assert snap_config_handler.get_raw(item) == content
+
+
+@pytest.mark.parametrize(
+ ("item", "expected"),
+ [
+ ("verbosity_level", craft_cli.EmitterMode.BRIEF),
+ ("debug", False),
+ ("lxd_remote", "local"),
+ ("launchpad_instance", "production"),
+ # Application model items with defaults
+ ("my_default_str", "default"),
+ ("my_default_int", -1),
+ ("my_default_bool", True),
+ ("my_default_factory", {"dict": "yes"}),
+ ],
+)
+def test_default_config_handler_success(default_config_handler, item, expected):
+ assert default_config_handler.get_raw(item) == expected
+
+
+@pytest.mark.parametrize(
+ ("item", "environment_variables", "expected"),
+ [
+ ("verbosity_level", {}, craft_cli.EmitterMode.BRIEF),
+ *(
+ ("verbosity_level", {"TESTCRAFT_VERBOSITY_LEVEL": mode.name}, mode)
+ for mode in craft_cli.EmitterMode
+ ),
+ *(
+ ("verbosity_level", {"TESTCRAFT_VERBOSITY_LEVEL": mode.name.lower()}, mode)
+ for mode in craft_cli.EmitterMode
+ ),
+ *(
+ ("verbosity_level", {"CRAFT_VERBOSITY_LEVEL": mode.name}, mode)
+ for mode in craft_cli.EmitterMode
+ ),
+ *(
+ ("verbosity_level", {"CRAFT_VERBOSITY_LEVEL": mode.name.lower()}, mode)
+ for mode in craft_cli.EmitterMode
+ ),
+ *(
+ ("debug", {var: value}, True)
+ for var, value in itertools.product(
+ ["CRAFT_DEBUG", "TESTCRAFT_DEBUG"], ["true", "1", "yes", "Y"]
+ )
+ ),
+ *(
+ ("debug", {var: value}, False)
+ for var, value in itertools.product(
+ ["CRAFT_DEBUG", "TESTCRAFT_DEBUG"], ["false", "0", "no", "N"]
+ )
+ ),
+ *(
+ ("parallel_build_count", {var: str(value)}, value)
+ for var, value in itertools.product(
+ ["CRAFT_PARALLEL_BUILD_COUNT", "TESTCRAFT_PARALLEL_BUILD_COUNT"],
+ range(10),
+ )
+ ),
+ ],
+)
+def test_config_service_converts_type(
+ monkeypatch: pytest.MonkeyPatch,
+ fake_process: pytest_subprocess.FakeProcess,
+ fake_services,
+ item: str,
+ environment_variables: dict[str, str],
+ expected,
+):
+ monkeypatch.setattr("snaphelpers._ctl.Popen", subprocess.Popen)
+ for key, value in environment_variables.items():
+ monkeypatch.setenv(key, value)
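+    # Any snapctl call returns an empty config, so only the environment
+    # variables set above influence the result.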
+ fake_process.register(["/usr/bin/snapctl", fake_process.any()], stdout="{}")
+ assert fake_services.config.get(item) == expected
diff --git a/tests/unit/services/test_fetch.py b/tests/unit/services/test_fetch.py
new file mode 100644
index 00000000..9e9ee727
--- /dev/null
+++ b/tests/unit/services/test_fetch.py
@@ -0,0 +1,202 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+"""Unit tests for the FetchService.
+
+Note that most of the fetch-service functionality is already tested either in:
+- unit/test_fetch.py, for unit tests of the endpoint calls, or
+- integration/services/test_fetch.py, for full integration tests.
+
+As such, this module mostly unit-tests error paths coming from wrong usage of
+the FetchService class.
+"""
+import contextlib
+import json
+import pathlib
+import re
+import textwrap
+from datetime import datetime
+from unittest import mock
+from unittest.mock import MagicMock, call
+
+import craft_providers
+import pytest
+from craft_application import ProviderService, fetch, services
+from craft_application.models import BuildInfo
+from craft_application.services import fetch as service_module
+from craft_providers import bases
+from freezegun import freeze_time
+
+
+@pytest.fixture
+def fetch_service(app, fake_services, fake_project):
+ build_info = BuildInfo(
+ platform="amd64",
+ build_on="amd64",
+ build_for="amd64",
+ base=bases.BaseName("ubuntu", "24.04"),
+ )
+ return services.FetchService(
+ app,
+ fake_services,
+ project=fake_project,
+ build_plan=[build_info],
+ session_policy="strict",
+ )
+
+
+def test_create_session_already_exists(fetch_service):
+ fetch_service._session_data = fetch.SessionData(id="id", token="token")
+
+ expected = re.escape(
+ "create_session() called but there's already a live fetch-service session."
+ )
+ with pytest.raises(ValueError, match=expected):
+ fetch_service.create_session(instance=MagicMock())
+
+
+def test_teardown_session_no_session(fetch_service):
+ expected = re.escape(
+ "teardown_session() called with no live fetch-service session."
+ )
+
+ with pytest.raises(ValueError, match=expected):
+ fetch_service.teardown_session()
+
+
+@freeze_time(datetime.fromisoformat("2024-09-16T01:02:03.456789"))
+def test_create_project_manifest(
+ fetch_service, tmp_path, monkeypatch, manifest_data_dir
+):
+ manifest_path = tmp_path / "craft-project-manifest.yaml"
+ monkeypatch.setattr(service_module, "_PROJECT_MANIFEST_MANAGED_PATH", manifest_path)
+ monkeypatch.setenv("CRAFT_MANAGED_MODE", "1")
+
+ artifact = tmp_path / "my-artifact.file"
+ artifact.write_text("this is the generated artifact")
+
+ assert not manifest_path.exists()
+ fetch_service.create_project_manifest([artifact])
+
+ assert manifest_path.is_file()
+ expected = manifest_data_dir / "project-expected.yaml"
+
+ assert manifest_path.read_text() == expected.read_text()
+
+
+def test_create_project_manifest_not_managed(fetch_service, tmp_path, monkeypatch):
+ manifest_path = tmp_path / "craft-project-manifest.yaml"
+ monkeypatch.setattr(service_module, "_PROJECT_MANIFEST_MANAGED_PATH", manifest_path)
+ monkeypatch.setenv("CRAFT_MANAGED_MODE", "0")
+
+ artifact = tmp_path / "my-artifact.file"
+ artifact.write_text("this is the generated artifact")
+
+ assert not manifest_path.exists()
+ fetch_service.create_project_manifest([artifact])
+ assert not manifest_path.exists()
+
+
+def test_teardown_session_create_manifest(
+ fetch_service,
+ tmp_path,
+ mocker,
+ manifest_data_dir,
+ monkeypatch,
+ fake_project,
+ emitter,
+):
+ monkeypatch.chdir(tmp_path)
+
+ # A lot of mock setup here but the goal is to have the fake fetch-service
+ # session return the expected report, and the fake CraftManifest return the
+ # expected data.
+
+ # fetch.teardown_session returns a fake session report
+ fake_report = json.loads((manifest_data_dir / "session-report.json").read_text())
+ mocker.patch.object(fetch, "teardown_session", return_value=fake_report)
+
+ # temporarily_pull_file returns a fake project manifest file
+ project_manifest_path = manifest_data_dir / "project-expected.yaml"
+
+ @contextlib.contextmanager
+ def temporarily_pull_file(*, source, missing_ok):
+ assert source == service_module._PROJECT_MANIFEST_MANAGED_PATH
+ assert missing_ok
+ yield project_manifest_path
+
+ mock_instance = mock.Mock(spec=craft_providers.Executor)
+ mock_instance.temporarily_pull_file = temporarily_pull_file
+
+ fetch_service._session_data = {}
+ fetch_service._instance = mock_instance
+
+ fetch_service.teardown_session()
+
+ expected_file = manifest_data_dir / "craft-manifest-expected.json"
+ obtained_file = tmp_path / f"{fake_project.name}_{fake_project.version}_amd64.json"
+
+ assert obtained_file.read_text() + "\n" == expected_file.read_text()
+
+ expected_output = textwrap.dedent(
+ """\
+ The following artifacts were marked as rejected by the fetch-service:
+ - url: https://github.com:443/canonical/sphinx-docs-starter-pack.git/git-upload-pack
+ reasons:
+ - fetch is allowed only on a single ref
+ - fetch is only allowed with depth 1
+ - git repository does not contain a go.mod file
+ - url: https://proxy.golang.org:443/github.com/go-mmap/mmap/@v/v0.7.0.mod
+ reasons:
+ - the artefact format is unknown
+ - the request was not recognized by any format inspector
+ This build will fail on 'strict' fetch-service sessions.
+ """
+ )
+ for line in expected_output.splitlines():
+ emitter.assert_progress(
+ line,
+ permanent=True,
+ )
+
+
+@pytest.mark.parametrize("run_on_host", [True, False])
+def test_warning_experimental(mocker, fetch_service, run_on_host, emitter):
+ """The fetch-service warning should only be emitted when running on the host."""
+ mocker.patch.object(fetch, "start_service")
+ mocker.patch.object(fetch, "verify_installed")
+ mocker.patch.object(fetch, "_get_service_base_dir", return_value=pathlib.Path())
+ mocker.patch.object(ProviderService, "is_managed", return_value=not run_on_host)
+
+ fetch_service.setup()
+
+ logpath = fetch.get_log_filepath()
+ warning = (
+ "Warning: the fetch-service integration is experimental. "
+ f"Logging output to {str(logpath)!r}."
+ )
+ warning_emitted = call("message", warning) in emitter.interactions
+
+ assert warning_emitted == run_on_host
+
+
+def test_setup_managed(mocker, fetch_service):
+ """The fetch-service process should only be checked/started when running on the host."""
+ mock_start = mocker.patch.object(fetch, "start_service")
+ mocker.patch.object(ProviderService, "is_managed", return_value=True)
+
+ fetch_service.setup()
+
+ assert not mock_start.called
diff --git a/tests/unit/services/test_init.py b/tests/unit/services/test_init.py
new file mode 100644
index 00000000..3e1ffdbc
--- /dev/null
+++ b/tests/unit/services/test_init.py
@@ -0,0 +1,352 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+"""Unit tests for the InitService."""
+
+import os
+import pathlib
+import textwrap
+
+import jinja2
+import pytest
+import pytest_check
+import pytest_mock
+from craft_application import errors, services
+from craft_application.git import GitRepo, short_commit_sha
+from craft_application.models.constraints import MESSAGE_INVALID_NAME
+from craft_cli.pytest_plugin import RecordingEmitter
+
+
+@pytest.fixture
+def init_service(app_metadata, fake_services):
+ _init_service = services.InitService(app_metadata, fake_services)
+ _init_service.setup()
+ return _init_service
+
+
+@pytest.fixture
+def mock_loader(mocker, tmp_path):
+ """Mock the loader so it does not try to import `testcraft.templates`."""
+ return mocker.patch(
+ "craft_application.services.init.InitService._get_loader",
+ return_value=jinja2.FileSystemLoader(tmp_path / "templates"),
+ )
+
+
+def test_get_context(init_service, tmp_path: pathlib.Path):
+ project_dir = tmp_path / "my-project"
+ project_dir.mkdir()
+ context = init_service._get_context(name="my-project", project_dir=project_dir)
+
+ assert context == {"name": "my-project", "version": init_service.default_version}
+
+
+@pytest.fixture
+def empty_git_repository(tmp_path: pathlib.Path) -> GitRepo:
+ repository = tmp_path / "my-project-git"
+ repository.mkdir()
+ return GitRepo(repository)
+
+
+@pytest.fixture
+def git_repository_with_commit(tmp_path: pathlib.Path) -> tuple[GitRepo, str]:
+ repository = tmp_path / "my-project-git"
+ repository.mkdir()
+ git_repo = GitRepo(repository)
+ (repository / "some_file").touch()
+ git_repo.add_all()
+ commit_sha = git_repo.commit("feat: initialize repo")
+
+ return git_repo, commit_sha
+
+
+@pytest.fixture
+def project_dir(tmp_path: pathlib.Path) -> pathlib.Path:
+ project_dir = tmp_path / "my-project"
+ project_dir.mkdir()
+ return project_dir
+
+
+@pytest.fixture
+def templates_dir(tmp_path: pathlib.Path) -> pathlib.Path:
+ template_dir = tmp_path / "templates"
+ template_dir.mkdir()
+ return template_dir
+
+
+def test_get_context_of_empty_git_repository(
+ init_service, empty_git_repository: GitRepo
+):
+ context = init_service._get_context(
+ name="my-project",
+ project_dir=empty_git_repository.path,
+ )
+
+ assert context == {"name": "my-project", "version": init_service.default_version}
+
+
+def test_get_context_of_git_repository_with_commit(
+ init_service,
+ git_repository_with_commit: tuple[GitRepo, str],
+ emitter: RecordingEmitter,
+):
+ git_repo, commit_sha = git_repository_with_commit
+ expected_version = short_commit_sha(commit_sha)
+ context = init_service._get_context(
+ name="my-project",
+ project_dir=git_repo.path,
+ )
+ assert context == {"name": "my-project", "version": expected_version}
+ emitter.assert_debug(f"Discovered project version: {expected_version!r}")
+
+
+@pytest.mark.parametrize("create_dir", [True, False])
+def test_create_project_dir(init_service, tmp_path, emitter, create_dir):
+ project_dir = tmp_path / "my-project"
+ if create_dir:
+ project_dir.mkdir()
+
+ init_service._create_project_dir(project_dir=project_dir)
+
+ assert project_dir.is_dir()
+ emitter.assert_debug(f"Creating project directory {str(project_dir)!r}.")
+
+
+def test_get_templates_environment(init_service, mocker):
+ """Test that _get_templates_environment returns a Jinja2 environment."""
+ mock_package_loader = mocker.patch("jinja2.PackageLoader")
+ mock_environment = mocker.patch("jinja2.Environment")
+
+ environment = init_service._get_templates_environment(pathlib.Path("test-dir"))
+
+ mock_package_loader.assert_called_once_with("testcraft", "test-dir")
+ mock_environment.assert_called_once_with(
+ loader=mock_package_loader.return_value,
+ autoescape=False,
+ keep_trailing_newline=True,
+ optimized=False,
+ undefined=jinja2.StrictUndefined,
+ )
+ assert environment == mock_environment.return_value
+
+
+@pytest.mark.usefixtures("mock_loader")
+@pytest.mark.parametrize("project_file", [None, "file.txt"])
+def test_check_for_existing_files(init_service, tmp_path, project_file):
+ """No-op if there are no overlapping files."""
+ # create template
+ template_dir = tmp_path / "templates"
+ template_dir.mkdir()
+ (template_dir / "file.txt").touch()
+ # create project with a different file
+ project_dir = tmp_path / "project"
+ if project_file:
+ project_dir.mkdir()
+ (project_dir / "other-file.txt").touch()
+
+ init_service.check_for_existing_files(
+ project_dir=project_dir, template_dir=template_dir
+ )
+
+
+@pytest.mark.usefixtures("mock_loader")
+def test_check_for_existing_files_error(init_service, tmp_path):
+ """Error if there are overlapping files."""
+ expected_error = textwrap.dedent(
+ f"""\
+ Cannot initialise project in {str(tmp_path / 'project')!r} because it would overwrite existing files.
+ Existing files are:
+ - file.txt"""
+ )
+ # create template
+ template_dir = tmp_path / "templates"
+ template_dir.mkdir()
+ (template_dir / "file.txt").touch()
+ # create project with a different file
+ project_dir = tmp_path / "project"
+ project_dir.mkdir()
+ (project_dir / "file.txt").touch()
+
+ with pytest.raises(errors.InitError, match=expected_error):
+ init_service.check_for_existing_files(
+ project_dir=project_dir, template_dir=template_dir
+ )
+
+
+@pytest.mark.parametrize("template_filename", ["file1.txt", "nested/file2.txt"])
+def test_copy_template_file(init_service, tmp_path, template_filename):
+ # create template
+ template_dir = tmp_path / "templates"
+ template_file = template_dir / template_filename
+ template_file.parent.mkdir(parents=True, exist_ok=True)
+ template_file.write_text("content")
+ # create project with an existing file
+ project_dir = tmp_path / "project"
+ project_dir.mkdir()
+
+ init_service._copy_template_file(template_filename, template_dir, project_dir)
+
+ assert (project_dir / template_filename).read_text() == "content"
+
+
+@pytest.mark.parametrize("template_name", ["file1.txt", "nested/file2.txt"])
+def test_copy_template_file_exists(init_service, tmp_path, template_name, emitter):
+ """Do not overwrite existing files."""
+ # create template
+ template_dir = tmp_path / "templates"
+ template_file = template_dir / template_name
+ template_file.parent.mkdir(parents=True, exist_ok=True)
+ template_file.write_text("content")
+ # create project with an existing file
+ project_dir = tmp_path / "project"
+ (project_dir / template_name).parent.mkdir(parents=True, exist_ok=True)
+ (project_dir / template_name).write_text("existing content")
+
+ init_service._copy_template_file(template_name, template_dir, project_dir)
+
+ assert (project_dir / template_name).read_text() == "existing content"
+ emitter.assert_debug(
+ f"Skipping file {template_name} because it is already present."
+ )
+
+
+@pytest.mark.parametrize("filename", ["jinja-file.txt.j2", "nested/jinja-file.txt.j2"])
+@pytest.mark.usefixtures("mock_loader")
+def test_render_project_with_templates(filename, init_service, tmp_path):
+ """Render template files."""
+ project_dir = tmp_path / "project"
+ project_dir.mkdir()
+ template_dir = tmp_path / "templates"
+ (template_dir / filename).parent.mkdir(parents=True, exist_ok=True)
+ (template_dir / filename).write_text("{{ name }}")
+
+ environment = init_service._get_templates_environment(template_dir)
+ init_service._render_project(
+ environment=environment,
+ project_dir=project_dir,
+ template_dir=template_dir,
+ context={"name": "my-project", "version": init_service.default_version},
+ )
+
+ assert (project_dir / filename[:-3]).read_text() == "my-project"
+
+
+@pytest.mark.parametrize("filename", ["file.txt", "nested/file.txt"])
+@pytest.mark.usefixtures("mock_loader")
+def test_render_project_non_templates(filename, init_service, tmp_path):
+ """Copy non-template files when rendering a project."""
+ project_dir = tmp_path / "project"
+ project_dir.mkdir()
+ template_dir = tmp_path / "templates"
+ (template_dir / filename).parent.mkdir(parents=True, exist_ok=True)
+ (template_dir / filename).write_text("test content")
+
+ environment = init_service._get_templates_environment(template_dir)
+ init_service._render_project(
+ environment=environment,
+ project_dir=project_dir,
+ template_dir=template_dir,
+ context={"name": "my-project", "version": init_service.default_version},
+ )
+
+ assert (project_dir / filename).read_text() == "test content"
+
+
+@pytest.mark.usefixtures("mock_loader")
+def test_render_project_executable(init_service, tmp_path):
+ """Test that executable permissions are set on rendered files."""
+ project_dir = tmp_path / "project"
+ project_dir.mkdir()
+ template_dir = tmp_path / "templates"
+ template_dir.mkdir()
+ for filename in ["file-1.sh.j2", "file-2.sh"]:
+ (template_dir / filename).write_text("#!/bin/bash\necho 'Hello, world!'")
+ (template_dir / filename).chmod(0o755)
+ for filename in ["file-3.txt.j2", "file-4.txt"]:
+ (template_dir / filename).write_text("template content")
+
+ environment = init_service._get_templates_environment(template_dir)
+ init_service._render_project(
+ environment=environment,
+ project_dir=project_dir,
+ template_dir=template_dir,
+ context={"name": "my-project", "version": init_service.default_version},
+ )
+
+ pytest_check.is_true(os.access(project_dir / "file-1.sh", os.X_OK))
+ pytest_check.is_true(os.access(project_dir / "file-2.sh", os.X_OK))
+ pytest_check.is_false(os.access(project_dir / "file-3.txt", os.X_OK))
+ pytest_check.is_false(os.access(project_dir / "file-4.txt", os.X_OK))
+
+
+def test_initialise_project(
+ init_service: services.InitService,
+ project_dir: pathlib.Path,
+ templates_dir: pathlib.Path,
+ mocker: pytest_mock.MockerFixture,
+) -> None:
+ project_name = "test-project"
+ fake_env = {"templates": templates_dir}
+ fake_context = {"name": project_name, "version": init_service.default_version}
+ get_templates_mock = mocker.patch.object(
+ init_service, "_get_templates_environment", return_value=fake_env
+ )
+ create_project_dir_mock = mocker.patch.object(
+ init_service,
+ "_create_project_dir",
+ )
+ get_context_mock = mocker.patch.object(
+ init_service,
+ "_get_context",
+ return_value=fake_context,
+ )
+ render_project_mock = mocker.patch.object(
+ init_service,
+ "_render_project",
+ )
+ init_service.initialise_project(
+ project_dir=project_dir,
+ project_name=project_name,
+ template_dir=templates_dir,
+ )
+ get_templates_mock.assert_called_once_with(templates_dir)
+ create_project_dir_mock.assert_called_once_with(project_dir=project_dir)
+ get_context_mock.assert_called_once_with(name=project_name, project_dir=project_dir)
+ render_project_mock.assert_called_once_with(
+ fake_env, project_dir, templates_dir, fake_context
+ )
+
+
+@pytest.mark.parametrize(
+ "invalid_name", ["invalid--name", "-invalid-name", "invalid-name-", "0", "0-0", ""]
+)
+def test_validate_name_invalid(init_service, invalid_name):
+ with pytest.raises(errors.InitError, match=MESSAGE_INVALID_NAME):
+ init_service.validate_project_name(invalid_name)
+
+
+@pytest.mark.parametrize("valid_name", ["valid-name", "a", "a-a", "aaa", "0a"])
+def test_validate_name_valid(init_service, valid_name):
+ obtained = init_service.validate_project_name(valid_name)
+ assert obtained == valid_name
+
+
+def test_valid_name_invalid_use_default(init_service):
+ invalid_name = "invalid--name"
+ init_service._default_name = "my-default-name"
+
+ obtained = init_service.validate_project_name(invalid_name, use_default=True)
+ assert obtained == "my-default-name"
diff --git a/tests/unit/services/test_lifecycle.py b/tests/unit/services/test_lifecycle.py
index 99e3be54..5dcb6c15 100644
--- a/tests/unit/services/test_lifecycle.py
+++ b/tests/unit/services/test_lifecycle.py
@@ -23,10 +23,11 @@
import craft_parts
import craft_parts.callbacks
+import craft_platforms
import pytest
import pytest_check
from craft_application import errors, models, util
-from craft_application.errors import InvalidParameterError, PartsLifecycleError
+from craft_application.errors import PartsLifecycleError
from craft_application.models.project import BuildInfo
from craft_application.services import lifecycle
from craft_application.util import repositories
@@ -372,7 +373,7 @@ def test_get_primed_stage_packages(lifecycle_service):
BuildInfo(
"my-platform",
build_on="any",
- build_for="amd64",
+ build_for=craft_platforms.DebianArchitecture.AMD64,
base=bases.BaseName("ubuntu", "24.04"),
)
],
@@ -611,160 +612,6 @@ def test_lifecycle_package_repositories(
mock_callback.assert_called_once_with(repositories.install_overlay_repositories)
-# endregion
-
-# region parallel build count tests
-
-
-@pytest.mark.parametrize(
- ("env_dict", "cpu_count", "expected"),
- [
- (
- {},
- None,
- 1,
- ),
- (
- {},
- 100,
- 100,
- ),
- (
- {"TESTCRAFT_PARALLEL_BUILD_COUNT": "100"},
- 1,
- 100,
- ),
- (
- {"CRAFT_PARALLEL_BUILD_COUNT": "200"},
- 1,
- 200,
- ),
- (
- {
- "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
- },
- 50,
- 50,
- ),
- (
- {
- "CRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
- },
- 80,
- 80,
- ),
- (
- {
- "TESTCRAFT_PARALLEL_BUILD_COUNT": "100",
- "CRAFT_PARALLEL_BUILD_COUNT": "200",
- },
- 1,
- 100,
- ),
- (
- {
- "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
- "CRAFT_MAX_PARALLEL_BUILD_COUNT": "200",
- },
- 150,
- 100,
- ),
- (
- {
- "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
- "CRAFT_MAX_PARALLEL_BUILD_COUNT": "200",
- },
- None,
- 1,
- ),
- (
- {
- "TESTCRAFT_PARALLEL_BUILD_COUNT": "100",
- "CRAFT_PARALLEL_BUILD_COUNT": "200",
- "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "300",
- "CRAFT_MAX_PARALLEL_BUILD_COUNT": "400",
- },
- 150,
- 100,
- ),
- ],
-)
-def test_get_parallel_build_count(
- monkeypatch, mocker, fake_parts_lifecycle, env_dict, cpu_count, expected
-):
- mocker.patch("os.cpu_count", return_value=cpu_count)
- for env_dict_key, env_dict_value in env_dict.items():
- monkeypatch.setenv(env_dict_key, env_dict_value)
-
- assert fake_parts_lifecycle._get_parallel_build_count() == expected
-
-
-@pytest.mark.parametrize(
- ("env_dict", "cpu_count"),
- [
- (
- {
- "TESTCRAFT_PARALLEL_BUILD_COUNT": "abc",
- },
- 1,
- ),
- (
- {
- "CRAFT_PARALLEL_BUILD_COUNT": "-",
- },
- 1,
- ),
- (
- {
- "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "*",
- },
- 1,
- ),
- (
- {
- "CRAFT_MAX_PARALLEL_BUILD_COUNT": "$COUNT",
- },
- 1,
- ),
- (
- {
- "TESTCRAFT_PARALLEL_BUILD_COUNT": "0",
- },
- 1,
- ),
- (
- {
- "CRAFT_PARALLEL_BUILD_COUNT": "-1",
- },
- 1,
- ),
- (
- {
- "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "5.6",
- },
- 1,
- ),
- (
- {
- "CRAFT_MAX_PARALLEL_BUILD_COUNT": "inf",
- },
- 1,
- ),
- ],
-)
-def test_get_parallel_build_count_error(
- monkeypatch, mocker, fake_parts_lifecycle, env_dict, cpu_count
-):
- mocker.patch("os.cpu_count", return_value=cpu_count)
- for env_dict_key, env_dict_value in env_dict.items():
- monkeypatch.setenv(env_dict_key, env_dict_value)
-
- with pytest.raises(
- InvalidParameterError, match=r"^Value '.*' is invalid for parameter '.*'.$"
- ):
- fake_parts_lifecycle._get_parallel_build_count()
-
-
# endregion
# region project variables
diff --git a/tests/unit/services/test_provider.py b/tests/unit/services/test_provider.py
index 68b20454..d1842ee3 100644
--- a/tests/unit/services/test_provider.py
+++ b/tests/unit/services/test_provider.py
@@ -30,42 +30,136 @@
from craft_providers.actions.snap_installer import Snap
+@pytest.fixture
+def mock_provider(monkeypatch, provider_service):
+ mocked_provider = mock.MagicMock(spec=craft_providers.Provider)
+ monkeypatch.setattr(
+ provider_service,
+ "get_provider",
+ lambda name: mocked_provider, # noqa: ARG005 (unused argument)
+ )
+
+ return mocked_provider
+
+
@pytest.mark.parametrize(
- ("install_snap", "environment", "snaps"),
+ ("given_environment", "expected_environment"),
[
- (True, {}, [Snap(name="testcraft", channel="latest/stable", classic=True)]),
+ ({}, {}),
+ ({"http_proxy": "thing"}, {"http_proxy": "thing", "HTTP_PROXY": "thing"}),
+ ({"HTTP_PROXY": "thing"}, {"http_proxy": "thing", "HTTP_PROXY": "thing"}),
+ ({"ssh_proxy": "thing"}, {"ssh_proxy": "thing", "SSH_PROXY": "thing"}),
+ ({"no_proxy": "thing"}, {"no_proxy": "thing", "NO_PROXY": "thing"}),
+ ({"NO_PROXY": "thing"}, {"no_proxy": "thing", "NO_PROXY": "thing"}),
+ # Special case handled by upstream:
+ # https://docs.python.org/3/library/urllib.request.html#urllib.request.getproxies
(
- True,
+ {
+ "REQUEST_METHOD": "GET",
+ "HTTP_PROXY": "thing",
+ },
+ {},
+ ),
+ ( # But lower-case http_proxy is still allowed
+ {
+ "REQUEST_METHOD": "GET",
+ "http_proxy": "thing",
+ },
+ {"http_proxy": "thing", "HTTP_PROXY": "thing"},
+ ),
+ ],
+)
+def test_setup_proxy_environment(
+ monkeypatch: pytest.MonkeyPatch,
+ app_metadata,
+ fake_services,
+ fake_project,
+ fake_build_plan,
+ given_environment: dict[str, str],
+ expected_environment: dict[str, str],
+):
+ for var, value in given_environment.items():
+ monkeypatch.setenv(var, value)
+
+ expected_environment |= {"CRAFT_MANAGED_MODE": "1"}
+
+ service = provider.ProviderService(
+ app_metadata,
+ fake_services,
+ project=fake_project,
+ work_dir=pathlib.Path(),
+ build_plan=fake_build_plan,
+ )
+ service.setup()
+
+ assert service.environment == expected_environment
+
+
+@pytest.mark.parametrize(
+ ("environment", "snaps"),
+ [
+ pytest.param(
+ {},
+ [Snap(name="testcraft", channel="latest/stable", classic=True)],
+ id="install-from-store-default-channel",
+ ),
+ pytest.param(
{"CRAFT_SNAP_CHANNEL": "something"},
[Snap(name="testcraft", channel="something", classic=True)],
+ id="install-from-store-with-channel",
),
- (
- True,
- {"SNAP_NAME": "testcraft", "SNAP": "/snap/testcraft/x1"},
- [Snap(name="testcraft", channel=None, classic=True)],
+ pytest.param(
+ {
+ "SNAP_NAME": "testcraft",
+ "SNAP_INSTANCE_NAME": "testcraft_1",
+ "SNAP": "/snap/testcraft/x1",
+ },
+ [Snap(name="testcraft_1", channel=None, classic=True)],
+ id="inject-from-host",
),
- (
- True,
+ pytest.param(
{
"SNAP_NAME": "testcraft",
+ "SNAP_INSTANCE_NAME": "testcraft_1",
"SNAP": "/snap/testcraft/x1",
"CRAFT_SNAP_CHANNEL": "something",
},
+ [Snap(name="testcraft_1", channel=None, classic=True)],
+ id="inject-from-host-ignore-channel",
+ ),
+ pytest.param(
+ # SNAP_INSTANCE_NAME may not exist if snapd < 2.43 or feature is disabled
+ {
+ "SNAP_NAME": "testcraft",
+ "SNAP": "/snap/testcraft/x1",
+ },
[Snap(name="testcraft", channel=None, classic=True)],
+ id="missing-snap-instance-name",
),
- (False, {}, []),
- (False, {"CRAFT_SNAP_CHANNEL": "something"}, []),
- (
- False,
+ pytest.param(
+ # SNAP_INSTANCE_NAME may not exist if snapd < 2.43 or feature is disabled
{
"SNAP_NAME": "testcraft",
"SNAP": "/snap/testcraft/x1",
+ # CRAFT_SNAP_CHANNEL should be ignored
"CRAFT_SNAP_CHANNEL": "something",
},
- [],
+ [Snap(name="testcraft", channel=None, classic=True)],
+ id="missing-snap-instance-name-ignore-snap-channel",
+ ),
+ pytest.param(
+ # this can happen when running testcraft from a venv in a snapped terminal
+ {
+ "SNAP_NAME": "kitty",
+ "SNAP_INSTANCE_NAME": "kitty",
+ "SNAP": "/snap/kitty/x1",
+ },
+ [Snap(name="testcraft", channel="latest/stable", classic=True)],
+ id="running-inside-another-snap",
),
],
)
+@pytest.mark.parametrize("install_snap", [True, False])
def test_install_snap(
monkeypatch,
app_metadata,
@@ -90,7 +184,10 @@ def test_install_snap(
)
service.setup()
- assert service.snaps == snaps
+ if install_snap:
+ assert service.snaps == snaps
+ else:
+ assert service.snaps == []
@pytest.mark.parametrize(
@@ -284,7 +381,9 @@ def test_get_provider_from_platform(
("base_name", "base_class", "alias"),
[
(("ubuntu", "devel"), bases.BuilddBase, bases.BuilddBaseAlias.DEVEL),
+ (("ubuntu", "24.04"), bases.BuilddBase, bases.BuilddBaseAlias.NOBLE),
(("ubuntu", "22.04"), bases.BuilddBase, bases.BuilddBaseAlias.JAMMY),
+ (("ubuntu", "20.04"), bases.BuilddBase, bases.BuilddBaseAlias.FOCAL),
],
)
def test_get_base_buildd(
@@ -321,11 +420,14 @@ def test_get_base_packages(provider_service):
"base_name",
[
("ubuntu", "devel"),
+ ("ubuntu", "24.10"),
+ ("ubuntu", "24.04"),
("ubuntu", "22.04"),
+ ("ubuntu", "20.04"),
+ ("almalinux", "9"),
],
)
def test_instance(
- monkeypatch,
check,
emitter,
tmp_path,
@@ -334,13 +436,8 @@ def test_instance(
provider_service,
base_name,
allow_unstable,
+ mock_provider,
):
- mock_provider = mock.MagicMock(spec=craft_providers.Provider)
- monkeypatch.setattr(
- provider_service,
- "get_provider",
- lambda name: mock_provider, # noqa: ARG005 (unused argument)
- )
arch = util.get_host_architecture()
build_info = models.BuildInfo("foo", arch, arch, base_name)
@@ -370,6 +467,33 @@ def test_instance(
emitter.assert_progress("Launching managed .+ instance...", regex=True)
+@pytest.mark.parametrize("clean_existing", [True, False])
+def test_instance_clean_existing(
+ tmp_path,
+ provider_service,
+ mock_provider,
+ clean_existing,
+):
+ arch = util.get_host_architecture()
+ base_name = bases.BaseName("ubuntu", "24.04")
+ build_info = models.BuildInfo("foo", arch, arch, base_name)
+
+ with provider_service.instance(
+ build_info, work_dir=tmp_path, clean_existing=clean_existing
+ ) as _instance:
+ pass
+
+ clean_called = mock_provider.clean_project_environments.called
+ assert clean_called == clean_existing
+
+ if clean_existing:
+ work_dir_inode = tmp_path.stat().st_ino
+ expected_name = f"testcraft-full-project-on-{arch}-for-{arch}-{work_dir_inode}"
+ mock_provider.clean_project_environments.assert_called_once_with(
+ instance_name=expected_name
+ )
+
+
def test_load_bashrc(emitter):
"""Test that we are able to load the bashrc file from the craft-application package."""
bashrc = pkgutil.get_data("craft_application", "misc/instance_bashrc")
diff --git a/tests/unit/services/test_service_factory.py b/tests/unit/services/test_service_factory.py
index 1f4ca25c..223296e8 100644
--- a/tests/unit/services/test_service_factory.py
+++ b/tests/unit/services/test_service_factory.py
@@ -74,7 +74,8 @@ def __new__(cls, *args, **kwargs):
app_metadata, project=fake_project, PackageClass=MockPackageService
)
- factory.set_kwargs("package", **kwargs)
+ with pytest.warns(PendingDeprecationWarning):
+ factory.set_kwargs("package", **kwargs)
check.equal(factory.package, MockPackageService.mock_class.return_value)
with check:
@@ -83,6 +84,50 @@ def __new__(cls, *args, **kwargs):
)
+@pytest.mark.parametrize(
+ ("first_kwargs", "second_kwargs", "expected"),
+ [
+ ({}, {}, {}),
+ (
+ {"arg_1": None},
+ {"arg_b": "something"},
+ {"arg_1": None, "arg_b": "something"},
+ ),
+ (
+ {"overridden": False},
+ {"overridden": True},
+ {"overridden": True},
+ ),
+ ],
+)
+def test_update_kwargs(
+ app_metadata,
+ fake_project,
+ fake_package_service_class,
+ first_kwargs,
+ second_kwargs,
+ expected,
+):
+ class MockPackageService(fake_package_service_class):
+ mock_class = mock.Mock(return_value=mock.Mock(spec_set=services.PackageService))
+
+ def __new__(cls, *args, **kwargs):
+ return cls.mock_class(*args, **kwargs)
+
+ factory = services.ServiceFactory(
+ app_metadata, project=fake_project, PackageClass=MockPackageService
+ )
+
+ factory.update_kwargs("package", **first_kwargs)
+ factory.update_kwargs("package", **second_kwargs)
+
+ pytest_check.is_(factory.package, MockPackageService.mock_class.return_value)
+ with pytest_check.check():
+ MockPackageService.mock_class.assert_called_once_with(
+ app=app_metadata, services=factory, project=fake_project, **expected
+ )
+
+
def test_getattr_cached_service(monkeypatch, check, factory):
mock_getattr = mock.Mock(wraps=factory.__getattr__)
monkeypatch.setattr(services.ServiceFactory, "__getattr__", mock_getattr)
diff --git a/tests/unit/test_application.py b/tests/unit/test_application.py
index 8750ca75..e5f8500b 100644
--- a/tests/unit/test_application.py
+++ b/tests/unit/test_application.py
@@ -26,6 +26,7 @@
import subprocess
import sys
import textwrap
+from io import StringIO
from textwrap import dedent
from typing import Any
from unittest import mock
@@ -39,6 +40,7 @@
import pytest
import pytest_check
from craft_application import (
+ ProviderService,
application,
commands,
errors,
@@ -47,6 +49,11 @@
services,
util,
)
+from craft_application.commands import (
+ AppCommand,
+ get_lifecycle_command_group,
+ get_other_command_group,
+)
from craft_application.models import BuildInfo
from craft_application.util import (
get_host_architecture, # pyright: ignore[reportGeneralTypeIssues]
@@ -56,6 +63,8 @@
from craft_providers import bases, lxd
from overrides import override
+from tests.conftest import FakeApplication
+
EMPTY_COMMAND_GROUP = craft_cli.CommandGroup("FakeCommands", [])
BASIC_PROJECT_YAML = """
name: myproject
@@ -367,35 +376,6 @@ def test_app_metadata_default_mandatory_adoptable_fields():
assert app.mandatory_adoptable_fields == ["version"]
-class FakeApplication(application.Application):
- """An application class explicitly for testing. Adds some convenient test hooks."""
-
- platform: str = "unknown-platform"
- build_on: str = "unknown-build-on"
- build_for: str | None = "unknown-build-for"
-
- def set_project(self, project):
- self._Application__project = project
-
- @override
- def _extra_yaml_transform(
- self,
- yaml_data: dict[str, Any],
- *,
- build_on: str,
- build_for: str | None,
- ) -> dict[str, Any]:
- self.build_on = build_on
- self.build_for = build_for
-
- return yaml_data
-
-
-@pytest.fixture
-def app(app_metadata, fake_services):
- return FakeApplication(app_metadata, fake_services)
-
-
class FakePlugin(craft_parts.plugins.Plugin):
def __init__(self, properties, part_info):
pass
@@ -441,9 +421,9 @@ def mock_dispatcher(monkeypatch):
(
[[]],
[
+ EMPTY_COMMAND_GROUP,
commands.get_lifecycle_command_group(),
commands.get_other_command_group(),
- EMPTY_COMMAND_GROUP,
],
),
],
@@ -484,6 +464,51 @@ def test_merge_command_groups(app):
}
+def test_merge_default_commands(app):
+ """Merge commands with the same name within the same groups."""
+ stage_command = _create_command("stage")
+ extra_lifecycle_command = _create_command("extra")
+ init_command = _create_command("init")
+ extra_other_command = _create_command("extra")
+
+ app.add_command_group("Lifecycle", [stage_command, extra_lifecycle_command])
+ app.add_command_group("Other", [init_command, extra_other_command])
+ command_groups = app.command_groups
+
+ # check against hardcoded list because the order is important
+ assert command_groups == [
+ craft_cli.CommandGroup(
+ name="Lifecycle",
+ commands=[
+ commands.lifecycle.CleanCommand,
+ commands.lifecycle.PullCommand,
+ commands.lifecycle.BuildCommand,
+ stage_command,
+ commands.lifecycle.PrimeCommand,
+ commands.lifecycle.PackCommand,
+ extra_lifecycle_command,
+ ],
+ ordered=True,
+ ),
+ craft_cli.CommandGroup(
+ name="Other",
+ commands=[
+ init_command,
+ commands.other.VersionCommand,
+ extra_other_command,
+ ],
+ ordered=False,
+ ),
+ ]
+
+
+def test_merge_default_commands_only(app):
+ """Use default commands if no app commands are provided."""
+ command_groups = app.command_groups
+
+ assert command_groups == [get_lifecycle_command_group(), get_other_command_group()]
+
+
@pytest.mark.parametrize(
("provider_managed", "expected"),
[(True, pathlib.PurePosixPath("/tmp/testcraft.log")), (False, None)],
@@ -512,6 +537,7 @@ def test_run_managed_success(mocker, app, fake_project, fake_build_plan):
mock.call(
fake_build_plan[0],
work_dir=mock.ANY,
+ clean_existing=False,
)
in mock_provider.instance.mock_calls
)
@@ -640,8 +666,12 @@ def test_run_managed_multiple(app, fake_project):
app.run_managed(None, None)
- assert mock.call(info2, work_dir=mock.ANY) in mock_provider.instance.mock_calls
- assert mock.call(info1, work_dir=mock.ANY) in mock_provider.instance.mock_calls
+ extra_args = {
+ "work_dir": mock.ANY,
+ "clean_existing": False,
+ }
+ assert mock.call(info2, **extra_args) in mock_provider.instance.mock_calls
+ assert mock.call(info1, **extra_args) in mock_provider.instance.mock_calls
def test_run_managed_specified_arch(app, fake_project):
@@ -656,8 +686,12 @@ def test_run_managed_specified_arch(app, fake_project):
app.run_managed(None, "arch2")
- assert mock.call(info2, work_dir=mock.ANY) in mock_provider.instance.mock_calls
- assert mock.call(info1, work_dir=mock.ANY) not in mock_provider.instance.mock_calls
+ extra_args = {
+ "work_dir": mock.ANY,
+ "clean_existing": False,
+ }
+ assert mock.call(info2, **extra_args) in mock_provider.instance.mock_calls
+ assert mock.call(info1, **extra_args) not in mock_provider.instance.mock_calls
def test_run_managed_specified_platform(app, fake_project):
@@ -672,8 +706,52 @@ def test_run_managed_specified_platform(app, fake_project):
app.run_managed("a2", None)
- assert mock.call(info2, work_dir=mock.ANY) in mock_provider.instance.mock_calls
- assert mock.call(info1, work_dir=mock.ANY) not in mock_provider.instance.mock_calls
+ extra_args = {
+ "work_dir": mock.ANY,
+ "clean_existing": False,
+ }
+ assert mock.call(info2, **extra_args) in mock_provider.instance.mock_calls
+ assert mock.call(info1, **extra_args) not in mock_provider.instance.mock_calls
+
+
+def test_run_managed_empty_plan(app, fake_project):
+ app.set_project(fake_project)
+
+ app._build_plan = []
+ with pytest.raises(errors.EmptyBuildPlanError):
+ app.run_managed(None, None)
+
+
+@pytest.mark.parametrize(
+ ("parsed_args", "environ", "item", "expected"),
+ [
+ (argparse.Namespace(), {}, "build_for", None),
+ (argparse.Namespace(build_for=None), {}, "build_for", None),
+ (
+ argparse.Namespace(build_for=None),
+ {"CRAFT_BUILD_FOR": "arm64"},
+ "build_for",
+ "arm64",
+ ),
+ (
+ argparse.Namespace(build_for=None),
+ {"TESTCRAFT_BUILD_FOR": "arm64"},
+ "build_for",
+ "arm64",
+ ),
+ (
+ argparse.Namespace(build_for="riscv64"),
+ {"TESTCRAFT_BUILD_FOR": "arm64"},
+ "build_for",
+ "riscv64",
+ ),
+ ],
+)
+def test_get_arg_or_config(monkeypatch, app, parsed_args, environ, item, expected):
+ for var, content in environ.items():
+ monkeypatch.setenv(var, content)
+
+ assert app.get_arg_or_config(parsed_args, item) == expected
@pytest.mark.parametrize(
@@ -690,6 +768,7 @@ def test_run_managed_specified_platform(app, fake_project):
),
],
)
+@pytest.mark.usefixtures("emitter")
def test_get_dispatcher_error(
monkeypatch, check, capsys, app, mock_dispatcher, managed, error, exit_code, message
):
@@ -710,7 +789,7 @@ def test_craft_lib_log_level(app_metadata, fake_services):
"craft_parts",
"craft_providers",
"craft_store",
- "craft_application.remote",
+ "craft_application",
]
# The logging module is stateful and global, so first lets clear the logging level
@@ -1034,6 +1113,7 @@ def test_run_success_managed_inside_managed(
),
],
)
+@pytest.mark.usefixtures("emitter")
def test_run_error(
monkeypatch,
capsys,
@@ -1074,7 +1154,7 @@ def test_run_error(
"""\
Failed to run the build script for part 'foo'.
Recommended resolution: Check the build output and verify the project can work with the 'python' plugin.
- For more information, check out: http://craft-app.com/reference/plugins.html
+ For more information, check out: http://testcraft.example/reference/plugins.html
Full execution log:"""
),
),
@@ -1103,6 +1183,7 @@ def test_run_error_with_docs_url(
@pytest.mark.parametrize("error", [KeyError(), ValueError(), Exception()])
+@pytest.mark.usefixtures("emitter")
def test_run_error_debug(monkeypatch, mock_dispatcher, app, fake_project, error):
app.set_project(fake_project)
mock_dispatcher.load_command.side_effect = error
@@ -2187,9 +2268,9 @@ def test_build_planner_errors(tmp_path, monkeypatch, fake_services):
def test_emitter_docs_url(monkeypatch, mocker, app):
"""Test that the emitter is initialized with the correct url."""
- assert app.app.docs_url == "www.craft-app.com/docs/{version}"
+ assert app.app.docs_url == "www.testcraft.example/docs/{version}"
assert app.app.version == "3.14159"
- expected_url = "www.craft-app.com/docs/3.14159"
+ expected_url = "www.testcraft.example/docs/3.14159"
spied_init = mocker.spy(emit, "init")
@@ -2223,3 +2304,104 @@ def test_check_pro_requirement(
for call in pro_services.validate.call_args_list:
if validator_options is not None: # skip assert if default value is passed
assert call.kwargs["options"] == validator_options
+
+
+def test_clean_platform(monkeypatch, tmp_path, app_metadata, fake_services, mocker):
+ """Test that calling "clean --platform=x" correctly filters the build plan."""
+ data = util.safe_yaml_load(StringIO(BASIC_PROJECT_YAML))
+ # Put a few different platforms on the project
+ arch = util.get_host_architecture()
+ build_on_for = {
+ "build-on": [arch],
+ "build-for": [arch],
+ }
+ data["platforms"] = {
+ "plat1": build_on_for,
+ "plat2": build_on_for,
+ "plat3": build_on_for,
+ }
+ project_file = tmp_path / "testcraft.yaml"
+ project_file.write_text(util.dump_yaml(data))
+ monkeypatch.setattr(sys, "argv", ["testcraft", "clean", "--platform=plat2"])
+
+ mocked_clean = mocker.patch.object(ProviderService, "_clean_instance")
+ app = FakeApplication(app_metadata, fake_services)
+ app.project_dir = tmp_path
+
+ fake_services.project = None
+
+ app.run()
+
+ expected_info = models.BuildInfo(
+ platform="plat2",
+ build_on=arch,
+ build_for=arch,
+ base=bases.BaseName("ubuntu", "24.04"),
+ )
+ mocked_clean.assert_called_once_with(mocker.ANY, mocker.ANY, expected_info)
+
+
+class AppConfigCommand(AppCommand):
+
+ name: str = "app-config"
+ help_msg: str = "Help text"
+ overview: str = "Overview"
+
+ def fill_parser(self, parser: argparse.ArgumentParser) -> None:
+
+ name = self._app.name
+ parser.add_argument(
+ "app-name",
+ help=f"The name of the app, which is {name!r}.",
+ )
+
+
+@pytest.mark.usefixtures("emitter")
+def test_app_config_in_help(
+ monkeypatch,
+ capsys,
+ app,
+):
+ app.add_command_group("Test", [AppConfigCommand])
+ monkeypatch.setattr(sys, "argv", ["testcraft", "app-config", "-h"])
+
+ with pytest.raises(SystemExit):
+ app.run()
+
+ expected = "app-name: The name of the app, which is 'testcraft'."
+ _, err = capsys.readouterr()
+ assert expected in err
+
+
+@pytest.mark.parametrize(
+ "help_args",
+ [
+ pytest.param(["--help"], id="simple help"),
+ pytest.param(["help", "--all"], id="detailed help"),
+ ],
+)
+@pytest.mark.usefixtures("emitter")
+def test_doc_url_in_general_help(help_args, monkeypatch, capsys, app):
+ """General help messages contain a link to the documentation."""
+ monkeypatch.setattr(sys, "argv", ["testcraft", *help_args])
+
+ with pytest.raises(SystemExit):
+ app.run()
+
+ expected = "For more information about testcraft, check out: www.testcraft.example/docs/3.14159\n\n"
+ _, err = capsys.readouterr()
+ assert err.endswith(expected)
+
+
+@pytest.mark.usefixtures("emitter")
+def test_doc_url_in_command_help(monkeypatch, capsys, app):
+ """Command help messages contain a link to the command's doc page."""
+ app.add_command_group("Test", [AppConfigCommand])
+ monkeypatch.setattr(sys, "argv", ["testcraft", "app-config", "-h"])
+
+ with pytest.raises(SystemExit):
+ app.run()
+
+ expected = "For more information, check out: www.testcraft.example/docs/3.14159/reference/commands/app-config\n\n"
+ _, err = capsys.readouterr()
+ assert err.endswith(expected)
diff --git a/tests/unit/test_application_fetch.py b/tests/unit/test_application_fetch.py
new file mode 100644
index 00000000..718bba3b
--- /dev/null
+++ b/tests/unit/test_application_fetch.py
@@ -0,0 +1,123 @@
+# This file is part of craft_application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <https://www.gnu.org/licenses/>.
+"""Unit tests for the interaction between the Application and the FetchService."""
+from typing import Any
+from unittest import mock
+
+import craft_providers
+import pytest
+from craft_application import services
+from typing_extensions import override
+
+
+class FakeFetchService(services.FetchService):
+ """Fake FetchService that tracks calls"""
+
+ def __init__(self, *args, fetch_calls: list[str], **kwargs):
+ super().__init__(*args, **kwargs)
+ self.calls = fetch_calls
+
+ @override
+ def setup(self) -> None:
+ self.calls.append("setup")
+
+ @override
+ def create_session(
+ self,
+ instance: craft_providers.Executor, # (unused-method-argument)
+ ) -> dict[str, str]:
+ self.calls.append("create_session")
+ return {}
+
+ @override
+ def teardown_session(self) -> dict[str, Any]:
+ self.calls.append("teardown_session")
+ return {}
+
+ @override
+ def shutdown(self, *, force: bool = False) -> None:
+ self.calls.append(f"shutdown({force})")
+
+
+@pytest.mark.parametrize("fake_build_plan", [2], indirect=True)
+@pytest.mark.parametrize(
+ ("pack_args", "expected_calls", "expected_clean_existing"),
+ [
+ # No --enable-fetch-service: no calls to the FetchService
+ (
+ [],
+ [],
+ False,
+ ),
+ # --enable-fetch-service: full expected calls to the FetchService
+ (
+ ["--enable-fetch-service", "strict"],
+ [
+ # One call to setup
+ "setup",
+ # Two pairs of create/teardown sessions, for two builds
+ "create_session",
+ "teardown_session",
+ "create_session",
+ "teardown_session",
+ # One call to shut down (with `force`)
+ "shutdown(True)",
+ ],
+ True,
+ ),
+ ],
+)
+def test_run_managed_fetch_service(
+ app,
+ fake_project,
+ fake_build_plan,
+ monkeypatch,
+ pack_args,
+ expected_calls,
+ expected_clean_existing,
+):
+ """Test that the application calls the correct FetchService methods."""
+ mock_provider = mock.MagicMock(spec_set=services.ProviderService)
+ app.services.provider = mock_provider
+ app.set_project(fake_project)
+
+ expected_build_infos = 2
+ assert len(fake_build_plan) == expected_build_infos
+ app._build_plan = fake_build_plan
+
+ fetch_calls: list[str] = []
+ app.services.FetchClass = FakeFetchService
+ app.services.set_kwargs("fetch", fetch_calls=fetch_calls)
+
+ monkeypatch.setattr("sys.argv", ["testcraft", "pack", *pack_args])
+ app.run()
+
+ assert fetch_calls == expected_calls
+
+ # Check that the provider service was correctly instructed to clean, or not
+ # clean, the existing instance.
+
+ # Filter out the various calls to entering and exiting the instance()
+ # context manager.
+ instance_calls = [
+ call
+ for call in mock_provider.instance.mock_calls
+ if "work_dir" in call.kwargs and "clean_existing" in call.kwargs
+ ]
+
+ assert len(instance_calls) == len(fake_build_plan)
+ for call in instance_calls:
+ assert call.kwargs["clean_existing"] == expected_clean_existing
diff --git a/tests/unit/test_errors.py b/tests/unit/test_errors.py
index c7b46432..830efd5d 100644
--- a/tests/unit/test_errors.py
+++ b/tests/unit/test_errors.py
@@ -54,7 +54,7 @@
),
),
YamlError(
- "error parsing 'something.yaml'",
+ "error parsing 'something.yaml': I am a thing",
details='I am a thing\n in "bork", line 1, column 1:\n Hello there\n ^',
resolution="Ensure something.yaml contains valid YAML",
),
diff --git a/tests/unit/test_fetch.py b/tests/unit/test_fetch.py
new file mode 100644
index 00000000..92583464
--- /dev/null
+++ b/tests/unit/test_fetch.py
@@ -0,0 +1,299 @@
+# This file is part of craft-application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""Tests for fetch-service-related functions."""
+import re
+import subprocess
+from pathlib import Path
+from unittest import mock
+from unittest.mock import call
+
+import pytest
+import responses
+from craft_application import errors, fetch
+from craft_providers.lxd import LXDInstance
+from responses import matchers
+
+CONTROL = fetch._DEFAULT_CONFIG.control
+PROXY = fetch._DEFAULT_CONFIG.proxy
+AUTH = fetch._DEFAULT_CONFIG.auth
+
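+# Shared decorator: activate responses-mocking and require every registered request to be fired.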
+assert_requests = responses.activate(assert_all_requests_are_fired=True)
+
+
+@assert_requests
+def test_get_service_status_success():
+ responses.add(
+ responses.GET,
+ f"http://localhost:{CONTROL}/status",
+ json={"uptime": 10},
+ status=200,
+ )
+ status = fetch.get_service_status()
+ assert status == {"uptime": 10}
+
+
+@assert_requests
+def test_get_service_status_failure():
+ responses.add(
+ responses.GET,
+ f"http://localhost:{CONTROL}/status",
+ status=404,
+ )
+ expected = "Error with fetch-service GET: 404 Client Error"
+ with pytest.raises(errors.FetchServiceError, match=expected):
+ fetch.get_service_status()
+
+
+@pytest.mark.parametrize(
+ ("status", "json", "expected"),
+ [
+ (200, {"uptime": 10}, True),
+ (200, {"uptime": 10, "other-key": "value"}, True),
+ (200, {"other-key": "value"}, False),
+ (404, {"other-key": "value"}, False),
+ ],
+)
+@assert_requests
+def test_is_service_online(status, json, expected):
+ responses.add(
+ responses.GET,
+ f"http://localhost:{CONTROL}/status",
+ status=status,
+ json=json,
+ )
+ assert fetch.is_service_online() == expected
+
+
+def test_start_service(mocker, tmp_path):
+ mock_is_online = mocker.patch.object(fetch, "is_service_online", return_value=False)
+ mocker.patch.object(fetch, "_check_installed", return_value=True)
+ mock_base_dir = mocker.patch.object(
+ fetch, "_get_service_base_dir", return_value=tmp_path
+ )
+ mock_get_status = mocker.patch.object(
+ fetch, "get_service_status", return_value={"uptime": 10}
+ )
+
+ fake_cert, fake_key = tmp_path / "cert.crt", tmp_path / "key.pem"
+ mock_obtain_certificate = mocker.patch.object(
+ fetch, "_obtain_certificate", return_value=(fake_cert, fake_key)
+ )
+
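+    # Stand-in for the spawned fetch-service process; poll() returning None means it has not exited.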
+ mock_popen = mocker.patch.object(subprocess, "Popen")
+ mock_process = mock_popen.return_value
+ mock_process.poll.return_value = None
+
+ process = fetch.start_service()
+ assert process is mock_process
+
+ assert mock_is_online.called
+ assert mock_base_dir.called
+ assert mock_get_status.called
+ assert mock_obtain_certificate.called
+
+ popen_call = mock_popen.mock_calls[0]
+ assert popen_call == call(
+ [
+ fetch._FETCH_BINARY,
+ f"--control-port={CONTROL}",
+ f"--proxy-port={PROXY}",
+ f"--config={tmp_path / 'config'}",
+ f"--spool={tmp_path / 'spool'}",
+ f"--cert={fake_cert}",
+ f"--key={fake_key}",
+ "--permissive-mode",
+ "--idle-shutdown=300",
+ f"--log-file={tmp_path / 'craft-logs/fetch-service.log'}",
+ ],
+ env={
+ "FETCH_SERVICE_AUTH": AUTH,
+ },
+ stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT,
+ text=True,
+ )
+
+
+def test_start_service_already_up(mocker):
+ """If the fetch-service is already up then a new process is *not* created."""
+ mock_is_online = mocker.patch.object(fetch, "is_service_online", return_value=True)
+ mock_popen = mocker.patch.object(subprocess, "Popen")
+
+ assert fetch.start_service() is None
+
+ assert mock_is_online.called
+ assert not mock_popen.called
+
+
+def test_start_service_not_installed(mocker):
+ mocker.patch.object(fetch, "is_service_online", return_value=False)
+ mocker.patch.object(fetch, "_check_installed", return_value=False)
+
+ expected = re.escape("The 'fetch-service' snap is not installed.")
+ with pytest.raises(errors.FetchServiceError, match=expected):
+ fetch.start_service()
+
+
+@assert_requests
+@pytest.mark.parametrize(
+ ("strict", "expected_policy"), [(True, "strict"), (False, "permissive")]
+)
+def test_create_session(strict, expected_policy):
+ responses.add(
+ responses.POST,
+ f"http://localhost:{CONTROL}/session",
+ json={"id": "my-session-id", "token": "my-session-token"},
+ status=200,
+ match=[matchers.json_params_matcher({"policy": expected_policy})],
+ )
+
+ session_data = fetch.create_session(strict=strict)
+
+ assert session_data.session_id == "my-session-id"
+ assert session_data.token == "my-session-token"
+
+
+@assert_requests
+def test_teardown_session():
+ session_data = fetch.SessionData(id="my-session-id", token="my-session-token")
+
+ # Call to delete token
+ responses.delete(
+ f"http://localhost:{CONTROL}/session/{session_data.session_id}/token",
+ match=[matchers.json_params_matcher({"token": session_data.token})],
+ json={},
+ status=200,
+ )
+ # Call to get session report
+ responses.get(
+ f"http://localhost:{CONTROL}/session/{session_data.session_id}",
+ json={},
+ status=200,
+ )
+ # Call to delete session
+ responses.delete(
+ f"http://localhost:{CONTROL}/session/{session_data.session_id}",
+ json={},
+ status=200,
+ )
+ # Call to delete session resources
+ responses.delete(
+ f"http://localhost:{CONTROL}/resources/{session_data.session_id}",
+ json={},
+ status=200,
+ )
+
+ fetch.teardown_session(session_data)
+
+
+def test_configure_build_instance(mocker):
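+    # Patch out host-side lookups so no real gateway address or certificate is needed.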
+ mocker.patch.object(fetch, "_get_gateway", return_value="127.0.0.1")
+ mocker.patch.object(
+ fetch, "_obtain_certificate", return_value=("fake-cert.crt", "key.pem")
+ )
+
+ session_data = fetch.SessionData(id="my-session-id", token="my-session-token")
+ instance = mock.MagicMock(spec_set=LXDInstance)
+ assert isinstance(instance, LXDInstance)
+
+ expected_proxy = f"http://my-session-id:my-session-token@127.0.0.1:{PROXY}/"
+ expected_env = {
+ "http_proxy": expected_proxy,
+ "https_proxy": expected_proxy,
+ "REQUESTS_CA_BUNDLE": "/usr/local/share/ca-certificates/local-ca.crt",
+ "CARGO_HTTP_CAINFO": "/usr/local/share/ca-certificates/local-ca.crt",
+ "GOPROXY": "direct",
+ }
+
+ env = fetch.configure_instance(instance, session_data)
+ assert env == expected_env
+
+ default_args = {"check": True, "stdout": subprocess.PIPE, "stderr": subprocess.PIPE}
+
+ # Execution calls on the instance
+ assert instance.execute_run.mock_calls == [
+ call(
+ ["/bin/sh", "-c", "/usr/sbin/update-ca-certificates > /dev/null"],
+ **default_args,
+ ),
+ call(
+ ["mkdir", "-p", "/root/.pip"],
+ **default_args,
+ ),
+ call(
+ ["systemctl", "restart", "snapd"],
+ **default_args,
+ ),
+ call(
+ [
+ "snap",
+ "set",
+ "system",
+ f"proxy.http={expected_proxy}",
+ ],
+ **default_args,
+ ),
+ call(
+ [
+ "snap",
+ "set",
+ "system",
+ f"proxy.https={expected_proxy}",
+ ],
+ **default_args,
+ ),
+ call(
+ ["/bin/rm", "-Rf", "/var/lib/apt/lists"],
+ **default_args,
+ ),
+ call(
+ ["apt", "update"],
+ **default_args,
+ ),
+ ]
+
+ # Files pushed to the instance
+ assert instance.push_file.mock_calls == [
+ call(
+ source="fake-cert.crt",
+ destination=Path("/usr/local/share/ca-certificates/local-ca.crt"),
+ )
+ ]
+
+ assert instance.push_file_io.mock_calls == [
+ call(
+ destination=Path("/root/.pip/pip.conf"),
+ content=mocker.ANY,
+ file_mode="0644",
+ ),
+ call(
+ destination=Path("/etc/apt/apt.conf.d/99proxy"),
+ content=mocker.ANY,
+ file_mode="0644",
+ ),
+ ]
+
+
+def test_get_certificate_dir(mocker):
+ mocker.patch.object(
+ fetch,
+ "_get_service_base_dir",
+ return_value=Path("/home/user/snap/fetch-service/common"),
+ )
+ cert_dir = fetch._get_certificate_dir()
+
+ expected = Path("/home/user/snap/fetch-service/common/craft/fetch-certificate")
+ assert cert_dir == expected
diff --git a/tests/unit/test_secrets.py b/tests/unit/test_secrets.py
index 62f0d88a..22db8525 100644
--- a/tests/unit/test_secrets.py
+++ b/tests/unit/test_secrets.py
@@ -100,7 +100,7 @@ def test_secrets_cache(mocker, monkeypatch):
spied_run.assert_called_once_with("echo ${SECRET_1}")
-_SECRET = "$(HOST_SECRET:echo ${GIT_VERSION})" # noqa: S105 (this is not a password)
+_SECRET = "$(HOST_SECRET:echo ${GIT_VERSION})" # (this is not a password)
@pytest.mark.parametrize(
diff --git a/tests/unit/util/test_system.py b/tests/unit/util/test_system.py
new file mode 100644
index 00000000..343394d3
--- /dev/null
+++ b/tests/unit/util/test_system.py
@@ -0,0 +1,174 @@
+# This file is part of craft_application.
+#
+# Copyright 2024 Canonical Ltd.
+#
+# This program is free software: you can redistribute it and/or modify it
+# under the terms of the GNU Lesser General Public License version 3, as
+# published by the Free Software Foundation.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranties of MERCHANTABILITY,
+# SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
+# See the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License along
+# with this program. If not, see <http://www.gnu.org/licenses/>.
+"""Unit tests for system util module."""
+
+import pytest
+from craft_application import util
+from craft_application.errors import InvalidParameterError
+
+
+@pytest.mark.parametrize(
+ ("env_dict", "cpu_count", "expected"),
+ [
+ (
+ {},
+ None,
+ 1,
+ ),
+ (
+ {},
+ 100,
+ 100,
+ ),
+ (
+ {"TESTCRAFT_PARALLEL_BUILD_COUNT": "100"},
+ 1,
+ 100,
+ ),
+ (
+ {"CRAFT_PARALLEL_BUILD_COUNT": "200"},
+ 1,
+ 200,
+ ),
+ (
+ {
+ "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
+ },
+ 50,
+ 50,
+ ),
+ (
+ {
+ "CRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
+ },
+ 80,
+ 80,
+ ),
+ (
+ {
+ "TESTCRAFT_PARALLEL_BUILD_COUNT": "100",
+ "CRAFT_PARALLEL_BUILD_COUNT": "200",
+ },
+ 1,
+ 100,
+ ),
+ (
+ {
+ "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
+ "CRAFT_MAX_PARALLEL_BUILD_COUNT": "200",
+ },
+ 150,
+ 100,
+ ),
+ (
+ {
+ "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "100",
+ "CRAFT_MAX_PARALLEL_BUILD_COUNT": "200",
+ },
+ None,
+ 1,
+ ),
+ (
+ {
+ "TESTCRAFT_PARALLEL_BUILD_COUNT": "100",
+ "CRAFT_PARALLEL_BUILD_COUNT": "200",
+ "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "300",
+ "CRAFT_MAX_PARALLEL_BUILD_COUNT": "400",
+ },
+ 150,
+ 100,
+ ),
+ ],
+)
+def test_get_parallel_build_count(monkeypatch, mocker, env_dict, cpu_count, expected):
+ mocker.patch("os.cpu_count", return_value=cpu_count)
+ for env_dict_key, env_dict_value in env_dict.items():
+ monkeypatch.setenv(env_dict_key, env_dict_value)
+
+ assert util.get_parallel_build_count("testcraft") == expected
+
+
+@pytest.mark.parametrize(
+ ("env_dict", "cpu_count"),
+ [
+ pytest.param(
+ {
+ "TESTCRAFT_PARALLEL_BUILD_COUNT": "abc",
+ },
+ 1,
+ id="abc",
+ ),
+ pytest.param(
+ {
+ "CRAFT_PARALLEL_BUILD_COUNT": "-",
+ },
+ 1,
+ id="-",
+ ),
+ pytest.param(
+ {
+ "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "*",
+ },
+ 1,
+ id="*",
+ ),
+ pytest.param(
+ {
+ "CRAFT_MAX_PARALLEL_BUILD_COUNT": "$COUNT",
+ },
+ 1,
+ id="COUNT",
+ ),
+ pytest.param(
+ {
+ "TESTCRAFT_PARALLEL_BUILD_COUNT": "0",
+ },
+ 1,
+ id="0",
+ ),
+ pytest.param(
+ {
+ "CRAFT_PARALLEL_BUILD_COUNT": "-1",
+ },
+ 1,
+ id="-1",
+ ),
+ pytest.param(
+ {
+ "TESTCRAFT_MAX_PARALLEL_BUILD_COUNT": "5.6",
+ },
+ 1,
+ id="5.6",
+ ),
+ pytest.param(
+ {
+ "CRAFT_MAX_PARALLEL_BUILD_COUNT": "inf",
+ },
+ 1,
+ id="inf",
+ ),
+ ],
+)
+def test_get_parallel_build_count_error(monkeypatch, mocker, env_dict, cpu_count):
+ mocker.patch("os.cpu_count", return_value=cpu_count)
+ for env_dict_key, env_dict_value in env_dict.items():
+ monkeypatch.setenv(env_dict_key, env_dict_value)
+
+ with pytest.raises(
+ InvalidParameterError, match=r"^Value '.*' is invalid for parameter '.*'.$"
+ ):
+ util.get_parallel_build_count("testcraft")
diff --git a/tests/unit/util/test_yaml.py b/tests/unit/util/test_yaml.py
index af4ea812..a717d8ab 100644
--- a/tests/unit/util/test_yaml.py
+++ b/tests/unit/util/test_yaml.py
@@ -41,7 +41,7 @@ def test_safe_yaml_loader_valid(file):
def test_safe_yaml_loader_invalid(file):
with file.open() as f:
with pytest.raises(
- errors.YamlError, match=f"error parsing {file.name!r}"
+ errors.YamlError, match=f"error parsing {file.name!r}: "
) as exc_info:
yaml.safe_yaml_load(f)
@@ -50,6 +50,29 @@ def test_safe_yaml_loader_invalid(file):
pytest_check.is_in("found", exc_info.value.details)
+@pytest.mark.parametrize(
+ ("yaml_text", "error_msg"),
+ [
+ (
+ "thing: \nthing:\n",
+ "error parsing 'testcraft.yaml': found duplicate key 'thing'",
+ ),
+ (
+ "{{unhashable}}:",
+ "error parsing 'testcraft.yaml': found unhashable key",
+ ),
+ ],
+)
+def test_safe_yaml_loader_specific_error(yaml_text: str, error_msg: str):
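+    # Parse from an in-memory stream whose fake "name" appears in the error message.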
+ f = io.StringIO(yaml_text)
+ f.name = "testcraft.yaml"
+
+ with pytest.raises(errors.YamlError) as exc_info:
+ yaml.safe_yaml_load(f)
+
+ assert exc_info.value.args[0] == error_msg
+
+
@pytest.mark.parametrize(
("data", "kwargs", "expected"),
[
diff --git a/tox.ini b/tox.ini
index 1299849e..e4a28313 100644
--- a/tox.ini
+++ b/tox.ini
@@ -13,7 +13,7 @@ requires =
# renovate: datasource=pypi
tox-ignore-env-name-mismatch>=0.2.0.post2
# renovate: datasource=pypi
- tox-gh==1.3.1
+ tox-gh==1.4.4
# Allow tox to access the user's $TMPDIR environment variable if set.
# This workaround is required to avoid circular dependencies for TMPDIR,
# since tox will otherwise attempt to use the environment's TMPDIR variable.
@@ -119,7 +119,7 @@ commands = sphinx-build {posargs:-b html} -W {tox_root}/docs {tox_root}/docs/_bu
[testenv:autobuild-docs]
description = Build documentation with an autoupdating server
base = docs
-commands = sphinx-autobuild {posargs:-b html --open-browser --port 8080} -W --watch {tox_root}/craft_application {tox_root}/docs {tox_root}/docs/_build/html
+commands = sphinx-autobuild {posargs:-b html --open-browser --port 8080} -W --watch {tox_root}/craft_application {tox_root}/docs {tox_root}/docs/_build
[testenv:lint-docs]
description = Lint the documentation with sphinx-lint