diff --git a/src/macaron/__main__.py b/src/macaron/__main__.py index d23e76410..2679b9f28 100644 --- a/src/macaron/__main__.py +++ b/src/macaron/__main__.py @@ -22,6 +22,7 @@ from macaron.policy_engine.policy_engine import run_policy_engine, show_prelude from macaron.slsa_analyzer.analyzer import Analyzer from macaron.slsa_analyzer.git_service import GIT_SERVICES +from macaron.slsa_analyzer.package_registry import PACKAGE_REGISTRIES logger: logging.Logger = logging.getLogger(__name__) @@ -142,6 +143,8 @@ def perform_action(action_args: argparse.Namespace) -> None: try: for git_service in GIT_SERVICES: git_service.load_defaults() + for package_registry in PACKAGE_REGISTRIES: + package_registry.load_defaults() except ConfigurationError as error: logger.error(error) sys.exit(os.EX_USAGE) diff --git a/src/macaron/config/defaults.ini b/src/macaron/config/defaults.ini index 5dbaf1cb7..21db2f8d4 100644 --- a/src/macaron/config/defaults.ini +++ b/src/macaron/config/defaults.ini @@ -337,3 +337,20 @@ provenance_extensions = max_download_size = 70000000 # This is the timeout (in seconds) to run the SLSA verifier. timeout = 120 + +# Witness provenance. See: https://github.com/testifysec/witness. +[provenance.witness] +# The allowed values of the `predicateType` field in the provenance (data type: list). +# For more details, see: +# https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#statement +predicate_types = + https://witness.testifysec.com/attestation-collection/v0.1 +artifact_extensions = + jar + +# Package registries. +# [package_registry.jfrog.maven] +# In this example, the Maven repo can be accessed at `https://internal.registry.org/repo-name`. +# hostname = internal.registry.org +# repo = repo-name +# download_timeout = 120 diff --git a/src/macaron/slsa_analyzer/analyze_context.py b/src/macaron/slsa_analyzer/analyze_context.py index c7d120a24..7148f9278 100644 --- a/src/macaron/slsa_analyzer/analyze_context.py +++ b/src/macaron/slsa_analyzer/analyze_context.py @@ -19,6 +19,7 @@ from macaron.slsa_analyzer.slsa_req import ReqName, SLSAReq, get_requirements_dict from macaron.slsa_analyzer.specs.build_spec import BuildSpec from macaron.slsa_analyzer.specs.ci_spec import CIInfo +from macaron.slsa_analyzer.specs.package_registry_spec import PackageRegistryInfo logger: logging.Logger = logging.getLogger(__name__) @@ -38,6 +39,8 @@ class ChecksOutputs(TypedDict): # class uses inlined functions, which is not supported by Protocol. expectation: Expectation | None """The expectation to verify the provenance for this repository.""" + package_registries: list[PackageRegistryInfo] + """The package registries for this repository.""" class AnalyzeContext: @@ -82,6 +85,7 @@ def __init__( git_service=NoneGitService(), build_spec=BuildSpec(tools=[]), ci_services=[], + package_registries=[], is_inferred_prov=True, expectation=None, ) @@ -93,12 +97,19 @@ def provenances(self) -> dict: Returns ------- dict + A dictionary in which each key is a CI service's name and each value is + the corresponding provenance payload. 
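+            Provenance payloads discovered on package registries are included as well,
+            keyed by the package registry's name.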
""" try: ci_services = self.dynamic_data["ci_services"] result = {} for ci_info in ci_services: - result[ci_info["service"].name] = ci_info["provenances"] + result[ci_info["service"].name] = [payload.statement for payload in ci_info["provenances"]] + package_registry_entries = self.dynamic_data["package_registries"] + for package_registry_entry in package_registry_entries: + result[package_registry_entry.package_registry.name] = [ + provenance.payload.statement for provenance in package_registry_entry.provenances + ] return result except KeyError: return {} diff --git a/src/macaron/slsa_analyzer/analyzer.py b/src/macaron/slsa_analyzer/analyzer.py index 25d239fa5..5a60215b6 100644 --- a/src/macaron/slsa_analyzer/analyzer.py +++ b/src/macaron/slsa_analyzer/analyzer.py @@ -45,10 +45,13 @@ from macaron.slsa_analyzer.database_store import store_analyze_context_to_db from macaron.slsa_analyzer.git_service import GIT_SERVICES, BaseGitService from macaron.slsa_analyzer.git_service.base_git_service import NoneGitService +from macaron.slsa_analyzer.package_registry import PACKAGE_REGISTRIES from macaron.slsa_analyzer.provenance.expectations.expectation_registry import ExpectationRegistry +from macaron.slsa_analyzer.provenance.intoto import InTotoV01Payload from macaron.slsa_analyzer.registry import registry from macaron.slsa_analyzer.specs.ci_spec import CIInfo from macaron.slsa_analyzer.specs.inferred_provenance import Provenance +from macaron.slsa_analyzer.specs.package_registry_spec import PackageRegistryInfo logger: logging.Logger = logging.getLogger(__name__) @@ -808,7 +811,10 @@ def perform_checks(self, analyze_ctx: AnalyzeContext) -> dict[str, CheckResult]: ci_service.load_defaults() ci_service.set_api_client() - if ci_service.is_detected(analyze_ctx.component.repository.fs_path): + if ci_service.is_detected( + repo_path=analyze_ctx.component.repository.fs_path, + git_service=analyze_ctx.dynamic_data["git_service"], + ): logger.info("The repo uses %s CI service.", ci_service.name) # Parse configuration files and generate IRs. @@ -825,7 +831,20 @@ def perform_checks(self, analyze_ctx: AnalyzeContext) -> dict[str, CheckResult]: callgraph=callgraph, provenance_assets=[], latest_release={}, - provenances=[Provenance().payload], + provenances=[InTotoV01Payload(statement=Provenance().payload)], + ) + ) + + # Determine the package registries. + # We match the repo against package registries through build tools. + build_tools = analyze_ctx.dynamic_data["build_spec"]["tools"] + for package_registry in PACKAGE_REGISTRIES: + for build_tool in build_tools: + if package_registry.is_detected(build_tool): + analyze_ctx.dynamic_data["package_registries"].append( + PackageRegistryInfo( + build_tool=build_tool, + package_registry=package_registry, ) ) diff --git a/src/macaron/slsa_analyzer/asset/__init__.py b/src/macaron/slsa_analyzer/asset/__init__.py new file mode 100644 index 000000000..8f1f95395 --- /dev/null +++ b/src/macaron/slsa_analyzer/asset/__init__.py @@ -0,0 +1,40 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""This module defines classes and interfaces related to assets. + +Assets are files published from some build. 
+""" + +from typing import Protocol + + +class AssetLocator(Protocol): + """Interface of an asset locator.""" + + @property + def name(self) -> str: + """Get the name (file name) of the asset.""" + + @property + def url(self) -> str: + """Get the url to the asset.""" + + @property + def size_in_bytes(self) -> int: + """Get the size of the asset in bytes.""" + + def download(self, dest: str) -> bool: + """Download the asset. + + Parameters + ---------- + dest : str + The local destination where the asset is downloaded to. + Note that this must include the file name. + + Returns + ------- + bool + ``True`` if the asset is downloaded successfully; ``False`` if not. + """ diff --git a/src/macaron/slsa_analyzer/build_tool/gradle.py b/src/macaron/slsa_analyzer/build_tool/gradle.py index d602ca0d4..b7dbec571 100644 --- a/src/macaron/slsa_analyzer/build_tool/gradle.py +++ b/src/macaron/slsa_analyzer/build_tool/gradle.py @@ -8,7 +8,9 @@ import logging import os +import subprocess # nosec B404 +import macaron from macaron.config.defaults import defaults from macaron.config.global_config import global_config from macaron.dependency_analyzer import DependencyAnalyzer, DependencyAnalyzerError, DependencyTools @@ -135,3 +137,107 @@ def get_dep_analyzer(self, repo_path: str) -> CycloneDxGradle: ) raise DependencyAnalyzerError(f"Unsupported SBOM generator for Gradle: {tool_name}.") + + def get_gradle_exec(self, repo_path: str) -> str: + """Get the Gradle executable for the repo. + + Parameters + ---------- + repo_path: str + The absolute path to a repository containing Gradle projects. + + Returns + ------- + str + The absolute path to the Gradle executable. + """ + # We try to use the gradlew that comes with the repository first. + repo_gradlew = os.path.join(repo_path, "gradlew") + if os.path.isfile(repo_gradlew) and os.access(repo_gradlew, os.X_OK): + return repo_gradlew + + # We use Macaron's built-in gradlew as a fallback option. + return os.path.join(os.path.join(macaron.MACARON_PATH, "resources"), "gradlew") + + def get_group_ids(self, repo_path: str) -> set[str]: + """Get the group ids of all Gradle projects in a repository. + + A Gradle project is a directory containing a ``build.gradle`` file. + According to the Gradle's documentation, there is a one-to-one mapping between + a "project" and a ``build.gradle`` file. + See: https://docs.gradle.org/current/javadoc/org/gradle/api/Project.html. + + Note: This method makes the assumption that projects nested in a parent project + directory has the same group id with the parent. This behavior is consistent with + the behavior of the ``get_build_dirs`` method. + + Parameters + ---------- + repo_path: str + The absolute path to a repository containing Gradle projects. + + Returns + ------- + set[str] + The set of group ids of all Gradle projects in the repository. + """ + gradle_exec = self.get_gradle_exec(repo_path) + group_ids = set() + + for gradle_project_relpath in self.get_build_dirs(repo_path): + gradle_project_path = os.path.join(repo_path, gradle_project_relpath) + group_id = self.get_group_id( + gradle_exec=gradle_exec, + project_path=gradle_project_path, + ) + if group_id: + group_ids.add(group_id) + + return group_ids + + def get_group_id(self, gradle_exec: str, project_path: str) -> str | None: + """Get the group id of a Gradle project. + + A Gradle project is a directory containing a ``build.gradle`` file. + According to the Gradle's documentation, there is a one-to-one mapping between + a "project" and a ``build.gradle`` file. 
+ See: https://docs.gradle.org/current/javadoc/org/gradle/api/Project.html. + + Parameters + ---------- + gradle_exec: str + The absolute path to the Gradle executable. + + project_path : str + The absolute path to the Gradle project. + + Returns + ------- + str | None + The group id of the project, if exists. + """ + try: + result = subprocess.run( # nosec B603 + [gradle_exec, "properties"], + capture_output=True, + cwd=project_path, + check=False, + ) + except (subprocess.CalledProcessError, OSError) as error: + logger.debug("Could not capture the group id of the Gradle project at %s", project_path) + logger.debug("Error: %s", error) + return None + + if result.returncode == 0: + lines = result.stdout.decode().split("\n") + for line in lines: + if line.startswith("group: "): + group = line.replace("group: ", "") + # The value of group here can be an empty string. + if group: + return group + break + + logger.debug("Could not capture the group id of the repo at %s", project_path) + logger.debug("Stderr:\n%s", result.stderr) + return None diff --git a/src/macaron/slsa_analyzer/checks/build_as_code_check.py b/src/macaron/slsa_analyzer/checks/build_as_code_check.py index 4608fd83f..0500ccb79 100644 --- a/src/macaron/slsa_analyzer/checks/build_as_code_check.py +++ b/src/macaron/slsa_analyzer/checks/build_as_code_check.py @@ -5,6 +5,7 @@ import logging import os +from typing import Any from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column @@ -22,6 +23,7 @@ from macaron.slsa_analyzer.ci_service.gitlab_ci import GitLabCI from macaron.slsa_analyzer.ci_service.jenkins import Jenkins from macaron.slsa_analyzer.ci_service.travis import Travis +from macaron.slsa_analyzer.provenance.intoto import InTotoV01Payload from macaron.slsa_analyzer.registry import registry from macaron.slsa_analyzer.slsa_req import ReqName from macaron.slsa_analyzer.specs.ci_spec import CIInfo @@ -202,8 +204,12 @@ def _check_build_tool( else "However, could not find a passing workflow run.", ] check_result["justification"].extend(justification) - if ctx.dynamic_data["is_inferred_prov"] and ci_info["provenances"]: - predicate = ci_info["provenances"][0]["predicate"] + if ( + ctx.dynamic_data["is_inferred_prov"] + and ci_info["provenances"] + and isinstance(ci_info["provenances"][0], InTotoV01Payload) + ): + predicate: Any = ci_info["provenances"][0].statement["predicate"] predicate["buildType"] = f"Custom {ci_service.name}" predicate["builder"]["id"] = deploy_action_source_link predicate["invocation"]["configSource"]["uri"] = ( @@ -261,8 +267,12 @@ def _check_build_tool( else "However, could not find a passing workflow run.", ] check_result["justification"].extend(justification_cmd) - if ctx.dynamic_data["is_inferred_prov"] and ci_info["provenances"]: - predicate = ci_info["provenances"][0]["predicate"] + if ( + ctx.dynamic_data["is_inferred_prov"] + and ci_info["provenances"] + and isinstance(ci_info["provenances"][0], InTotoV01Payload) + ): + predicate = ci_info["provenances"][0].statement["predicate"] predicate["buildType"] = f"Custom {ci_service.name}" predicate["builder"]["id"] = bash_source_link predicate["invocation"]["configSource"]["uri"] = ( @@ -300,8 +310,13 @@ def _check_build_tool( f"The target repository uses build tool {build_tool.name}" + f" in {ci_service.name} using {deploy_kw} to deploy." 
) - if ctx.dynamic_data["is_inferred_prov"] and ci_info["provenances"]: - predicate = ci_info["provenances"][0]["predicate"] + + if ( + ctx.dynamic_data["is_inferred_prov"] + and ci_info["provenances"] + and isinstance(ci_info["provenances"][0], InTotoV01Payload) + ): + predicate = ci_info["provenances"][0].statement["predicate"] predicate["buildType"] = f"Custom {ci_service.name}" predicate["builder"]["id"] = config_name predicate["invocation"]["configSource"]["uri"] = ( diff --git a/src/macaron/slsa_analyzer/checks/build_service_check.py b/src/macaron/slsa_analyzer/checks/build_service_check.py index aacb7d201..a4165c978 100644 --- a/src/macaron/slsa_analyzer/checks/build_service_check.py +++ b/src/macaron/slsa_analyzer/checks/build_service_check.py @@ -5,6 +5,7 @@ import logging import os +from typing import Any from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column @@ -20,6 +21,7 @@ from macaron.slsa_analyzer.ci_service.gitlab_ci import GitLabCI from macaron.slsa_analyzer.ci_service.jenkins import Jenkins from macaron.slsa_analyzer.ci_service.travis import Travis +from macaron.slsa_analyzer.provenance.intoto import InTotoV01Payload from macaron.slsa_analyzer.registry import registry from macaron.slsa_analyzer.slsa_req import ReqName from macaron.slsa_analyzer.specs.ci_spec import CIInfo @@ -183,8 +185,12 @@ def _check_build_tool( ) ] - if ctx.dynamic_data["is_inferred_prov"] and ci_info["provenances"]: - predicate = ci_info["provenances"][0]["predicate"] + if ( + ctx.dynamic_data["is_inferred_prov"] + and ci_info["provenances"] + and isinstance(ci_info["provenances"][0], InTotoV01Payload) + ): + predicate: Any = ci_info["provenances"][0].statement["predicate"] predicate["buildType"] = f"Custom {ci_service.name}" predicate["builder"]["id"] = bash_source_link predicate["invocation"]["configSource"]["uri"] = ( @@ -219,8 +225,12 @@ def _check_build_tool( ) ] - if ctx.dynamic_data["is_inferred_prov"] and ci_info["provenances"]: - predicate = ci_info["provenances"][0]["predicate"] + if ( + ctx.dynamic_data["is_inferred_prov"] + and ci_info["provenances"] + and isinstance(ci_info["provenances"][0], InTotoV01Payload) + ): + predicate = ci_info["provenances"][0].statement["predicate"] predicate["buildType"] = f"Custom {ci_service.name}" predicate["builder"]["id"] = config_name predicate["invocation"]["configSource"]["uri"] = ( diff --git a/src/macaron/slsa_analyzer/checks/provenance_available_check.py b/src/macaron/slsa_analyzer/checks/provenance_available_check.py index f0e01d7a2..050f7f589 100644 --- a/src/macaron/slsa_analyzer/checks/provenance_available_check.py +++ b/src/macaron/slsa_analyzer/checks/provenance_available_check.py @@ -4,7 +4,9 @@ """This module contains the implementation of the Provenance Available check.""" import logging -import re +import os +import tempfile +from collections.abc import Sequence from sqlalchemy import ForeignKey from sqlalchemy.orm import Mapped, mapped_column @@ -12,35 +14,34 @@ from macaron.config.defaults import defaults from macaron.database.table_definitions import CheckFacts +from macaron.errors import MacaronError from macaron.slsa_analyzer.analyze_context import AnalyzeContext +from macaron.slsa_analyzer.asset import AssetLocator +from macaron.slsa_analyzer.build_tool.gradle import Gradle from macaron.slsa_analyzer.checks.base_check import BaseCheck from macaron.slsa_analyzer.checks.check_result import CheckResult, CheckResultType from macaron.slsa_analyzer.ci_service.base_ci_service import NoneCIService +from 
macaron.slsa_analyzer.ci_service.github_actions import GitHubActions +from macaron.slsa_analyzer.package_registry import JFrogMavenRegistry +from macaron.slsa_analyzer.package_registry.jfrog_maven_registry import JFrogMavenAsset +from macaron.slsa_analyzer.provenance.intoto import InTotoPayload +from macaron.slsa_analyzer.provenance.loader import LoadIntotoAttestationError, load_provenance_payload +from macaron.slsa_analyzer.provenance.witness import ( + WitnessProvenanceData, + extract_repo_url, + is_witness_provenance_payload, + load_witness_verifier_config, +) from macaron.slsa_analyzer.registry import registry from macaron.slsa_analyzer.slsa_req import ReqName +from macaron.slsa_analyzer.specs.ci_spec import CIInfo +from macaron.slsa_analyzer.specs.package_registry_spec import PackageRegistryInfo logger: logging.Logger = logging.getLogger(__name__) -def is_in_toto_file(file_name: str) -> bool: - """Return true if the file name matches the in-toto file format. - - The format for those files is ``.<6_bytes_key_id>.link``. - - Parameters - ---------- - file_name : str - The name of the file to check. - - Returns - ------- - bool - """ - in_toto_format = re.compile(r"\w+\.[0-9a-f]{6}\.link$") - if in_toto_format.match(file_name): - return True - - return False +class ProvenanceAvailableException(MacaronError): + """When there is an error while checking if a provenance is available.""" class ProvenanceAvailableFacts(CheckFacts): @@ -78,6 +79,354 @@ def __init__(self) -> None: ] super().__init__(check_id=check_id, description=description, depends_on=depends_on, eval_reqs=eval_reqs) + def find_provenance_assets_on_package_registries( + self, + repo_fs_path: str, + repo_remote_path: str, + package_registry_info_entries: list[PackageRegistryInfo], + provenance_extensions: list[str], + ) -> Sequence[AssetLocator]: + """Find provenance assets on package registries. + + Note that we stop going through package registries once we encounter a package + registry that does host provenance assets. + + Parameters + ---------- + repo_fs_path : str + The path to the repo on the local file system. + repo_remote_path : str + The URL to the remote repository. + package_registry_info_entries : list[PackageRegistryInfo] + A list of package registry info entries. + provenance_extensions : list[str] + A list of provenance extensions. Assets with these extensions are assumed + to be provenances. + + Returns + ------- + Sequence[AssetLocator] + A sequence of provenance assets found on one of the package registries. + This sequence is empty if there is no provenance assets found. + + Raises + ------ + ProvenanceAvailableException + If there is an error finding provenance assets that should result in failing + the check altogether. + """ + for package_registry_info_entry in package_registry_info_entries: + match package_registry_info_entry: + case PackageRegistryInfo( + build_tool=Gradle() as gradle, + package_registry=JFrogMavenRegistry() as jfrog_registry, + ) as info_entry: + # Triples of group id, artifact id, version. 
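+                    # For example, a GAV triple may look like
+                    # ("com.example", "example-core", "1.0.0") (illustrative values only).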
+ gavs: list[tuple[str, str, str]] = [] + + group_ids = gradle.get_group_ids(repo_fs_path) + for group_id in group_ids: + artifact_ids = jfrog_registry.fetch_artifact_ids(group_id) + + for artifact_id in artifact_ids: + latest_version = jfrog_registry.fetch_latest_version( + group_id, + artifact_id, + ) + if not latest_version: + continue + logger.info( + "Found the latest version %s for Maven package %s:%s", + latest_version, + group_id, + artifact_id, + ) + gavs.append((group_id, artifact_id, latest_version)) + + provenance_assets = [] + for group_id, artifact_id, version in gavs: + provenance_assets.extend( + jfrog_registry.fetch_assets( + group_id=group_id, + artifact_id=artifact_id, + version=version, + extensions=set(provenance_extensions), + ) + ) + + if not provenance_assets: + continue + + # We check the size of the provenance against a max valid size. + # This is a prevention against malicious denial-of-service attacks when an + # adversary provides a super large malicious file. + + # TODO: refactor the size checking in this check and the `provenance_l3_check` + # so that we have consistent behavior when checking provenance size. + # The schema of the ini config also needs changing. + max_valid_provenance_size = defaults.getint( + "slsa.verifier", + "max_download_size", + fallback=1000000, + ) + + for provenance_asset in provenance_assets: + if provenance_asset.size_in_bytes > max_valid_provenance_size: + msg = ( + f"The provenance asset {provenance_asset.name} unexpectedly exceeds the " + f"max valid file size of {max_valid_provenance_size} (bytes). " + "The check will not proceed due to potential security risks." + ) + logger.error(msg) + raise ProvenanceAvailableException(msg) + + provenances = self.obtain_witness_provenances( + provenance_assets=provenance_assets, + repo_remote_path=repo_remote_path, + ) + + witness_provenance_assets = [] + + logger.info("Found the following provenance assets:") + for provenance in provenances: + logger.info("* %s", provenance.asset.url) + witness_provenance_assets.append(provenance.asset) + + # Persist the provenance assets in the package registry info entry. + info_entry.provenances.extend(provenances) + return provenance_assets + + return [] + + def obtain_witness_provenances( + self, + provenance_assets: Sequence[AssetLocator], + repo_remote_path: str, + ) -> list[WitnessProvenanceData]: + """Obtain the witness provenances produced from a repository. + + Parameters + ---------- + provenance_assets : Sequence[Asset] + A list of provenance assets, some of which can be witness provenances. + repo_remote_path : str + The remote path of the repo being analyzed. + + Returns + ------- + list[WitnessProvenance] + A list of witness provenances that are produced by the repo being analyzed. + """ + provenances = [] + witness_verifier_config = load_witness_verifier_config() + + with tempfile.TemporaryDirectory() as temp_dir: + for provenance_asset in provenance_assets: + provenance_filepath = os.path.join(temp_dir, provenance_asset.name) + if not provenance_asset.download(provenance_filepath): + logger.debug( + "Could not download the provenance %s. 
Skip verifying...", + provenance_asset.name, + ) + continue + + try: + provenance_payload = load_provenance_payload(provenance_filepath) + except LoadIntotoAttestationError as error: + logger.error("Error while loading provenance: %s", error) + continue + + if not is_witness_provenance_payload( + provenance_payload, + witness_verifier_config.predicate_types, + ): + continue + + repo_url = extract_repo_url(provenance_payload) + if repo_url != repo_remote_path: + continue + + provenances.append( + WitnessProvenanceData( + asset=provenance_asset, + payload=provenance_payload, + ) + ) + + return provenances + + def download_provenances_from_jfrog_maven_package_registry( + self, + download_dir: str, + provenance_assets: list[JFrogMavenAsset], + jfrog_maven_registry: JFrogMavenRegistry, + ) -> dict[str, InTotoPayload]: + """Download provenances from a JFrog Maven package registry. + + Parameters + ---------- + download_dir : str + The directory where provenance assets are downloaded to. + provenance_assets : list[JFrogMavenAsset] + The list of provenance assets. + jfrog_maven_registry : JFrogMavenRegistry + The JFrog Maven registry instance. + + Returns + ------- + dict[str, InTotoStatement] + The downloaded provenance payloads. Each key is the URL where the provenance + asset is hosted and each value is the corresponding provenance payload. + """ + # Note: In certain cases, Macaron can find the same provenance file in + # multiple different places on a package registry. + # + # We may consider de-duplicating this file, so that we do not run the same + # steps on the same file multiple times. + + # Download the provenance assets and load them into dictionaries. + provenances = {} + + for prov_asset in provenance_assets: + provenance_filepath = os.path.join(download_dir, prov_asset.name) + if not jfrog_maven_registry.download_asset(prov_asset.url, provenance_filepath): + logger.debug( + "Could not download the provenance %s. Skip verifying...", + prov_asset.name, + ) + continue + + try: + provenances[prov_asset.url] = load_provenance_payload( + provenance_filepath, + ) + except LoadIntotoAttestationError as error: + logger.error("Error while loading provenance: %s", error) + continue + + return provenances + + def find_provenance_assets_on_ci_services( + self, + repo_full_name: str, + ci_info_entries: list[CIInfo], + provenance_extensions: list[str], + ) -> Sequence[AssetLocator]: + """Find provenance assets on CI services. + + Note that we stop going through the CI services once we encounter a CI service + that does host provenance assets. + + This method also loads the provenance payloads into the ``CIInfo`` object where + the provenance assets are found. + + Parameters + ---------- + repo_full_name: str + The full name of the repo, in the format of ``owner/repo_name``. + package_registry_info_entries : list[PackageRegistryInfo] + A list of package registry info entries. + provenance_extensions : list[str] + A list of provenance extensions. Assets with these extensions are assumed + to be provenances. + + Returns + ------- + Sequence[Asset] + A sequence of assets found on the given CI services. + """ + for ci_info in ci_info_entries: + ci_service = ci_info["service"] + + if isinstance(ci_service, NoneCIService): + continue + + if isinstance(ci_service, GitHubActions): + # Only get the latest release. 
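+                # The returned payload follows the GitHub "get the latest release" REST schema,
+                # so provenance assets attached to older releases are not discovered here.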
+ latest_release_payload = ci_service.api_client.get_latest_release(repo_full_name) + if not latest_release_payload: + logger.debug("Could not fetch the latest release payload from %s.", ci_service.name) + continue + + # Store the release data for other checks. + ci_info["latest_release"] = latest_release_payload + + # Get the provenance assets. + for prov_ext in provenance_extensions: + provenance_assets = ci_service.api_client.fetch_assets( + latest_release_payload, + ext=prov_ext, + ) + if not provenance_assets: + continue + + logger.info("Found the following provenance assets:") + for provenance_asset in provenance_assets: + logger.info("* %s", provenance_asset.url) + + # Store the provenance assets for other checks. + ci_info["provenance_assets"].extend(provenance_assets) + + # Download the provenance assets and load the provenance payloads. + self.download_provenances_from_github_actions_ci_service( + ci_info, + ) + + return ci_info["provenance_assets"] + + return [] + + def download_provenances_from_github_actions_ci_service(self, ci_info: CIInfo) -> None: + """Download provenances from GitHub Actions. + + Parameters + ---------- + ci_info: CIInfo, + A ``CIInfo`` instance that holds a GitHub Actions git service object. + """ + ci_service = ci_info["service"] + prov_assets = ci_info["provenance_assets"] + + with tempfile.TemporaryDirectory() as temp_path: + downloaded_provs = [] + for prov_asset in prov_assets: + # Check the size before downloading. + if prov_asset.size_in_bytes > defaults.getint( + "slsa.verifier", + "max_download_size", + fallback=1000000, + ): + logger.info( + "Skip verifying the provenance %s: asset size too large.", + prov_asset.name, + ) + continue + + provenance_filepath = os.path.join(temp_path, prov_asset.name) + + if not ci_service.api_client.download_asset( + prov_asset.url, + provenance_filepath, + ): + logger.debug( + "Could not download the provenance %s. Skip verifying...", + prov_asset.name, + ) + continue + + # Read the provenance. + try: + payload = load_provenance_payload(provenance_filepath) + except LoadIntotoAttestationError as error: + logger.error("Error logging provenance: %s", error) + continue + + # Add the provenance file. + downloaded_provs.append(payload) + + # Persist the provenance payloads into the CIInfo object. + ci_info["provenances"] = downloaded_provs + def run_check(self, ctx: AnalyzeContext, check_result: CheckResult) -> CheckResultType: """Implement the check in this method. @@ -93,42 +442,46 @@ def run_check(self, ctx: AnalyzeContext, check_result: CheckResult) -> CheckResu CheckResultType The result type of the check (e.g. PASSED). """ - ci_services = ctx.dynamic_data["ci_services"] - for ci_info in ci_services: - ci_service = ci_info["service"] - # Checking if a CI service is discovered for this repo. - if isinstance(ci_service, NoneCIService): - continue - # Only get the latest release. - release = ci_service.api_client.get_latest_release(ctx.component.repository.full_name) - if release: - # Store the release data for other checks. - ci_info["latest_release"] = release + provenance_extensions = defaults.get_list( + "slsa.verifier", + "provenance_extensions", + fallback=["intoto.jsonl"], + ) - # Get the provenance assets. - for prov_ext in defaults.get_list("slsa.verifier", "provenance_extensions"): - assets = ci_service.api_client.get_assets(release, ext=prov_ext) - if not assets: - continue + # We look for the provenances in the package registries first, then CI services. + # (Note the short-circuit evaluation with OR.) 
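+        # That is, if any package registry hosts provenance assets, the CI services are
+        # not examined at all.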
+ try: + provenance_assets = self.find_provenance_assets_on_package_registries( + repo_fs_path=ctx.component.repository.fs_path, + repo_remote_path=ctx.component.repository.remote_path, + package_registry_info_entries=ctx.dynamic_data["package_registries"], + provenance_extensions=provenance_extensions, + ) or self.find_provenance_assets_on_ci_services( + repo_full_name=ctx.component.repository.full_name, + ci_info_entries=ctx.dynamic_data["ci_services"], + provenance_extensions=provenance_extensions, + ) + except ProvenanceAvailableException as error: + check_result["justification"] = [str(error)] + return CheckResultType.FAILED + + if provenance_assets: + ctx.dynamic_data["is_inferred_prov"] = False + + check_result["justification"].append("Found provenance in release assets:") + check_result["justification"].extend( + [asset.name for asset in provenance_assets], + ) + # We only write the result to the database when the check is PASSED. + check_result["result_tables"] = [ + ProvenanceAvailableFacts( + asset_name=asset.name, + asset_url=asset.url, + ) + for asset in provenance_assets + ] + return CheckResultType.PASSED - # Store the provenance assets for other checks. - ci_info["provenance_assets"].extend(assets) - - check_result["justification"].append("Found provenance in release assets:") - check_result["justification"].extend([asset["name"] for asset in assets]) - asset_results = [ - { - "asset_name": asset["name"], - "asset_url": asset["url"], - } - for asset in assets - ] - check_result["result_tables"] = [ProvenanceAvailableFacts(**res) for res in asset_results] - - return CheckResultType.PASSED - - else: - logger.info("Could not find any release for %s in the repository.", ci_service.name) check_result["justification"].append("Could not find any SLSA provenances.") return CheckResultType.FAILED diff --git a/src/macaron/slsa_analyzer/checks/provenance_l3_check.py b/src/macaron/slsa_analyzer/checks/provenance_l3_check.py index 04dec4841..def379f0d 100644 --- a/src/macaron/slsa_analyzer/checks/provenance_l3_check.py +++ b/src/macaron/slsa_analyzer/checks/provenance_l3_check.py @@ -24,11 +24,14 @@ from macaron.config.global_config import global_config from macaron.database.table_definitions import CheckFacts, HashDigest, Provenance, ReleaseArtifact from macaron.slsa_analyzer.analyze_context import AnalyzeContext +from macaron.slsa_analyzer.asset import AssetLocator from macaron.slsa_analyzer.checks.base_check import BaseCheck from macaron.slsa_analyzer.checks.check_result import CheckResult, CheckResultType from macaron.slsa_analyzer.ci_service.base_ci_service import BaseCIService, NoneCIService from macaron.slsa_analyzer.git_url import get_repo_dir_name -from macaron.slsa_analyzer.provenance.loader import ProvPayloadLoader, SLSAProvenanceError +from macaron.slsa_analyzer.provenance.intoto import InTotoV01Payload, v01 +from macaron.slsa_analyzer.provenance.intoto.errors import InTotoAttestationError, UnsupportedInTotoVersionError +from macaron.slsa_analyzer.provenance.loader import load_provenance_payload from macaron.slsa_analyzer.registry import registry from macaron.slsa_analyzer.slsa_req import ReqName @@ -108,12 +111,12 @@ def __init__(self) -> None: result_on_skip=CheckResultType.FAILED, ) - def _size_large(self, asset_size: str) -> bool: + def _size_large(self, asset_size: int) -> bool: """Check the size of the asset.""" - return int(asset_size) > defaults.getint("slsa.verifier", "max_download_size", fallback=1000000) + return asset_size > defaults.getint("slsa.verifier", 
"max_download_size", fallback=1000000) def _verify_slsa( - self, macaron_path: str, temp_path: str, prov_asset: dict, asset_name: str, repository_url: str + self, macaron_path: str, temp_path: str, prov_asset: AssetLocator, asset_name: str, repository_url: str ) -> _VerifyArtifactResult: """Run SLSA verifier to verify the artifact.""" source_path = get_repo_dir_name(repository_url, sanitize=False) @@ -128,7 +131,7 @@ def _verify_slsa( "verify-artifact", os.path.join(temp_path, asset_name), "--provenance-path", - os.path.join(temp_path, prov_asset["name"]), + os.path.join(temp_path, prov_asset.name), "--source-uri", source_path, ] @@ -221,7 +224,11 @@ def _validate_path_traversal(path: str) -> bool: return False def _find_asset( - self, subject: dict, all_assets: list[dict[str, str]], temp_path: str, ci_service: BaseCIService + self, + subject: v01.InTotoSubject, + all_assets: list[dict[str, str]], + temp_path: str, + ci_service: BaseCIService, ) -> dict | None: """Find the artifacts that appear in the provenance subject. @@ -314,21 +321,28 @@ class Feedback(NamedTuple): downloaded_provs = [] for prov_asset in prov_assets: # Check the size before downloading. - if self._size_large(prov_asset["size"]): - logger.info("Skip verifying the provenance %s: asset size too large.", prov_asset["name"]) + if self._size_large(prov_asset.size_in_bytes): + logger.info("Skip verifying the provenance %s: asset size too large.", prov_asset.name) continue if not ci_service.api_client.download_asset( - prov_asset["url"], os.path.join(temp_path, prov_asset["name"]) + prov_asset.url, os.path.join(temp_path, prov_asset.name) ): - logger.info("Could not download the provenance %s. Skip verifying...", prov_asset["name"]) + logger.info("Could not download the provenance %s. Skip verifying...", prov_asset.name) continue # Read the provenance. - payload = ProvPayloadLoader.load(os.path.join(temp_path, prov_asset["name"])) + provenance_payload = load_provenance_payload( + os.path.join(temp_path, prov_asset.name), + ) + + if not isinstance(provenance_payload, InTotoV01Payload): + raise UnsupportedInTotoVersionError( + f"The provenance asset '{prov_asset.name}' is under an unsupported in-toto version." + ) # Add the provenance file. - downloaded_provs.append(payload) + downloaded_provs.append(provenance_payload.statement) # Output provenance prov = Provenance() @@ -336,14 +350,14 @@ class Feedback(NamedTuple): # implemented ensure the provenance commit matches the actual release analyzed prov.version = "0.2" prov.release_commit_sha = "" - prov.provenance_json = json.dumps(payload) + prov.provenance_json = json.dumps(provenance_payload.statement) prov.release_tag = ci_info["latest_release"]["tag_name"] prov.component = ctx.component check_result["result_tables"].append(prov) # Iterate through the subjects and verify. - for subject in payload["subject"]: + for subject in provenance_payload.statement["subject"]: sub_asset = self._find_asset(subject, all_assets, temp_path, ci_service) result: None | _VerifyArtifactResult = None @@ -390,7 +404,7 @@ class Feedback(NamedTuple): all_feedback.append( Feedback( ci_service_name=ci_service.name, - asset_url=prov_asset["url"], + asset_url=prov_asset.url, verify_result=result, ) ) @@ -410,13 +424,7 @@ class Feedback(NamedTuple): digest.artifact = artifact check_result["result_tables"].append(digest) - if downloaded_provs: - # Store the provenance available results for other checks. - # Note: this flag should only be turned off here. 
- ctx.dynamic_data["is_inferred_prov"] = False - ci_info["provenances"] = downloaded_provs - - except (OSError, SLSAProvenanceError) as error: + except (OSError, InTotoAttestationError) as error: logger.error(" %s: %s.", self.check_id, error) check_result["justification"].append("Could not verify level 3 provenance.") return CheckResultType.FAILED diff --git a/src/macaron/slsa_analyzer/checks/provenance_l3_content_check.py b/src/macaron/slsa_analyzer/checks/provenance_l3_content_check.py index d32178cc9..1d17286d5 100644 --- a/src/macaron/slsa_analyzer/checks/provenance_l3_content_check.py +++ b/src/macaron/slsa_analyzer/checks/provenance_l3_content_check.py @@ -1,7 +1,7 @@ # Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. -"""This module checks if a SLSA provenances conforms to a given expectation.""" +"""This module checks if a SLSA provenance conforms to a given expectation.""" import logging @@ -10,9 +10,11 @@ from macaron.slsa_analyzer.checks.base_check import BaseCheck, CheckResultType from macaron.slsa_analyzer.checks.check_result import CheckResult from macaron.slsa_analyzer.ci_service.base_ci_service import NoneCIService -from macaron.slsa_analyzer.provenance.loader import SLSAProvenanceError +from macaron.slsa_analyzer.package_registry import JFrogMavenRegistry +from macaron.slsa_analyzer.provenance.loader import LoadIntotoAttestationError from macaron.slsa_analyzer.registry import registry from macaron.slsa_analyzer.slsa_req import ReqName +from macaron.slsa_analyzer.specs.package_registry_spec import PackageRegistryInfo logger: logging.Logger = logging.getLogger(__name__) @@ -27,7 +29,7 @@ def __init__(self) -> None: """Initialize instance.""" check_id = "mcn_provenance_expectation_1" description = "Check whether the SLSA provenance for the produced artifact conforms to the expected value." - depends_on: list[tuple[str, CheckResultType]] = [("mcn_provenance_level_three_1", CheckResultType.PASSED)] + depends_on: list[tuple[str, CheckResultType]] = [("mcn_provenance_available_1", CheckResultType.PASSED)] eval_reqs = [ReqName.EXPECTATION] super().__init__( check_id=check_id, @@ -58,7 +60,35 @@ def run_check(self, ctx: AnalyzeContext, check_result: CheckResult) -> CheckResu logger.info("%s check was unable to find any expectations.", self.check_id) return CheckResultType.UNKNOWN + package_registry_info_entries = ctx.dynamic_data["package_registries"] ci_services = ctx.dynamic_data["ci_services"] + + # Check the provenances in package registries. + for package_registry_info_entry in package_registry_info_entries: + match package_registry_info_entry: + case PackageRegistryInfo( + package_registry=JFrogMavenRegistry(), + ) as info_entry: + for provenance in info_entry.provenances: + try: + logger.info( + "Validating the provenance %s against %s.", + provenance.asset.url, + expectation, + ) + + if expectation.validate(provenance.payload): + check_result["result_tables"].append(expectation) # type: ignore[arg-type] + check_result["justification"].append( + f"Successfully verified the expectation against the provenance {provenance.asset.url}." 
+ ) + return CheckResultType.PASSED + + except (LoadIntotoAttestationError, ExpectationRuntimeError) as error: + logger.error(error) + check_result["justification"].append("Could not verify expectation against the provenance.") + return CheckResultType.FAILED + for ci_info in ci_services: ci_service = ci_info["service"] # Checking if a CI service is discovered for this repo. @@ -72,7 +102,7 @@ def run_check(self, ctx: AnalyzeContext, check_result: CheckResult) -> CheckResu for payload in ci_info["provenances"]: try: - logger.info("Validating the provenance against %s.", expectation) + logger.info("Validating a provenance from %s against %s.", ci_info["service"].name, expectation) # TODO: Is it worth returning more information rather than returning early? if expectation.validate(payload): @@ -84,12 +114,12 @@ def run_check(self, ctx: AnalyzeContext, check_result: CheckResult) -> CheckResu ) return CheckResultType.PASSED - except (SLSAProvenanceError, ExpectationRuntimeError) as error: + except (LoadIntotoAttestationError, ExpectationRuntimeError) as error: logger.error(error) check_result["justification"].append("Could not verify expectation against the provenance.") return CheckResultType.FAILED - check_result["justification"].append("Could not verify expectation against the provenance.") + check_result["justification"].append("Failed to successfully verify expectation against any provenance files.") return CheckResultType.FAILED diff --git a/src/macaron/slsa_analyzer/checks/provenance_witness_l1_check.py b/src/macaron/slsa_analyzer/checks/provenance_witness_l1_check.py new file mode 100644 index 000000000..9dd7ec265 --- /dev/null +++ b/src/macaron/slsa_analyzer/checks/provenance_witness_l1_check.py @@ -0,0 +1,187 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""This check examines a witness provenance (https://github.com/testifysec/witness).""" + +import logging + +from sqlalchemy import ForeignKey +from sqlalchemy.orm import Mapped, mapped_column + +from macaron.database.database_manager import ORMBase +from macaron.database.table_definitions import CheckFacts +from macaron.slsa_analyzer.analyze_context import AnalyzeContext +from macaron.slsa_analyzer.checks.base_check import BaseCheck +from macaron.slsa_analyzer.checks.check_result import CheckResult, CheckResultType +from macaron.slsa_analyzer.package_registry import JFrogMavenAsset, JFrogMavenRegistry +from macaron.slsa_analyzer.provenance.witness import ( + WitnessProvenanceSubject, + extract_witness_provenance_subjects, + is_witness_provenance_payload, + load_witness_verifier_config, +) +from macaron.slsa_analyzer.registry import registry +from macaron.slsa_analyzer.slsa_req import ReqName +from macaron.slsa_analyzer.specs.package_registry_spec import PackageRegistryInfo + +logger: logging.Logger = logging.getLogger(__name__) + + +def verify_artifact_assets( + artifact_assets: list[JFrogMavenAsset], + subjects: set[WitnessProvenanceSubject], +) -> list[str]: + """Verify artifact assets against subjects in the witness provenance payload. + + Parameters + ---------- + artifact_assets : list[JFrogMavenAsset] + List of artifact assets to verify. + subjects : list[WitnessProvenanceSubject] + List of subjects extracted from the in the witness provenance. + + Returns + ------- + list[str] + A list of justifications if the verification fails. 
+        If the verification is successful, an empty list is returned.
+    """
+    fail_justifications = []
+
+    # A look-up table to verify:
+    # 1. if the name of the artifact appears in any subject of the witness provenance, then
+    # 2. if the digest of the artifact could be found
+    look_up: dict[str, dict[str, WitnessProvenanceSubject]] = {}
+
+    for subject in subjects:
+        if subject.artifact_name not in look_up:
+            look_up[subject.artifact_name] = {}
+        look_up[subject.artifact_name][subject.sha256_digest] = subject
+
+    for asset in artifact_assets:
+        if asset.name not in look_up:
+            message = f"Could not find subject with name {asset.name} in the provenance."
+            logger.info(message)
+            fail_justifications.append(message)
+            # Without a matching subject name there is no digest to compare against.
+            continue
+
+        if asset.sha256_digest not in look_up[asset.name]:
+            message = f"Failed to verify the SHA256 digest of the asset '{asset.name}' in the provenance."
+            logger.info(message)
+            fail_justifications.append(message)
+            continue
+
+        subject = look_up[asset.name][asset.sha256_digest]
+
+        logger.info(
+            "Successfully verified asset '%s' against the subject '%s' in the provenance.",
+            asset.name,
+            subject.subject_name,
+        )
+
+    return fail_justifications
+
+
+class ProvenanceWitnessL1Table(CheckFacts, ORMBase):
+    """Result table for the provenance witness level-one check."""
+
+    __tablename__ = "_provenance_witness_l1_check"
+
+    # The primary key.
+    id: Mapped[int] = mapped_column(ForeignKey("_check_facts.id"), primary_key=True)  # noqa: A003
+
+    __mapper_args__ = {
+        "polymorphic_identity": "_provenance_witness_l1_check",
+    }
+
+
+class ProvenanceWitnessL1Check(BaseCheck):
+    """This check examines a Witness provenance (https://github.com/testifysec/witness).
+
+    At the moment, we are only checking the actual digests of the artifacts
+    against the digests in the provenance.
+    """
+
+    def __init__(self) -> None:
+        """Initialize a check instance."""
+        check_id = "mcn_provenance_witness_level_one_1"
+        description = "Check whether the target has a level-1 witness provenance."
+        depends_on: list[tuple[str, CheckResultType]] = [
+            ("mcn_provenance_available_1", CheckResultType.PASSED),
+        ]
+        eval_reqs = [
+            ReqName.PROV_AVAILABLE,
+            ReqName.PROV_CONT_BUILD_INS,
+            ReqName.PROV_CONT_ARTI,
+            ReqName.PROV_CONT_BUILDER,
+        ]
+        super().__init__(
+            check_id=check_id,
+            description=description,
+            depends_on=depends_on,
+            eval_reqs=eval_reqs,
+            result_on_skip=CheckResultType.FAILED,
+        )
+
+    def run_check(self, ctx: AnalyzeContext, check_result: CheckResult) -> CheckResultType:
+        """Implement the check in this method.
+
+        Parameters
+        ----------
+        ctx : AnalyzeContext
+            The object containing processed data for the target repo.
+        check_result : CheckResult
+            The object containing result data of a check.
+
+        Returns
+        -------
+        CheckResultType
+            The result type of the check (e.g. PASSED).
+ """ + witness_verifier_config = load_witness_verifier_config() + verified_provenances = [] + verified_artifact_assets = [] + + for package_registry_info_entry in ctx.dynamic_data["package_registries"]: + match package_registry_info_entry: + case PackageRegistryInfo( + package_registry=JFrogMavenRegistry() as jfrog_registry, + provenances=provenances, + ): + for provenance in provenances: + if not isinstance(provenance.asset, JFrogMavenAsset): + continue + if not is_witness_provenance_payload( + payload=provenance.payload, + predicate_types=witness_verifier_config.predicate_types, + ): + continue + + artifact_assets = jfrog_registry.fetch_assets( + group_id=provenance.asset.group_id, + artifact_id=provenance.asset.artifact_id, + version=provenance.asset.version, + extensions=witness_verifier_config.artifact_extensions, + ) + subjects = extract_witness_provenance_subjects(provenance.payload) + failure_justification = verify_artifact_assets(artifact_assets, subjects) + + if failure_justification: + check_result["justification"].extend(failure_justification) + return CheckResultType.FAILED + + verified_artifact_assets.extend(artifact_assets) + verified_provenances.append(provenance) + + # When this check passes, it means: "the project produces verifiable witness provenances". + # Therefore, If Macaron cannot discover any witness provenance, we "fail" the check. + if len(verified_provenances) > 0: + check_result["justification"].append("Successfully verified the following artifacts:") + for asset in verified_artifact_assets: + check_result["justification"].append(f"* {asset.url}") + check_result["result_tables"].append(ProvenanceWitnessL1Table()) + return CheckResultType.PASSED + + check_result["justification"].append("Failed to discover any witness provenance.") + return CheckResultType.FAILED + + +registry.register(ProvenanceWitnessL1Check()) diff --git a/src/macaron/slsa_analyzer/checks/trusted_builder_l3_check.py b/src/macaron/slsa_analyzer/checks/trusted_builder_l3_check.py index 97bc717c3..16984e26c 100644 --- a/src/macaron/slsa_analyzer/checks/trusted_builder_l3_check.py +++ b/src/macaron/slsa_analyzer/checks/trusted_builder_l3_check.py @@ -18,6 +18,7 @@ from macaron.slsa_analyzer.checks.base_check import BaseCheck from macaron.slsa_analyzer.checks.check_result import CheckResult, CheckResultType from macaron.slsa_analyzer.ci_service.github_actions import GHWorkflowType, GitHubActions +from macaron.slsa_analyzer.provenance.intoto import InTotoV01Payload from macaron.slsa_analyzer.registry import registry from macaron.slsa_analyzer.slsa_req import ReqName from macaron.slsa_analyzer.specs.inferred_provenance import Provenance @@ -137,7 +138,7 @@ def run_check(self, ctx: AnalyzeContext, check_result: CheckResult) -> CheckResu ) if ctx.dynamic_data["is_inferred_prov"]: - provenance: dict[str, Any] = Provenance().payload + provenance: Any = Provenance().payload predicate = provenance["predicate"] predicate["buildType"] = f"Trusted {ci_service.name}" predicate["builder"]["id"] = callee.name @@ -147,7 +148,7 @@ def run_check(self, ctx: AnalyzeContext, check_result: CheckResult) -> CheckResu predicate["invocation"]["configSource"]["digest"]["sha1"] = ctx.component.repository.commit_sha predicate["invocation"]["configSource"]["entryPoint"] = caller_link predicate["metadata"]["buildInvocationId"] = html_url - inferred_provenances.append(provenance) + inferred_provenances.append(InTotoV01Payload(statement=provenance)) check_result["justification"].extend( [ {f"Found trusted builder GitHub 
Actions: {callee.name} triggered by": caller_link}, diff --git a/src/macaron/slsa_analyzer/ci_service/base_ci_service.py b/src/macaron/slsa_analyzer/ci_service/base_ci_service.py index 1288fc6b0..6106dafb3 100644 --- a/src/macaron/slsa_analyzer/ci_service/base_ci_service.py +++ b/src/macaron/slsa_analyzer/ci_service/base_ci_service.py @@ -11,6 +11,7 @@ from macaron.code_analyzer.call_graph import BaseNode, CallGraph from macaron.parsers.bashparser import BashCommands from macaron.slsa_analyzer.git_service.api_client import BaseAPIClient +from macaron.slsa_analyzer.git_service.base_git_service import BaseGitService logger: logging.Logger = logging.getLogger(__name__) @@ -59,7 +60,9 @@ def get_workflows(self, repo_path: str) -> list: """ raise NotImplementedError - def is_detected(self, repo_path: str) -> bool: + def is_detected( + self, repo_path: str, git_service: BaseGitService | None = None # pylint: disable=unused-argument + ) -> bool: """Return True if this CI service is used in the target repo. Parameters @@ -67,6 +70,9 @@ def is_detected(self, repo_path: str) -> bool: repo_path : str The path to the target repo. + git_service : BaseGitService + The Git service that hosts the target repo (currently an unused argument). + Returns ------- bool diff --git a/src/macaron/slsa_analyzer/ci_service/github_actions.py b/src/macaron/slsa_analyzer/ci_service/github_actions.py index 7ae85dee9..011a273a8 100644 --- a/src/macaron/slsa_analyzer/ci_service/github_actions.py +++ b/src/macaron/slsa_analyzer/ci_service/github_actions.py @@ -1,4 +1,4 @@ -# Copyright (c) 2022 - 2022, Oracle and/or its affiliates. All rights reserved. +# Copyright (c) 2022 - 2023, Oracle and/or its affiliates. All rights reserved. # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. """This module analyzes GitHub Actions CI.""" @@ -17,6 +17,8 @@ from macaron.parsers.bashparser import BashCommands, extract_bash_from_ci from macaron.slsa_analyzer.ci_service.base_ci_service import BaseCIService from macaron.slsa_analyzer.git_service.api_client import GhAPIClient, get_default_gh_client +from macaron.slsa_analyzer.git_service.base_git_service import BaseGitService +from macaron.slsa_analyzer.git_service.github import GitHub logger: logging.Logger = logging.getLogger(__name__) @@ -96,7 +98,7 @@ def load_defaults(self) -> None: self, "max_workflow_persist", defaults.getint("ci.github_actions", "max_workflow_persist", fallback=90) ) - def is_detected(self, repo_path: str) -> bool: + def is_detected(self, repo_path: str, git_service: BaseGitService | None = None) -> bool: """Return True if this CI service is used in the target repo. Parameters @@ -104,11 +106,17 @@ def is_detected(self, repo_path: str) -> bool: repo_path : str The path to the target repo. + git_service : BaseGitService + The Git service hosting the target repo. + Returns ------- bool True if this CI service is detected, else False. """ + if git_service and not isinstance(git_service, GitHub): + return False + # GitHub Actions need a special detection implementation. # We need to check if YAML files exist in the workflows dir. exists = False diff --git a/src/macaron/slsa_analyzer/git_service/api_client.py b/src/macaron/slsa_analyzer/git_service/api_client.py index 270ce0dc7..65150ecbb 100644 --- a/src/macaron/slsa_analyzer/git_service/api_client.py +++ b/src/macaron/slsa_analyzer/git_service/api_client.py @@ -1,17 +1,59 @@ -# Copyright (c) 2022 - 2022, Oracle and/or its affiliates. All rights reserved. 
+# Copyright (c) 2022 - 2023, Oracle and/or its affiliates. All rights reserved. # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. """The module provides API clients for VCS services, such as GitHub.""" +from __future__ import annotations + import logging +from collections.abc import Sequence from enum import Enum +from typing import NamedTuple from macaron.config.defaults import defaults +from macaron.slsa_analyzer.asset import AssetLocator from macaron.util import construct_query, download_github_build_log, send_get_http, send_get_http_raw logger: logging.Logger = logging.getLogger(__name__) +class GitHubReleaseAsset(NamedTuple): + """An asset published from a GitHub Release. + + Attributes + ---------- + name : str + The asset name. + url : str + The URL to the asset. + size_in_bytes : int + The size of the asset, in bytes. + api_client : GhAPIClient + The GitHub API client. + """ + + name: str + url: str + size_in_bytes: int + api_client: GhAPIClient + + def download(self, dest: str) -> bool: + """Download the asset. + + Parameters + ---------- + dest : str + The local destination where the asset is downloaded to. + Note that this must include the file name. + + Returns + ------- + bool + ``True`` if the asset is downloaded successfully; ``False`` if not. + """ + return self.api_client.download_asset(self.url, dest) + + class BaseAPIClient: """This is the base class for API clients.""" @@ -32,7 +74,7 @@ def get_latest_release(self, full_name: str) -> dict: # pylint: disable=unused- """ return {} - def get_assets(self, release: dict, name: str = "", ext: str = "") -> list[dict]: # pylint: disable=unused-argument + def fetch_assets(self, release: dict, ext: str = "") -> Sequence[AssetLocator]: # pylint: disable=unused-argument """Return the release assets that match or empty if it doesn't exist. The extension is ignored if name is set. @@ -41,8 +83,6 @@ def get_assets(self, release: dict, name: str = "", ext: str = "") -> list[dict] ---------- release : dict The release object in JSON format. - name : str - The asset name to find. ext : str The asset extension to find; this parameter is ignored if name is set. @@ -418,6 +458,7 @@ def get_latest_release(self, full_name: str) -> dict: ------- dict The latest release object in JSON format. + Schema: https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#get-the-latest-release. """ logger.debug("Get the latest release for %s.", full_name) url = f"{GhAPIClient._REPO_END_POINT}/{full_name}/releases/latest" @@ -425,7 +466,7 @@ def get_latest_release(self, full_name: str) -> dict: return response_data or {} - def get_assets(self, release: dict, name: str = "", ext: str = "") -> list[dict]: + def fetch_assets(self, release: dict, ext: str = "") -> Sequence[AssetLocator]: """Return the release assets that match or empty if it doesn't exist. The extension is ignored if name is set. @@ -433,26 +474,48 @@ def get_assets(self, release: dict, name: str = "", ext: str = "") -> list[dict] Parameters ---------- release : dict - The release object in JSON format. - name : str - The asset name to find. + The release payload in JSON format. + Schema: https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#get-the-latest-release. ext : str The asset extension to find; this parameter is ignored if name is set. Returns ------- - list[dict] - The list of release assets that match or empty if it doesn't exist. + Sequence[AssetLocator] + A sequence of release assets. 
""" - if "assets" in release: - if name: - logger.debug("Search for the asset %s in the release.", name) - return [item for item in release["assets"] if item["name"] == name] - - if ext: - logger.debug("Search for the asset extension %s in the release.", ext) - return [item for item in release["assets"] if item["name"].endswith(ext)] - return [] + assets = release.get("assets", []) + if not isinstance(assets, list): + return [] + + asset_locators = [] + + for asset in assets: + name = asset.get("name") + if name is None or not isinstance(name, str): + continue + + if ext and not name.endswith(ext): + continue + + url = asset.get("url") + if url is None or not isinstance(url, str): + continue + + size_in_bytes = asset.get("size") + if size_in_bytes is None or not isinstance(size_in_bytes, int): + continue + + asset_locators.append( + GitHubReleaseAsset( + name=name, + url=url, + size_in_bytes=size_in_bytes, + api_client=self, + ) + ) + + return asset_locators def download_asset(self, url: str, download_path: str) -> bool: """Download the assets of the release that match the pattern (if specified). diff --git a/src/macaron/slsa_analyzer/package_registry/__init__.py b/src/macaron/slsa_analyzer/package_registry/__init__.py new file mode 100644 index 000000000..070ae38bb --- /dev/null +++ b/src/macaron/slsa_analyzer/package_registry/__init__.py @@ -0,0 +1,15 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""This module defines the package registries.""" + +from macaron.slsa_analyzer.package_registry.jfrog_maven_registry import JFrogMavenAsset, JFrogMavenRegistry +from macaron.slsa_analyzer.package_registry.package_registry import PackageRegistry + +__all__ = [ + "JFrogMavenAsset", + "JFrogMavenRegistry", + "PackageRegistry", +] + +PACKAGE_REGISTRIES: list[PackageRegistry] = [JFrogMavenRegistry()] diff --git a/src/macaron/slsa_analyzer/package_registry/jfrog_maven_registry.py b/src/macaron/slsa_analyzer/package_registry/jfrog_maven_registry.py new file mode 100644 index 000000000..a2009ac78 --- /dev/null +++ b/src/macaron/slsa_analyzer/package_registry/jfrog_maven_registry.py @@ -0,0 +1,860 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""Assets on a package registry.""" + +from __future__ import annotations + +import json +import logging +from typing import NamedTuple +from urllib.parse import SplitResult, urlunsplit + +import requests + +from macaron.config.defaults import defaults +from macaron.errors import ConfigurationError +from macaron.slsa_analyzer.build_tool.base_build_tool import BaseBuildTool +from macaron.slsa_analyzer.build_tool.gradle import Gradle +from macaron.slsa_analyzer.build_tool.maven import Maven +from macaron.slsa_analyzer.package_registry.package_registry import PackageRegistry +from macaron.util import JsonType + +logger: logging.Logger = logging.getLogger(__name__) + + +class JFrogMavenAsset(NamedTuple): + """An asset hosted on a JFrog Artifactory repository with Maven layout. + + Attributes + ---------- + name : str + The name of the Maven asset. + group_id : str + The group id. + artifact_id : str + The artifact id. + version : str + The version of the Maven asset. + metadata : JFrogMavenAssetMetadata + The metadata of the JFrog Maven asset. 
+ jfrog_maven_registry : JFrogMavenRegistry + The JFrog Maven registry that hosts the asset. + """ + + name: str + group_id: str + artifact_id: str + version: str + metadata: JFrogMavenAssetMetadata + jfrog_maven_registry: JFrogMavenRegistry + + @property + def url(self) -> str: + """Get the URL to the asset. + + This URL can be used to download the asset. + """ + return self.metadata.download_uri + + @property + def sha256_digest(self) -> str: + """Get the SHA256 digest of the asset.""" + return self.metadata.sha256_digest + + @property + def size_in_bytes(self) -> int: + """Get the size of the asset (in bytes).""" + return self.metadata.size_in_bytes + + def download(self, dest: str) -> bool: + """Download the asset. + + Parameters + ---------- + dest : str + The local destination where the asset is downloaded to. + Note that this must include the file name. + + Returns + ------- + bool + ``True`` if the asset is downloaded successfully; ``False`` if not. + """ + return self.jfrog_maven_registry.download_asset(self.url, dest) + + +class JFrogMavenAssetMetadata(NamedTuple): + """Metadata of an asset on a JFrog Maven registry. + + Attributes + ---------- + size_in_bytes : int + The size of the asset (in bytes). + sha256_digest : str + The SHA256 digest of the asset. + download_uri : str + The download URI of the asset. + """ + + size_in_bytes: int + sha256_digest: str + download_uri: str + + +class JFrogMavenRegistry(PackageRegistry): + """A JFrog Artifactory repository that acts as a package registry with Maven layout. + + For more details on JFrog Artifactory repository, see: + https://jfrog.com/help/r/jfrog-artifactory-documentation/repository-management + + Attributes + ---------- + hostname : str + The hostname of the JFrog instance. + repo : str + The Artifactory repository with Maven layout on the JFrog instance. + request_timeout : int + The timeout (in seconds) for regular requests made to the package registry. + download_timeout : int + The timeout (in seconds) for downloading files from the package registry. + enabled : bool + Whether the package registry should be active in the analysis or not. + "Not active" means no target repo/software component can be matched against + this package registry. + """ + + def __init__( + self, + hostname: str | None = None, + repo: str | None = None, + request_timeout: int | None = None, + download_timeout: int | None = None, + enabled: bool | None = None, + ) -> None: + self.hostname = hostname or "" + self.repo = repo or "" + self.request_timeout = request_timeout or 10 + self.download_timeout = download_timeout or 120 + self.enabled = enabled or False + super().__init__("JFrog Maven Registry") + + def load_defaults(self) -> None: + """Load the .ini configuration for the current package registry. + + Raises + ------ + ConfigurationError + If there is a schema violation in the ``package_registry.jfrog.maven`` section. + """ + section_name = "package_registry.jfrog.maven" + if not defaults.has_section(section_name): + return + section = defaults[section_name] + + self.hostname = section.get("hostname") + if not self.hostname: + raise ConfigurationError( + f'The "hostname" key is missing in section [{section_name}] of the .ini configuration file.' + ) + + self.repo = section.get("repo") + if not self.repo: + raise ConfigurationError( + f'The "repo" key is missing in section [{section_name}] of the .ini configuration file.'
+ ) + + try: + self.request_timeout = defaults.getint("requests", "timeout", fallback=10) + except ValueError as error: + raise ConfigurationError( + f'The value of "timeout" in section [requests] ' f"of the .ini configuration file is invalid: {error}", + ) from error + + try: + self.download_timeout = section.getint( + "download_timeout", + fallback=self.request_timeout, + ) + except ValueError as error: + raise ConfigurationError( + f'The value of "download_timeout" in section [{section_name}] ' + f"of the .ini configuration file is invalid: {error}", + ) from error + + self.enabled = True + + def is_detected(self, build_tool: BaseBuildTool) -> bool: + """Detect if artifacts of the repo under analysis can possibly be published to this package registry. + + The detection here is based on the repo's detected build tool. + If the package registry is compatible with the given build tool, it can be a + possible place where the artifacts produced from the repo are published. + + ``JFrogMavenRegistry`` is compatible with Maven and Gradle. + + Parameters + ---------- + build_tool : BaseBuildTool + A detected build tool of the repository under analysis. + + Returns + ------- + bool + ``True`` if the repo under analysis can be published to this package registry, + based on the given build tool. + """ + if not self.enabled: + return False + compatible_build_tool_classes = [Maven, Gradle] + for build_tool_class in compatible_build_tool_classes: + if isinstance(build_tool, build_tool_class): + return True + return False + + def construct_maven_repository_path( + self, + group_id: str, + artifact_id: str | None = None, + version: str | None = None, + asset_name: str | None = None, + ) -> str: + """Construct a path to a folder or file on the registry, assuming Maven repository layout. + + For more details regarding Maven repository layout, see the following: + - https://maven.apache.org/repository/layout.html + - https://maven.apache.org/guides/mini/guide-naming-conventions.html + + Parameters + ---------- + group_id : str + The group id of a Maven package. + artifact_id : str + The artifact id of a Maven package. + version : str + The version of a Maven package. + asset_name : str + The asset name. + + Returns + ------- + str + The path to a folder or file on the registry. + """ + path = group_id.replace(".", "/") + if artifact_id: + path = "/".join([path, artifact_id]) + if version: + path = "/".join([path, version]) + if asset_name: + path = "/".join([path, asset_name]) + return path + + def fetch_artifact_ids(self, group_id: str) -> list[str]: + """Get all artifact ids under a group id. + + This is done by fetching all children folders under the group folder on the registry. + + Parameters + ---------- + group_id : str + The group id. + + Returns + ------- + list[str] + The artifacts ids under the group. 
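+
+        Examples
+        --------
+        A hedged sketch; the group id and the returned artifact ids below are
+        hypothetical values for illustration:
+
+            artifact_ids = registry.fetch_artifact_ids("com.example")
+            # E.g. ["example-core", "example-cli"], derived from the child folders
+            # under com/example/ in the Maven-layout repository.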
+ """ + folder_info_url = self.construct_folder_info_url( + folder_path=self.construct_maven_repository_path(group_id), + ) + + try: + response = requests.get(url=folder_info_url, timeout=self.request_timeout) + except requests.exceptions.RequestException as error: + logger.debug("Failed to retrieve artifact ids for group %s: %s", group_id, error) + return [] + + if response.status_code == 200: + folder_info_payload = response.text + else: + logger.debug( + "Error retrieving artifact ids of group %s: got response with status code %d.", + group_id, + response.status_code, + ) + return [] + + artifact_ids = self.extract_folder_names_from_folder_info_payload(folder_info_payload) + return artifact_ids + + def construct_folder_info_url(self, folder_path: str) -> str: + """Construct a URL for the JFrog Folder Info API. + + Documentation: https://jfrog.com/help/r/jfrog-rest-apis/folder-info. + + Parameters + ---------- + folder_path : str + The path to the folder. + + Returns + ------- + str + The URL to request the info of the folder. + """ + url = urlunsplit( + SplitResult( + scheme="https", + netloc=self.hostname, + path=f"/api/storage/{self.repo}/{folder_path}", + query="", + fragment="", + ) + ) + return url + + def construct_file_info_url(self, file_path: str) -> str: + """Construct a URL for the JFrog File Info API. + + Documentation: https://jfrog.com/help/r/jfrog-rest-apis/file-info. + + Parameters + ---------- + file_path : str + The path to the file. + + Returns + ------- + str + The URL to request the info of the file. + """ + return urlunsplit( + SplitResult( + scheme="https", + netloc=self.hostname, + path=f"/api/storage/{self.repo}/{file_path}", + query="", + fragment="", + ) + ) + + def construct_latest_version_url( + self, + group_id: str, + artifact_id: str, + ) -> str: + """Construct a URL for the JFrog Latest Version Search API. + + The response payload includes the latest version of the package with the given + group id and artifact id. + Documentation: https://jfrog.com/help/r/jfrog-rest-apis/artifact-latest-version-search-based-on-layout. + + Parameters + ---------- + group_id : str + The group id of the package. + artifact_id: str + The artifact id of the package. + + Returns + ------- + str + The URL to request the latest version of the package. + """ + return urlunsplit( + SplitResult( + scheme="https", + netloc=self.hostname, + path="/api/search/latestVersion", + query="&".join( + [ + f"repos={self.repo}", + f"g={group_id}", + f"a={artifact_id}", + ] + ), + fragment="", + ) + ) + + def fetch_latest_version(self, group_id: str, artifact_id: str) -> str | None: + """Fetch the latest version of a Java package on this JFrog Maven registry. + + Parameters + ---------- + group_id : str + The group id of the Java package. + artifact_id : str + The artifact id of the Java package. + + Returns + ------- + str | None + The latest version of the Java package if it could be retrieved, or ``None`` otherwise. 
+ """ + logger.debug( + "Retrieving latest version of Java package %s:%s.", + group_id, + artifact_id, + ) + + url = self.construct_latest_version_url( + group_id=group_id, + artifact_id=artifact_id, + ) + + try: + response = requests.get(url, timeout=self.request_timeout) + except requests.exceptions.RequestException as error: + logger.debug( + "Failed to retrieve the latest version of Java package %s:%s: %s", + group_id, + artifact_id, + error, + ) + return None + + if response.status_code == 200: + version = response.text + return version + + logger.debug( + "Failed to retrieve the latest version of Java package %s:%s. Got response with status code %d: %s", + group_id, + artifact_id, + response.status_code, + response.text, + ) + return None + + def fetch_asset_names( + self, + group_id: str, + artifact_id: str, + version: str, + extensions: set[str] | None = None, + ) -> list[str]: + """Retrieve the metadata of assets published for a version of a Maven package. + + Parameters + ---------- + group_id : str + The group id of the Maven package. + artifact_id : str + The artifact id of the Maven package. + version : str + The version of the Maven package. + extensions : set[str] | None + The set of asset extensions. + Only assets with names ending in these extensions are fetched. + If this is ``None``, then all assets are returned regardless of their extensions. + + Returns + ------- + list[str] + The list of asset names. + """ + folder_path = self.construct_maven_repository_path( + group_id=group_id, + artifact_id=artifact_id, + version=version, + ) + url = self.construct_folder_info_url(folder_path=folder_path) + + try: + response = requests.get(url=url, timeout=self.request_timeout) + except requests.exceptions.RequestException as error: + logger.debug( + "Failed to fetch assets of Java package %s:%s: %s", + group_id, + artifact_id, + error, + ) + return [] + + if response.status_code != 200: + logger.debug( + "Failed to fetch the assets of Java package %s:%s: got response with status code %d.", + group_id, + artifact_id, + response.status_code, + ) + return [] + + return self.extract_file_names_from_folder_info_payload( + folder_info_payload=response.text, + extensions=extensions, + ) + + def _extract_children_form_folder_info_payload(self, folder_info_payload: str) -> list[JsonType]: + """Extract the ``children`` field from the JFrog Folder Info payload. + + Note: Currently, we do not try to validate the schema of the payload. Rather, we only + try to read things that we can recognise. + + Parameters + ---------- + folder_info_payload : JsonType + The JSON payload of a Folder Info request. + Documentation: https://jfrog.com/help/r/jfrog-rest-apis/folder-info. + + Returns + ------- + list[JsonType] + The result of extracting the ``children`` field from the Folder Info payload. + """ + try: + json_payload: JsonType = json.loads(folder_info_payload) + except json.JSONDecodeError as error: + logger.debug("Failed to decode the Folder Info payload: %s.", error) + return [] + + if not isinstance(json_payload, dict): + logger.debug("Got unexpected value type for the Folder Info payload. Expected a JSON object.") + return [] + + children = json_payload.get("children", []) + if not isinstance(children, list): + logger.debug("Got unexpected value for the 'children' field in the Folder Info payload. 
Expected a list.") + return [] + + return children + + def extract_folder_names_from_folder_info_payload( + self, + folder_info_payload: str, + ) -> list[str]: + """Extract a list of folder names from the Folder Info payload of a Maven group folder. + + Parameters + ---------- + folder_info_payload : str + The Folder Info payload. + + Returns + ------- + list[str] + The artifact ids found in the payload. + """ + children = self._extract_children_form_folder_info_payload(folder_info_payload) + + folder_names = [] + + for child in children: + if not isinstance(child, dict): + continue + + is_folder = child.get("folder", True) + if not isinstance(is_folder, bool) or not is_folder: + continue + + uri = child.get("uri", "") + if not isinstance(uri, str) or not uri: + continue + folder_name = uri.lstrip("/") + folder_names.append(folder_name) + + return folder_names + + def extract_file_names_from_folder_info_payload( + self, + folder_info_payload: str, + extensions: set[str] | None = None, + ) -> list[str]: + """Extract file names from the Folder Info response payload. + + For the schema of this payload and other details regarding the API, see: + https://jfrog.com/help/r/jfrog-rest-apis/folder-info. + + Note: Currently, we do not try to validate the schema of the payload. Rather, we only + try to read as much as possible things that we can recognise. + + Parameters + ---------- + folder_info_payload : JsonType + The JSON payload of a Folder Info reponse. + extensions : set[str] | None + The set of allowed extensions. + Filenames not ending in these extensions are omitted from the result. + If this is ``None``, then all file names are returned regardless of their extensions. + + Returns + ------- + list[str] + The list of filenames in the folder, extracted from the payload. + """ + children = self._extract_children_form_folder_info_payload(folder_info_payload) + + asset_names = [] + + for child in children: + if not isinstance(child, dict): + continue + + is_folder = child.get("folder", True) + if not isinstance(is_folder, bool) or is_folder: + continue + + uri = child.get("uri", "") + if not isinstance(uri, str) or not uri: + continue + asset_name = uri.lstrip("/") + if not extensions or any(asset_name.endswith(extension) for extension in extensions): + asset_names.append(asset_name) + + return asset_names + + def fetch_asset_metadata( + self, + group_id: str, + artifact_id: str, + version: str, + asset_name: str, + ) -> JFrogMavenAssetMetadata | None: + """Fetch an asset's metadata from JFrog. + + Parameters + ---------- + group_id : str + The group id of the package containing the asset. + artifact_id : str + The artifact id of the package containing the asset. + version : str + The version of the package containing the asset. + asset_name : str + The name of the asset. + + Returns + ------- + JFrogMavenAsset | None + The asset's metadata, or ``None`` if the metadata cannot be retrieved. + """ + file_path = self.construct_maven_repository_path( + group_id=group_id, + artifact_id=artifact_id, + version=version, + asset_name=asset_name, + ) + url = self.construct_file_info_url(file_path) + + try: + response = requests.get(url=url, timeout=self.request_timeout) + except requests.exceptions.RequestException as error: + logger.debug( + "Failed to fetch metadata of package %s:%s:%s: %s", + group_id, + artifact_id, + version, + error, + ) + return None + + if response.status_code == 200: + file_info_payload = response.text + else: + logger.debug( + "Failed to fetch metadata of package %s:%s:%s. 
Got response with status code %d: %s", + group_id, + artifact_id, + version, + response.status_code, + response.text, + ) + return None + + try: + return self.extract_asset_metadata_from_file_info_payload(file_info_payload) + except KeyError as error: + logger.debug("Failed to fetch metadata of package %s:%s:%s: %s", group_id, artifact_id, version, error) + return None + + def extract_asset_metadata_from_file_info_payload( + self, + file_info_payload: str, + ) -> JFrogMavenAssetMetadata | None: + """Extract the metadata of an asset from the File Info request payload. + + Documentation: https://jfrog.com/help/r/jfrog-rest-apis/file-info. + + """ + try: + json_payload: JsonType = json.loads(file_info_payload) + except json.JSONDecodeError as error: + logger.debug("Failed to decode the File Info payload: %s.", error) + return None + + if not isinstance(json_payload, dict): + logger.debug("Got unexpected value for File Info payload. Expected a JSON object.") + return None + + checksums = json_payload.get("checksums", {}) + + if not isinstance(checksums, dict): + logger.debug( + "Got unexpected value for the 'checksums' field in the File Info payload. Expected a JSON object." + ) + return None + + sha256_checksum = checksums.get("sha256") + if not sha256_checksum or not isinstance(sha256_checksum, str): + logger.debug("Could not extract the SHA256 checksum from the File Info payload.") + return None + + size_in_bytes_input = json_payload.get("size") + if not size_in_bytes_input or not isinstance(size_in_bytes_input, str): + logger.debug("Could not extract the value of the 'size' field from the File Info payload.") + return None + + try: + size_in_bytes = int(size_in_bytes_input) + except ValueError: + logger.debug("Invalid value for the 'size' field in the File Info payload.") + return None + + download_uri = json_payload.get("downloadUri") + if not download_uri or not isinstance(download_uri, str): + logger.debug("Could not extract the value of the 'ownload_uri' field from the File Info payload.") + return None + + return JFrogMavenAssetMetadata( + size_in_bytes=size_in_bytes, + sha256_digest=sha256_checksum, + download_uri=download_uri, + ) + + def fetch_assets( + self, + group_id: str, + artifact_id: str, + version: str, + extensions: set[str] | None = None, + ) -> list[JFrogMavenAsset]: + """Fetch the assets of a Maven package. + + Parameters + ---------- + group_id : str + The group id of the Maven package. + artifact_id : str + The artifact id of the Maven package. + version : str + The version of the Maven package. + extensions : set[str] | None + The extensions of the assets to fetch. + If this is ``None``, all available assets are fetched. + + Returns + ------- + list[JFrogMavenAsset] + The list of assets of the package. + """ + asset_names = self.fetch_asset_names( + group_id=group_id, + artifact_id=artifact_id, + version=version, + extensions=extensions, + ) + + assets = [] + + for asset_name in asset_names: + asset_metadata = self.fetch_asset_metadata( + group_id=group_id, + artifact_id=artifact_id, + version=version, + asset_name=asset_name, + ) + if asset_metadata: + assets.append( + JFrogMavenAsset( + name=asset_name, + group_id=group_id, + artifact_id=artifact_id, + version=version, + metadata=asset_metadata, + jfrog_maven_registry=self, + ) + ) + + return assets + + def construct_asset_url( + self, + group_id: str, + artifact_id: str, + version: str, + asset_name: str, + ) -> str: + """Get the URL to download an asset. 
+ + Parameters + ---------- + group_id : str + The group id of the package containing the asset. + artifact_id : str + The artifact id of the package containing the asset. + version : str + The version of the package containing the asset. + asset_name : str + The name of the asset. + + Returns + ------- + str + The URL to the asset, which can be use for downloading the asset. + """ + group_path = self.construct_maven_repository_path(group_id) + return urlunsplit( + SplitResult( + scheme="https", + netloc=self.hostname, + path=f"{self.repo}/{group_path}/{artifact_id}/{version}/{asset_name}", + query="", + fragment="", + ) + ) + + def download_asset(self, url: str, dest: str) -> bool: + """Download an asset from the given URL to a given location. + + Parameters + ---------- + url : str + The URL to the asset on the package registry. + dest : str + The local destination where the asset is downloaded to. + + Returns + ------- + bool + ``True`` if the file is downloaded successfully; ``False`` if not. + """ + try: + response = requests.get(url=url, timeout=self.download_timeout) + except requests.exceptions.RequestException as error: + logger.debug("Failed to download asset from %s. Error: %s", url, error) + return False + + if response.status_code != 200: + logger.debug( + "Failed to download asset from %s. Got response with status code %d: %s", + url, + response.status_code, + response.text, + ) + return False + + try: + with open(dest, "wb") as file: + file.write(response.content) + except OSError as error: + logger.debug( + "Failed to write the downloaded asset from %s to %s. Error: %s", + url, + dest, + error, + ) + return False + + return True diff --git a/src/macaron/slsa_analyzer/package_registry/package_registry.py b/src/macaron/slsa_analyzer/package_registry/package_registry.py new file mode 100644 index 000000000..e7e68f8c5 --- /dev/null +++ b/src/macaron/slsa_analyzer/package_registry/package_registry.py @@ -0,0 +1,42 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""This module defines package registries.""" + +import logging +from abc import ABC, abstractmethod + +from macaron.slsa_analyzer.build_tool.base_build_tool import BaseBuildTool + +logger: logging.Logger = logging.getLogger(__name__) + + +class PackageRegistry(ABC): + """Base package registry class.""" + + def __init__(self, name: str) -> None: + self.name = name + + @abstractmethod + def load_defaults(self) -> None: + """Load the .ini configuration for the current package registry.""" + + @abstractmethod + def is_detected(self, build_tool: BaseBuildTool) -> bool: + """Detect if artifacts of the repo under analysis can possibly be published to this package registry. + + The detection here is based on the repo's detected build tool. + If the package registry is compatible with the given build tool, it can be a + possible place where the artifacts produced from the repo are published. + + Parameters + ---------- + build_tool : BaseBuildTool + A detected build tool of the repository under analysis. + + Returns + ------- + bool + ``True`` if the repo under analysis can be published to this package registry, + based on the given build tool. 
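+
+        Examples
+        --------
+        A sketch of the expected behaviour, assuming a concrete registry such as
+        ``JFrogMavenRegistry`` (compatible with Maven and Gradle), that the build tool
+        classes can be instantiated without arguments, and that the registry has been
+        enabled via its .ini section:
+
+            registry = JFrogMavenRegistry(hostname="registry.example.org", repo="example-repo", enabled=True)
+            assert registry.is_detected(Gradle())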
+ """ diff --git a/src/macaron/slsa_analyzer/provenance/expectations/expectation.py b/src/macaron/slsa_analyzer/provenance/expectations/expectation.py index a9ba9e0f7..eb98a5200 100644 --- a/src/macaron/slsa_analyzer/provenance/expectations/expectation.py +++ b/src/macaron/slsa_analyzer/provenance/expectations/expectation.py @@ -8,7 +8,7 @@ from sqlalchemy.orm import Mapped, mapped_column from macaron.errors import ExpectationRuntimeError -from macaron.util import JsonType +from macaron.slsa_analyzer.provenance.intoto import InTotoPayload ExpectationFn = Callable[[Any], bool] @@ -60,7 +60,7 @@ def make_expectation(cls, expectation_path: str) -> Self | None: def __str__(self) -> str: return f"Expectation(description='{self.description}', path='{self.path}', target='{self.target}')" - def validate(self, prov: JsonType) -> bool: + def validate(self, prov: InTotoPayload) -> bool: """Validate the provenance against this expectation. Parameters @@ -80,4 +80,4 @@ def validate(self, prov: JsonType) -> bool: if not self._validator: raise ExpectationRuntimeError(f"Cannot find the validator for expectation {self.path}") - return self._validator(prov) # pylint: disable=not-callable + return self._validator(prov.statement) # pylint: disable=not-callable diff --git a/src/macaron/slsa_analyzer/provenance/intoto/__init__.py b/src/macaron/slsa_analyzer/provenance/intoto/__init__.py new file mode 100644 index 000000000..d366a1351 --- /dev/null +++ b/src/macaron/slsa_analyzer/provenance/intoto/__init__.py @@ -0,0 +1,120 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""In-toto provenance schemas and validation.""" + +from __future__ import annotations + +from collections.abc import Mapping +from dataclasses import dataclass +from typing import Generic, TypeVar + +from macaron.slsa_analyzer.provenance.intoto import v01, v1 +from macaron.slsa_analyzer.provenance.intoto.errors import ValidateInTotoPayloadError +from macaron.util import JsonType + +StatementT = TypeVar("StatementT", bound=Mapping) +"""Type of an in-toto statement. + +This is currently either a v0.1 statement or v1 statement. +""" + + +@dataclass(frozen=True) # objects of this class are immutable and hashable +class InTotoPayload(Generic[StatementT]): + """The payload of an in-toto provenance. + + The payload is a field within a DSSE envelope, having the type "Statement". + + For more details, see the following pages in in-toto spec: + - In-toto attestation layers: https://github.com/in-toto/attestation/tree/main/spec + v0.1: https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#attestation-spec + v1 : https://github.com/in-toto/attestation/tree/main/spec/v1#specification-for-in-toto-attestation-layers + - Envelope layer: + v0.1: https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#envelope + v1 : https://github.com/in-toto/attestation/blob/main/spec/v1/envelope.md + - Statement layer: + v0.1: https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#statement + v1: https://github.com/in-toto/attestation/blob/main/spec/v1/statement.md + """ + + statement: StatementT + + +class InTotoV01Payload(InTotoPayload[v01.InTotoStatement]): + """The provenance payload following in-toto v0.1 schema. + + The payload is a field within a DSSE envelope, having the type "Statement". 
+ + In-toto spec (v0.1): + - In-toto attestation layers: + https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#attestation-spec + - Envelope layer: + https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#envelope + - Statement layer: + https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#statement + """ + + +class InTotoV1Payload(InTotoPayload[v1.InTotoStatement]): + """The provenance payload following in-toto v1 schema. + + The payload is a field within a DSSE envelope, having the type "Statement". + + In-toto spec (v1): + - In-toto attestation layers: + https://github.com/in-toto/attestation/tree/main/spec/v1#specification-for-in-toto-attestation-layers + - Envelope layer: + https://github.com/in-toto/attestation/blob/main/spec/v1/envelope.md + - Statement layer: + https://github.com/in-toto/attestation/blob/main/spec/v1/statement.md + """ + + +def validate_intoto_payload(payload: dict[str, JsonType]) -> InTotoPayload: + """Validate the schema of an in-toto provenance payload. + + TODO: Consider using the in-toto-attestation package (https://github.com/in-toto/attestation/tree/main/python), + which contains Python bindings for in-toto attestation. + See issue: https://github.com/oracle/macaron/issues/426. + + Parameters + ---------- + payload : dict[str, JsonType] + The in-toto payload. + + Returns + ------- + InTotoPayload + The validated in-toto payload. + + Raises + ------ + ValidateInTotoPayloadError + When there is an error validating the payload. + """ + type_ = payload.get("_type") + if type_ is None: + raise ValidateInTotoPayloadError( + "The attribute '_type' of the in-toto statement is missing.", + ) + if not isinstance(type_, str): + raise ValidateInTotoPayloadError( + "The value of attribute '_type' in the in-toto statement is invalid: expecting a string.", + ) + + if type_ == "https://in-toto.io/Statement/v0.1": + # The type must always be this value for version v0.1. + # See specification: https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#statement. + + try: + if v01.validate_intoto_statement(payload): + return InTotoV01Payload(statement=payload) + + raise ValidateInTotoPayloadError("Unexpected error while validating the in-toto statement.") + except ValidateInTotoPayloadError as error: + raise error + + # TODO: add support for version 1. + + raise ValidateInTotoPayloadError("Invalid value for the attribute '_type' of the provenance payload.") diff --git a/src/macaron/slsa_analyzer/provenance/intoto/errors.py b/src/macaron/slsa_analyzer/provenance/intoto/errors.py new file mode 100644 index 000000000..f999c1c66 --- /dev/null +++ b/src/macaron/slsa_analyzer/provenance/intoto/errors.py @@ -0,0 +1,22 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. 
+ +"""Error types related to in-toto attestations.""" + +from macaron.errors import MacaronError + + +class InTotoAttestationError(MacaronError): + """The base error type for all in-toto related errors.""" + + +class ValidateInTotoPayloadError(InTotoAttestationError): + """Happens when there is an issue validating an in-toto payload, usually against a schema.""" + + +class UnsupportedInTotoVersionError(InTotoAttestationError): + """Happens when encountering a provenance under an unsupported in-toto version.""" + + +class LoadIntotoAttestationError(InTotoAttestationError): + """Happens when there is an issue decoding and loading the payload of an in-toto provenance.""" diff --git a/src/macaron/slsa_analyzer/provenance/intoto/v01/__init__.py b/src/macaron/slsa_analyzer/provenance/intoto/v01/__init__.py new file mode 100644 index 000000000..11c9a961c --- /dev/null +++ b/src/macaron/slsa_analyzer/provenance/intoto/v01/__init__.py @@ -0,0 +1,175 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""This module handles in-toto version 0.1 attestations.""" + +from __future__ import annotations + +from typing import TypedDict, TypeGuard + +from macaron.slsa_analyzer.provenance.intoto.errors import ValidateInTotoPayloadError +from macaron.util import JsonType + + +class InTotoStatement(TypedDict): + """An in-toto version 0.1 statement. + + This is the type of the payload in an in-toto version 0.1 attestation. + Specification: https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#statement. + """ + + _type: str + subject: list[InTotoSubject] + predicateType: str # noqa: N815 + predicate: dict[str, JsonType] | None + + +class InTotoSubject(TypedDict): + """An in-toto subject. + + Specification: https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#statement. + """ + + name: str + digest: dict[str, str] + + +def validate_intoto_statement(payload: dict[str, JsonType]) -> TypeGuard[InTotoStatement]: + """Validate the statement of an in-toto attestation. + + Specification: https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#statement. + + TODO: Consider using the in-toto-attestation package (https://github.com/in-toto/attestation/tree/main/python), + which contains Python bindings for in-toto attestation. + See issue: https://github.com/oracle/macaron/issues/426. + + Parameters + ---------- + payload : dict[str, JsonType] + The JSON statement after being base64-decoded. + + Returns + ------- + TypeGuard[InTotoStatement] + ``True`` if the attestation statement is valid, in which case its type is narrowed to an + ``InTotoStatement``; ``False`` otherwise. + + Raises + ------ + ValidateInTotoPayloadError + When the payload does not follow the expected schema. 
+ """ + type_ = payload.get("_type") + if type_ is None: + raise ValidateInTotoPayloadError( + "The attribute '_type' of the in-toto statement is missing.", + ) + if not isinstance(type_, str): + raise ValidateInTotoPayloadError( + "The value of attribute '_type' in the in-toto statement is invalid: expecting a string.", + ) + + subjects_payload = payload.get("subject") + if subjects_payload is None: + raise ValidateInTotoPayloadError( + "The attribute 'subject' of the in-toto statement is missing.", + ) + if not isinstance(subjects_payload, list): + raise ValidateInTotoPayloadError( + "The value of attribute 'subject' in the in-toto statement is invalid: expecting a list.", + ) + + for subject_json in subjects_payload: + validate_intoto_subject(subject_json) + + predicate_type = payload.get("predicateType") + if predicate_type is None: + raise ValidateInTotoPayloadError( + "The attribute 'predicateType' of the in-toto statement is missing.", + ) + + if not isinstance(predicate_type, str): + raise ValidateInTotoPayloadError( + "The value of attribute 'predicateType' in the in-toto statement is invalid: expecting a string." + ) + + predicate = payload.get("predicate") + if predicate is not None and not isinstance(predicate, dict): + raise ValidateInTotoPayloadError( + "The value attribute 'predicate' in the in-toto statement is invalid: expecting an object.", + ) + + return True + + +def validate_intoto_subject(subject: JsonType) -> TypeGuard[InTotoSubject]: + """Validate a single subject in the in-toto statement. + + See specification: https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#statement. + + TODO: Consider using the in-toto-attestation package (https://github.com/in-toto/attestation/tree/main/python), + which contains Python bindings for in-toto attestation. + See issue: https://github.com/oracle/macaron/issues/426. + + Parameters + ---------- + subject : JsonType + The JSON element representing a single subject. + + Returns + ------- + TypeGuard[InTotoSubject] + ``True`` if the subject element is valid, in which case its type is narrowed to an + ``InTotoSubject``; ``False`` otherwise. + + Raises + ------ + ValidateInTotoPayloadError + When the payload does not follow the expecting schema. + """ + if not isinstance(subject, dict): + raise ValidateInTotoPayloadError( + "A subject in the in-toto statement is invalid: expecting an object.", + ) + + name = subject.get("name") + if name is None: + raise ValidateInTotoPayloadError("The attribute 'name' is missing from a subject.") + if not isinstance(name, str): + raise ValidateInTotoPayloadError( + "The value of the attribute 'name' is invalid for a subject.", + ) + + digest_set = subject.get("digest") + if digest_set is None: + raise ValidateInTotoPayloadError( + "The attribute 'digest' is missing from a subject.", + ) + if not isinstance(digest_set, dict) or not is_valid_digest_set(digest_set): + raise ValidateInTotoPayloadError( + "The value of the attribute 'digest' is invalid for a subject.", + ) + + return True + + +def is_valid_digest_set(digest: dict[str, JsonType]) -> TypeGuard[dict[str, str]]: + """Validate the digest set. + + Specification for the digest set: https://github.com/in-toto/attestation/blob/main/spec/v0.1.0/field_types.md#DigestSet. + + Parameters + ---------- + digest : dict[str, JsonType] + The digest set. 
+ + Returns + ------- + TypeGuard[dict[str, str]] + ``True`` if the digest set is valid according to the spec, in which case its type + is narrowed to a ``dict[str, str]``; ``False`` otherwise. + """ + for value in digest.values(): + if not isinstance(value, str): + return False + return True diff --git a/src/macaron/slsa_analyzer/provenance/intoto/v1/__init__.py b/src/macaron/slsa_analyzer/provenance/intoto/v1/__init__.py new file mode 100644 index 000000000..0ab4b9367 --- /dev/null +++ b/src/macaron/slsa_analyzer/provenance/intoto/v1/__init__.py @@ -0,0 +1,14 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""This module handles in-toto version 1 attestations.""" + +from typing import TypedDict + + +class InTotoStatement(TypedDict): + """An in-toto version 1 statement. + + This is the type of the payload in a version 1 in-toto attestation. + Specification: https://github.com/in-toto/attestation/blob/main/spec/v1/statement.md. + """ diff --git a/src/macaron/slsa_analyzer/provenance/loader.py b/src/macaron/slsa_analyzer/provenance/loader.py index ecf941c08..34c4b88f7 100644 --- a/src/macaron/slsa_analyzer/provenance/loader.py +++ b/src/macaron/slsa_analyzer/provenance/loader.py @@ -1,54 +1,93 @@ -# Copyright (c) 2022 - 2022, Oracle and/or its affiliates. All rights reserved. +# Copyright (c) 2022 - 2023, Oracle and/or its affiliates. All rights reserved. # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. """This module contains the loaders for SLSA provenances.""" import base64 import json -from typing import Any - - -class SLSAProvenanceError(Exception): - """This error happens when the provenance cannot be loaded.""" - - -class ProvPayloadLoader: - """The loader for SLSA attestation files.""" - - @classmethod - def load(cls, path: str) -> Any: - """Load a SLSA attestation file. - - This method returned the JSON deserialized ``Message``/``Statement`` section of the SLSA attestation. - - For more information on the terminology: - - https://slsa.dev/attestation-model - - Parameters - ---------- - path : str - The path to the provenance file. - - Returns - ------- - Any - The JSON deserialized ``Message``/``Statement`` section of the SLSA attestation. - - Raises - ------ - SLSAProvenanceError - If there are errors when loading the file or decoding the content of the SLSA attestation. - """ - try: - with open(path, encoding="utf-8") as file: - provenance = json.load(file) - decoded_payload = base64.b64decode(provenance["payload"]) - return json.loads(decoded_payload) - except json.JSONDecodeError as error: - raise SLSAProvenanceError(f"Cannot deserialize the file content as JSON - {error}") from error - except KeyError as error: - raise SLSAProvenanceError(f"Cannot find the payload in the SLSA provenance - {error}") from error - except UnicodeDecodeError as error: - raise SLSAProvenanceError( - f"Cannot decode the message content of the SLSA attestation - {error.reason}" - ) from error + +from macaron.slsa_analyzer.provenance.intoto import InTotoPayload, validate_intoto_payload +from macaron.slsa_analyzer.provenance.intoto.errors import LoadIntotoAttestationError, ValidateInTotoPayloadError +from macaron.util import JsonType + + +def load_provenance_file(filepath: str) -> dict[str, JsonType]: + """Load a provenance file and obtain the payload.
+ + Inside a provenance file is a DSSE envelope containing a base64-encoded + provenance JSON payload. See: https://github.com/secure-systems-lab/dsse. + + Parameters + ---------- + filepath : str + Path to the provenance file. + + Returns + ------- + dict[str, JsonType] + The provenance JSON payload. + + Raises + ------ + LoadIntotoAttestationError + If there is an error loading the provenance JSON payload. + """ + try: + with open(filepath, encoding="utf-8") as file: + provenance = json.load(file) + except (json.JSONDecodeError, TypeError) as error: + raise LoadIntotoAttestationError( + "Cannot deserialize the file content as JSON.", + ) from error + + provenance_payload = provenance.get("payload", None) + if not provenance_payload: + raise LoadIntotoAttestationError( + 'Cannot find the "payload" field in the decoded provenance.', + ) + + try: + decoded_payload = base64.b64decode(provenance_payload) + except UnicodeDecodeError as error: + raise LoadIntotoAttestationError("Cannot decode the payload.") from error + + try: + json_payload = json.loads(decoded_payload) + except (json.JSONDecodeError, TypeError) as error: + raise LoadIntotoAttestationError( + "Cannot deserialize the provenance payload as JSON.", + ) from error + + if not isinstance(json_payload, dict): + raise LoadIntotoAttestationError("The provenance payload is not a JSON object.") + + return json_payload + + +def load_provenance_payload(filepath: str) -> InTotoPayload: + """Load, verify, and construct an in-toto payload. + + Parameters + ---------- + filepath : str + Absolute path to the provenance file. + + Returns + ------- + InTotoPayload + The in-toto payload. + + Raises + ------ + LoadIntotoAttestationError + If there is an error while loading and verifying the provenance payload. + """ + try: + payload_json = load_provenance_file(filepath) + except LoadIntotoAttestationError as error: + raise error + + try: + return validate_intoto_payload(payload_json) + except ValidateInTotoPayloadError as error: + raise LoadIntotoAttestationError("Failed to deserialize the payload.") from error diff --git a/src/macaron/slsa_analyzer/provenance/provenance.py b/src/macaron/slsa_analyzer/provenance/provenance.py new file mode 100644 index 000000000..4425cb1d5 --- /dev/null +++ b/src/macaron/slsa_analyzer/provenance/provenance.py @@ -0,0 +1,36 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""This module defines classes and interfaces related to provenances.""" + +from typing import Protocol + +from macaron.slsa_analyzer.asset import AssetLocator +from macaron.slsa_analyzer.provenance.intoto import InTotoPayload + + +class DownloadedProvenanceData(Protocol): + """Interface of a provenance that has been downloaded (e.g. from a CI service or a package registry).""" + + @property + def asset(self) -> AssetLocator: + """Get the asset.""" + + @property + def payload(self) -> InTotoPayload: + """Get the JSON payload of the provenance, in in-toto format. + + The payload is a field within a DSSE envelope, having the type "Statement". 
+ + For more details, see the following pages in in-toto spec: + + In-toto attestation layers: https://github.com/in-toto/attestation/tree/main/spec + - v0.1: https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#attestation-spec + - v1 : https://github.com/in-toto/attestation/tree/main/spec/v1#specification-for-in-toto-attestation-layers + Envelope layer: + - v0.1: https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#envelope + - v1 : https://github.com/in-toto/attestation/blob/main/spec/v1/envelope.md + Statement layer: + - v0.1: https://github.com/in-toto/attestation/tree/main/spec/v0.1.0#statement + - v1: https://github.com/in-toto/attestation/blob/main/spec/v1/statement.md + """ diff --git a/src/macaron/slsa_analyzer/provenance/witness/__init__.py b/src/macaron/slsa_analyzer/provenance/witness/__init__.py new file mode 100644 index 000000000..234d6f2a5 --- /dev/null +++ b/src/macaron/slsa_analyzer/provenance/witness/__init__.py @@ -0,0 +1,181 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""Witness provenance (https://github.com/testifysec/witness).""" + +import logging +from typing import NamedTuple + +from macaron.config.defaults import defaults +from macaron.slsa_analyzer.asset import AssetLocator +from macaron.slsa_analyzer.provenance.intoto import InTotoPayload, InTotoV01Payload +from macaron.slsa_analyzer.provenance.witness.attestor import GitLabWitnessAttestor, RepoAttestor + +logger: logging.Logger = logging.getLogger(__name__) + + +class WitnessProvenanceData(NamedTuple): + """Data of a downloaded witness provenance. + + Attributes + ---------- + asset : AssetLocator + The provenance asset. + payload : InTotoPayload + The provenance payload. + """ + + asset: AssetLocator + payload: InTotoPayload + + +class WitnessVerifierConfig(NamedTuple): + """Configuration for verifying witness provenances. + + Attributes + ---------- + predicate_types : set[str] + A provenance payload is recognized by Macaron to be a witness provenance if its + ``predicateType`` value is present within this set. + artifact_extensions : set[str] + A set of artifact extensions to verify. Artifacts having an extension outside this list + are not verified. + """ + + predicate_types: set[str] + artifact_extensions: set[str] + + +def load_witness_verifier_config() -> WitnessVerifierConfig: + """Load configuration for verifying witness provenances. + + Returns + ------- + WitnessVerifierConfig + Configuration for verifying witness provenances. + """ + return WitnessVerifierConfig( + predicate_types=set( + defaults.get_list( + "provenance.witness", + "predicate_types", + fallback=[], + ) + ), + artifact_extensions=set( + defaults.get_list( + "provenance.witness", + "artifact_extensions", + fallback=[], + ) + ), + ) + + +def is_witness_provenance_payload( + payload: InTotoPayload, + predicate_types: set[str], +) -> bool: + """Check if the given provenance payload is a witness provenance payload. + + Parameters + ---------- + payload : InTotoPayload + The provenance payload. + predicate_types : set[str] + The allowed values for the ``"predicateType"`` field of the provenance payload. + + Returns + ------- + bool + ``True`` if the payload is a witness provenance payload, ``False`` otherwise. + """ + # TODO: add support for in-toto v1 provenances. 
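+    # Note: with the current defaults, recognition is based purely on matching the
+    # payload's ``predicateType`` against the values configured under the
+    # ``[provenance.witness]`` section of the .ini configuration
+    # (see ``load_witness_verifier_config`` above).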
+ return isinstance(payload, InTotoV01Payload) and payload.statement["predicateType"] in predicate_types + + +class WitnessProvenanceSubject(NamedTuple): + """A helper class to store elements of the ``subject`` list in the provenances. + + Attributes + ---------- + subject_name : str + The ``"name"`` field of each ``subject``. + sha256_digest : str + The SHA256 digest of the asset corresponding to the subject. + """ + + subject_name: str + sha256_digest: str + + @property + def artifact_name(self) -> str: + """Get the artifact name, which should be the last part of the subject.""" + _, _, artifact_name = self.subject_name.rpartition("/") + return artifact_name + + +def extract_repo_url(witness_payload: InTotoPayload) -> str | None: + """Extract the repo URL from the witness provenance payload. + + Parameters + ---------- + witness_payload : InTotoPayload + The witness provenance payload. + + Returns + ------- + str | None + The repo URL within the witness provenance payload, if the provenance payload + can be processed and the repo URL is found. + """ + repo_attestors: list[RepoAttestor] = [GitLabWitnessAttestor()] + + for attestor in repo_attestors: + repo_url = attestor.extract_repo_url(witness_payload) + if repo_url is not None: + return repo_url + + return None + + +def extract_witness_provenance_subjects(witness_payload: InTotoPayload) -> set[WitnessProvenanceSubject]: + """Read the ``"subject"`` field of the provenance to obtain the hash digests of each subject. + + Parameters + ---------- + witness_payload : InTotoPayload + The witness provenance payload. + + Returns + ------- + set[WitnessProvenanceSubject] + The set of subjects found in the provenance, each paired with its SHA256 digest. + """ + # TODO: add support for in-toto v1 provenances. + + if isinstance(witness_payload, InTotoV01Payload): + subjects = witness_payload.statement["subject"] + subject_digests = set() + + for subject in subjects: + name = subject["name"] + digest = subject["digest"] + + sha256 = digest.get("sha256") + if not sha256 or not isinstance(sha256, str): + continue + + subject_digests.add( + WitnessProvenanceSubject( + subject_name=name, + sha256_digest=sha256, + ) + ) + + return subject_digests + + return set() diff --git a/src/macaron/slsa_analyzer/provenance/witness/attestor.py b/src/macaron/slsa_analyzer/provenance/witness/attestor.py new file mode 100644 index 000000000..7fc2e3f24 --- /dev/null +++ b/src/macaron/slsa_analyzer/provenance/witness/attestor.py @@ -0,0 +1,95 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""Witness Attestors.""" + +from typing import Protocol + +from macaron.slsa_analyzer.provenance.intoto import InTotoPayload, InTotoV01Payload + + +class RepoAttestor(Protocol): + """Interface for witness attestors that record repo URLs.""" + + def extract_repo_url(self, payload: InTotoPayload) -> str | None: + """Extract the repo URL from a witness provenance payload. + + Parameters + ---------- + payload : InTotoPayload + The witness provenance payload. + + Returns + ------- + str | None + The repo URL, or ``None`` if it cannot be located in the provenance payload. + """ + + +class GitLabWitnessAttestor: + """Witness attestor for GitLab.
+ + In the payload of a witness provenance, each subject corresponds to an attestor. + Docs: https://github.com/testifysec/witness/blob/main/docs/attestors/gitlab.md + """ + + def extract_repo_url(self, payload: InTotoPayload) -> str | None: + """Extract the repo URL from a witness provenance payload. + + Parameters + ---------- + payload : InTotoStatement + The witness provenance payload. + + Returns + ------- + str | None + The repo URL, or ``None`` if it cannot be located in the provenance payload. + """ + if isinstance(payload, InTotoV01Payload): + return self.extract_repo_url_intoto_v01(payload) + return None + + def extract_repo_url_intoto_v01(self, payload: InTotoV01Payload) -> str | None: + """Extract the repo URL from a witness provenance payload following in-toto v0.1 schema. + + Note: the current implementation inspects the ``predicate`` field of the payload + to locate the repo URL. The schema of this field is currently undocumented by witness. + + Parameters + ---------- + payload : InTotoV01Statement + The in-toto v0.1 payload. + + Returns + ------- + str | None + The repo URL, or ``None`` if it cannot be located in the provenance payload. + """ + if payload.statement["predicate"] is None: + return None + + attestations = payload.statement["predicate"].get("attestations", []) + + if attestations is None or not isinstance(attestations, list): + return None + + for attestation_entry in attestations: + if not isinstance(attestation_entry, dict): + return None + + attestation_type = attestation_entry.get("type") + if attestation_type != "https://witness.dev/attestations/gitlab/v0.1": + continue + + attestation = attestation_entry.get("attestation") + if attestation is None or not isinstance(attestation, dict): + return None + + project_url = attestation.get("projecturl") + if project_url is None or not isinstance(project_url, str): + return None + + return project_url + + return None diff --git a/src/macaron/slsa_analyzer/specs/ci_spec.py b/src/macaron/slsa_analyzer/specs/ci_spec.py index 8c5dda6b3..d1f2fde24 100644 --- a/src/macaron/slsa_analyzer/specs/ci_spec.py +++ b/src/macaron/slsa_analyzer/specs/ci_spec.py @@ -1,13 +1,16 @@ -# Copyright (c) 2022 - 2022, Oracle and/or its affiliates. All rights reserved. +# Copyright (c) 2022 - 2023, Oracle and/or its affiliates. All rights reserved. # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. """This module contains the BuildSpec class.""" +from collections.abc import Sequence from typing import TypedDict from macaron.code_analyzer.call_graph import CallGraph from macaron.parsers.bashparser import BashCommands +from macaron.slsa_analyzer.asset import AssetLocator from macaron.slsa_analyzer.ci_service.base_ci_service import BaseCIService +from macaron.slsa_analyzer.provenance.intoto import InTotoPayload class CIInfo(TypedDict): @@ -22,11 +25,18 @@ class CIInfo(TypedDict): callgraph: CallGraph """The call graph for this CI service.""" - provenance_assets: list[dict] - """Release assets for SLSA provenances, e.g., asset for attestation.intoto.jsonl.""" + provenance_assets: list[AssetLocator] + """Release assets for SLSA provenances, e.g., asset for attestation.intoto.jsonl. + + For GitHub Actions, each asset is a member of the ``assets`` list in the GitHub + Actions latest release payload. + See: https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#get-the-latest-release. + """ latest_release: dict - """The latest release.""" + """The latest release. 
+ Schema: https://docs.github.com/en/rest/releases/releases?apiVersion=2022-11-28#get-the-latest-release. + """ - provenances: list[dict] - """The SLSA provenances in in-toto format.""" + provenances: Sequence[InTotoPayload] + """The JSON payloads of SLSA provenances in in-toto format.""" diff --git a/src/macaron/slsa_analyzer/specs/inferred_provenance.py b/src/macaron/slsa_analyzer/specs/inferred_provenance.py index f0e9b5869..7b0584c91 100644 --- a/src/macaron/slsa_analyzer/specs/inferred_provenance.py +++ b/src/macaron/slsa_analyzer/specs/inferred_provenance.py @@ -1,15 +1,18 @@ -# Copyright (c) 2022 - 2022, Oracle and/or its affiliates. All rights reserved. +# Copyright (c) 2022 - 2023, Oracle and/or its affiliates. All rights reserved. # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. """This module contains the inferred SLSA provenance spec.""" +from macaron.slsa_analyzer.provenance.intoto import v01 + + class Provenance: """This class implements the inferred SLSA provenance.""" def __init__(self) -> None: """Initialize instance.""" - self.payload = { + self.payload: v01.InTotoStatement = { "_type": "https://in-toto.io/Statement/v0.1", "subject": [], "predicateType": "https://slsa.dev/provenance/v0.2", diff --git a/src/macaron/slsa_analyzer/specs/package_registry_spec.py b/src/macaron/slsa_analyzer/specs/package_registry_spec.py new file mode 100644 index 000000000..770a6fb6a --- /dev/null +++ b/src/macaron/slsa_analyzer/specs/package_registry_spec.py @@ -0,0 +1,32 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + + +"""This module contains data related to one package registry that is matched against a repository.""" + +from dataclasses import dataclass, field + +from macaron.slsa_analyzer.build_tool import BaseBuildTool +from macaron.slsa_analyzer.package_registry import PackageRegistry +from macaron.slsa_analyzer.provenance.provenance import DownloadedProvenanceData + + +@dataclass +class PackageRegistryInfo: + """This class contains data for one package registry that is matched against a repository. + + Attributes + ---------- + build_tool : BaseBuildTool + The build tool matched against the repository. + + package_registry : PackageRegistry + The package registry matched against the repository. This is dependent on the build tool detected. + + provenances : list[IsProvenance] + The provenances matched against the current repo. + """ + + build_tool: BaseBuildTool + package_registry: PackageRegistry + provenances: list[DownloadedProvenanceData] = field(default_factory=list) diff --git a/tests/e2e/expected_results/jackson-databind/jackson-databind.json b/tests/e2e/expected_results/jackson-databind/jackson-databind.json index 47f76d905..3bfb62b98 100644 --- a/tests/e2e/expected_results/jackson-databind/jackson-databind.json +++ b/tests/e2e/expected_results/jackson-databind/jackson-databind.json @@ -1,6 +1,6 @@ { "metadata": { - "timestamps": "2023-06-14 14:08:20" + "timestamps": "2023-07-08 03:35:08" }, "target": { "info": { @@ -103,7 +103,7 @@ "checks": { "summary": { "DISABLED": 0, - "FAILED": 4, + "FAILED": 5, "PASSED": 4, "SKIPPED": 0, "UNKNOWN": 0 @@ -181,7 +181,7 @@ "Provenance conforms with expectations - SLSA Level 3" ], "justification": [ - "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_level_three_1 FAILED." 
+ "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_available_1 FAILED." ], "result_type": "FAILED" }, @@ -199,6 +199,20 @@ ], "result_type": "FAILED" }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "check_description": "Check whether the target has a level-1 witness provenance.", + "slsa_requirements": [ + "Provenance - Available - SLSA Level 1", + "Provenance content - Identifies build instructions - SLSA Level 1", + "Provenance content - Identifies artifacts - SLSA Level 1", + "Provenance content - Identifies builder - SLSA Level 1" + ], + "justification": [ + "Check mcn_provenance_witness_level_one_1 is set to FAILED because mcn_provenance_available_1 FAILED." + ], + "result_type": "FAILED" + }, { "check_id": "mcn_trusted_builder_level_three_1", "check_description": "Check whether the target uses a trusted SLSA level 3 builder.", @@ -251,6 +265,10 @@ { "check_id": "mcn_build_service_1", "num_deps_pass": 0 + }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "num_deps_pass": 0 } ], "dep_status": [] diff --git a/tests/e2e/expected_results/maven/guava.json b/tests/e2e/expected_results/maven/guava.json index 459c620f7..d68be2be2 100644 --- a/tests/e2e/expected_results/maven/guava.json +++ b/tests/e2e/expected_results/maven/guava.json @@ -1,6 +1,6 @@ { "metadata": { - "timestamps": "2023-06-14 13:33:34" + "timestamps": "2023-07-08 03:36:15" }, "target": { "info": { @@ -61,7 +61,7 @@ "checks": { "summary": { "DISABLED": 0, - "FAILED": 4, + "FAILED": 5, "PASSED": 4, "SKIPPED": 0, "UNKNOWN": 0 @@ -139,7 +139,7 @@ "Provenance conforms with expectations - SLSA Level 3" ], "justification": [ - "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_level_three_1 FAILED." + "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_available_1 FAILED." ], "result_type": "FAILED" }, @@ -157,6 +157,20 @@ ], "result_type": "FAILED" }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "check_description": "Check whether the target has a level-1 witness provenance.", + "slsa_requirements": [ + "Provenance - Available - SLSA Level 1", + "Provenance content - Identifies build instructions - SLSA Level 1", + "Provenance content - Identifies artifacts - SLSA Level 1", + "Provenance content - Identifies builder - SLSA Level 1" + ], + "justification": [ + "Check mcn_provenance_witness_level_one_1 is set to FAILED because mcn_provenance_available_1 FAILED." 
+ ], + "result_type": "FAILED" + }, { "check_id": "mcn_trusted_builder_level_three_1", "check_description": "Check whether the target uses a trusted SLSA level 3 builder.", @@ -209,6 +223,10 @@ { "check_id": "mcn_build_service_1", "num_deps_pass": 0 + }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "num_deps_pass": 0 } ], "dep_status": [] diff --git a/tests/e2e/expected_results/maven/maven.json b/tests/e2e/expected_results/maven/maven.json index 8eb1d63cc..158f1c047 100644 --- a/tests/e2e/expected_results/maven/maven.json +++ b/tests/e2e/expected_results/maven/maven.json @@ -103,7 +103,7 @@ "checks": { "summary": { "DISABLED": 0, - "FAILED": 4, + "FAILED": 5, "PASSED": 4, "SKIPPED": 0, "UNKNOWN": 0 @@ -194,6 +194,20 @@ ], "result_type": "FAILED" }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "check_description": "Check whether the target has a level-1 witness provenance.", + "slsa_requirements": [ + "Provenance - Available - SLSA Level 1", + "Provenance content - Identifies build instructions - SLSA Level 1", + "Provenance content - Identifies artifacts - SLSA Level 1", + "Provenance content - Identifies builder - SLSA Level 1" + ], + "justification": [ + "Check mcn_provenance_witness_level_one_1 is set to FAILED because mcn_provenance_available_1 FAILED." + ], + "result_type": "FAILED" + }, { "check_id": "mcn_trusted_builder_level_three_1", "check_description": "Check whether the target uses a trusted SLSA level 3 builder.", @@ -245,7 +259,11 @@ }, { "check_id": "mcn_build_service_1", - "num_deps_pass": 2 + "num_deps_pass": 0 + }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "num_deps_pass": 0 } ], "dep_status": [ diff --git a/tests/e2e/expected_results/maven/mockito.json b/tests/e2e/expected_results/maven/mockito.json index 91260e4af..e7da37718 100644 --- a/tests/e2e/expected_results/maven/mockito.json +++ b/tests/e2e/expected_results/maven/mockito.json @@ -1,6 +1,6 @@ { "metadata": { - "timestamps": "2023-06-14 13:33:34" + "timestamps": "2023-07-08 03:36:15" }, "target": { "info": { @@ -61,7 +61,7 @@ "checks": { "summary": { "DISABLED": 0, - "FAILED": 4, + "FAILED": 5, "PASSED": 4, "SKIPPED": 0, "UNKNOWN": 0 @@ -139,7 +139,7 @@ "Provenance conforms with expectations - SLSA Level 3" ], "justification": [ - "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_level_three_1 FAILED." + "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_available_1 FAILED." ], "result_type": "FAILED" }, @@ -157,6 +157,20 @@ ], "result_type": "FAILED" }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "check_description": "Check whether the target has a level-1 witness provenance.", + "slsa_requirements": [ + "Provenance - Available - SLSA Level 1", + "Provenance content - Identifies build instructions - SLSA Level 1", + "Provenance content - Identifies artifacts - SLSA Level 1", + "Provenance content - Identifies builder - SLSA Level 1" + ], + "justification": [ + "Check mcn_provenance_witness_level_one_1 is set to FAILED because mcn_provenance_available_1 FAILED." 
+ ], + "result_type": "FAILED" + }, { "check_id": "mcn_trusted_builder_level_three_1", "check_description": "Check whether the target uses a trusted SLSA level 3 builder.", @@ -209,6 +223,10 @@ { "check_id": "mcn_build_service_1", "num_deps_pass": 0 + }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "num_deps_pass": 0 } ], "dep_status": [] diff --git a/tests/e2e/expected_results/micronaut-core/caffeine.json b/tests/e2e/expected_results/micronaut-core/caffeine.json index 4e0b4cca4..869788261 100644 --- a/tests/e2e/expected_results/micronaut-core/caffeine.json +++ b/tests/e2e/expected_results/micronaut-core/caffeine.json @@ -1,6 +1,6 @@ { "metadata": { - "timestamps": "2023-06-14 13:20:54" + "timestamps": "2023-07-08 03:33:32" }, "target": { "info": { @@ -103,7 +103,7 @@ "checks": { "summary": { "DISABLED": 0, - "FAILED": 4, + "FAILED": 5, "PASSED": 4, "SKIPPED": 0, "UNKNOWN": 0 @@ -181,7 +181,7 @@ "Provenance conforms with expectations - SLSA Level 3" ], "justification": [ - "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_level_three_1 FAILED." + "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_available_1 FAILED." ], "result_type": "FAILED" }, @@ -199,6 +199,20 @@ ], "result_type": "FAILED" }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "check_description": "Check whether the target has a level-1 witness provenance.", + "slsa_requirements": [ + "Provenance - Available - SLSA Level 1", + "Provenance content - Identifies build instructions - SLSA Level 1", + "Provenance content - Identifies artifacts - SLSA Level 1", + "Provenance content - Identifies builder - SLSA Level 1" + ], + "justification": [ + "Check mcn_provenance_witness_level_one_1 is set to FAILED because mcn_provenance_available_1 FAILED." 
+ ], + "result_type": "FAILED" + }, { "check_id": "mcn_trusted_builder_level_three_1", "check_description": "Check whether the target uses a trusted SLSA level 3 builder.", @@ -251,6 +265,10 @@ { "check_id": "mcn_build_service_1", "num_deps_pass": 0 + }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "num_deps_pass": 0 } ], "dep_status": [] diff --git a/tests/e2e/expected_results/micronaut-core/micronaut-core.json b/tests/e2e/expected_results/micronaut-core/micronaut-core.json index 19746e8ef..92f6cfc21 100644 --- a/tests/e2e/expected_results/micronaut-core/micronaut-core.json +++ b/tests/e2e/expected_results/micronaut-core/micronaut-core.json @@ -1,6 +1,6 @@ { "metadata": { - "timestamps": "2023-06-14 13:20:54" + "timestamps": "2023-07-08 03:33:32" }, "target": { "info": { @@ -20,1192 +20,502 @@ "predicateType": "https://slsa.dev/provenance/v0.2", "subject": [ { - "name": "build/repo/io/micronaut/micronaut-aop/3.9.3/micronaut-aop-3.9.3-javadoc.jar", + "name": "build/repo/io/micronaut/micronaut-aop/4.0.0-RC5/micronaut-aop-4.0.0-RC5.jar", "digest": { - "sha256": "f0c6c9effd0326dc0693cab5459a126d48ba65541a6eafa13ce1bc8f89e5021a" + "sha256": "fe81c7b4e6c95178604b96ef73f039fdc11c5f1d9975283b80a41a7b33c3e0c9" } }, { - "name": "build/repo/io/micronaut/micronaut-aop/3.9.3/micronaut-aop-3.9.3-sources.jar", + "name": "build/repo/io/micronaut/micronaut-aop/4.0.0-RC5/micronaut-aop-4.0.0-RC5.pom", "digest": { - "sha256": "a061e7236808a89689c263c5b77841ab6dd853746941dab26fc61e76c18b0bd2" + "sha256": "2011e5d598945d4b57ff26be6095fd1112d5f2b6b05ac582da31d4c48cb85796" } }, { - "name": "build/repo/io/micronaut/micronaut-aop/3.9.3/micronaut-aop-3.9.3.jar", + "name": "build/repo/io/micronaut/micronaut-buffer-netty/4.0.0-RC5/micronaut-buffer-netty-4.0.0-RC5.jar", "digest": { - "sha256": "35e5c74dfcd95ee1fc5d9a02008cd1094e6ec19ecd5fe9dd9ae7e57e7cfad818" + "sha256": "23d6730c4151324238b7feae3ef96ea3e6a9519b14f70e10e65282232d426f58" } }, { - "name": "build/repo/io/micronaut/micronaut-aop/3.9.3/micronaut-aop-3.9.3.pom", + "name": "build/repo/io/micronaut/micronaut-buffer-netty/4.0.0-RC5/micronaut-buffer-netty-4.0.0-RC5.pom", "digest": { - "sha256": "bf486650a774829a7519b34c803727a4640ab907d4db534db35b2294a5a49bea" + "sha256": "4de36c140199d1b8d1a62a15b8a05d3448da6ee34064ce5bf3a71c8f92a90224" } }, { - "name": "build/repo/io/micronaut/micronaut-bom/3.9.3/micronaut-bom-3.9.3.module", + "name": "build/repo/io/micronaut/micronaut-context-propagation/4.0.0-RC5/micronaut-context-propagation-4.0.0-RC5.jar", "digest": { - "sha256": "b18b167c24f1d7819871089b7b95acdec506d7739fc372ceaede6712ef25fdfc" + "sha256": "6386eaae3efb5dd4c083ddc6180453a6d4069d28aa508cb9866e56b1dfa98b61" } }, { - "name": "build/repo/io/micronaut/micronaut-bom/3.9.3/micronaut-bom-3.9.3.pom", + "name": "build/repo/io/micronaut/micronaut-context-propagation/4.0.0-RC5/micronaut-context-propagation-4.0.0-RC5.pom", "digest": { - "sha256": "d4439629b4074d7330355557f614c8128b660cca3713a9d5bc74d5f01507d90a" + "sha256": "35581cc3c527aa476e915f7dde7fed08ab3850e08f4cd09b24cb0328cf192309" } }, { - "name": "build/repo/io/micronaut/micronaut-bom/3.9.3/micronaut-bom-3.9.3.toml", + "name": "build/repo/io/micronaut/micronaut-context/4.0.0-RC5/micronaut-context-4.0.0-RC5.jar", "digest": { - "sha256": "554892313d132a9e312dd34b0f041ea909ed03ac395baa74ee06be8f7832ce8a" + "sha256": "8ca6348c7b338894b15f0719acd8d390c793c161b32369f9e14d8889f083ae06" } }, { - "name": "build/repo/io/micronaut/micronaut-buffer-netty/3.9.3/micronaut-buffer-netty-3.9.3-all.jar", + 
"name": "build/repo/io/micronaut/micronaut-context/4.0.0-RC5/micronaut-context-4.0.0-RC5.pom", "digest": { - "sha256": "78174ef28e3cb26b81f72ddb297ca55d1251f01222fc2295668a8c7152c587e9" + "sha256": "2c070f5aeeeb34399945401c362b53a180bafffe9bf3fe3afce300d21a7c4e0f" } }, { - "name": "build/repo/io/micronaut/micronaut-buffer-netty/3.9.3/micronaut-buffer-netty-3.9.3-javadoc.jar", + "name": "build/repo/io/micronaut/micronaut-core-bom/4.0.0-RC5/micronaut-core-bom-4.0.0-RC5.pom", "digest": { - "sha256": "70b71d3d8b3a084e48fdfd0c547a7412d3682d071b43ddb2dfa6a1ef61f9d8a4" + "sha256": "2a58cb4bfb5fd8b2f55aa7fc3996e6893aa0152000a86922762d1afa6bcf978d" } }, { - "name": "build/repo/io/micronaut/micronaut-buffer-netty/3.9.3/micronaut-buffer-netty-3.9.3-sources.jar", + "name": "build/repo/io/micronaut/micronaut-core-processor/4.0.0-RC5/micronaut-core-processor-4.0.0-RC5.jar", "digest": { - "sha256": "f0e161ebc4b8c2eb74a1d8679b2f9f1a01987adff550b7735490f6eb6a69c3b0" + "sha256": "643d8da5bdbda12c132354c9e8b397f14df44e44099e3073067e1f9e38164f93" } }, { - "name": "build/repo/io/micronaut/micronaut-buffer-netty/3.9.3/micronaut-buffer-netty-3.9.3.jar", + "name": "build/repo/io/micronaut/micronaut-core-processor/4.0.0-RC5/micronaut-core-processor-4.0.0-RC5.pom", "digest": { - "sha256": "1acff56128be759367d57d5cb5dc5d460c06a7a602dca4dfbc41b8e631532c19" + "sha256": "790a9ce4bf5478d8eb5892891c004065b0f60b3c4cb2c1de0193d49c072a9be0" } }, { - "name": "build/repo/io/micronaut/micronaut-buffer-netty/3.9.3/micronaut-buffer-netty-3.9.3.module", + "name": "build/repo/io/micronaut/micronaut-core-reactive/4.0.0-RC5/micronaut-core-reactive-4.0.0-RC5.jar", "digest": { - "sha256": "d383ce393d2bd61d893378eba5fb0f844a40b694988b13b98d39a6a14358c744" + "sha256": "238a704fa9421ba79b4980e5da260a1039fc32b52fad7fa454bbbc28152f800f" } }, { - "name": "build/repo/io/micronaut/micronaut-buffer-netty/3.9.3/micronaut-buffer-netty-3.9.3.pom", + "name": "build/repo/io/micronaut/micronaut-core-reactive/4.0.0-RC5/micronaut-core-reactive-4.0.0-RC5.pom", "digest": { - "sha256": "7bdf65137cc40eddfdd2a873308e4d0ff864eca3fa3acca7dbcb510133f54704" + "sha256": "339b93388c10169184a01ffffa4898a0bb2eca22673506b1b381517bc140db3c" } }, { - "name": "build/repo/io/micronaut/micronaut-context/3.9.3/micronaut-context-3.9.3-all.jar", + "name": "build/repo/io/micronaut/micronaut-core/4.0.0-RC5/micronaut-core-4.0.0-RC5.jar", "digest": { - "sha256": "6b99bb82104448fb3a6aea8c886a5709bfbbd7d535b2f0a4f856c9e488b6ca54" + "sha256": "af51369feaf2b5d3764c4c920786dd3791371c846b569f9bea71b5d98fb09945" } }, { - "name": "build/repo/io/micronaut/micronaut-context/3.9.3/micronaut-context-3.9.3-javadoc.jar", + "name": "build/repo/io/micronaut/micronaut-core/4.0.0-RC5/micronaut-core-4.0.0-RC5.pom", "digest": { - "sha256": "a2c7f7c05eff8872cb80391bcebc03905b086f0decc262969ccea93958f07a16" + "sha256": "fa4f43ed7a75ad5062276d1eac5dd910bc6fea98d460bb40b4eb47e3fe288c21" } }, { - "name": "build/repo/io/micronaut/micronaut-context/3.9.3/micronaut-context-3.9.3-sources.jar", + "name": "build/repo/io/micronaut/micronaut-discovery-core/4.0.0-RC5/micronaut-discovery-core-4.0.0-RC5.jar", "digest": { - "sha256": "62e95ca637a6f1da8612340990eb92b18f1cb49f0cb1a939ddc122a3628bb80a" + "sha256": "e34a8810c4ba56dc2022be1bd7c48d55df4f310dffcb241f0a7c03c44900b850" } }, { - "name": "build/repo/io/micronaut/micronaut-context/3.9.3/micronaut-context-3.9.3.jar", + "name": "build/repo/io/micronaut/micronaut-discovery-core/4.0.0-RC5/micronaut-discovery-core-4.0.0-RC5.pom", "digest": { - "sha256": 
"68dcedb0e1edf2c2fd4211b11e42f85a5c8e5ccfa7aa50722545b7bbd39baf70" + "sha256": "fbc3751a2c68b94c60f4899812c20c6b468aabedb784e64c086336f0656609f5" } }, { - "name": "build/repo/io/micronaut/micronaut-context/3.9.3/micronaut-context-3.9.3.module", + "name": "build/repo/io/micronaut/micronaut-function-client/4.0.0-RC5/micronaut-function-client-4.0.0-RC5.jar", "digest": { - "sha256": "209c401514231dfe9ea4d313a4d11a9effad502fcbd840a07434b88f647ffeaa" + "sha256": "e6aa43764625cd9cdfd15d8edf59e67a344990bf1181ca6c985d78b372bc332e" } }, { - "name": "build/repo/io/micronaut/micronaut-context/3.9.3/micronaut-context-3.9.3.pom", + "name": "build/repo/io/micronaut/micronaut-function-client/4.0.0-RC5/micronaut-function-client-4.0.0-RC5.pom", "digest": { - "sha256": "e3583006a15c1d8fd2b1449e4bf63c291e59c6063a73aee10cf3224261ca2a1e" + "sha256": "964a899f372855a19fce4dce81ee27d0370208fa1c4bd8beafd8df6fbceaee30" } }, { - "name": "build/repo/io/micronaut/micronaut-core-reactive/3.9.3/micronaut-core-reactive-3.9.3-all.jar", + "name": "build/repo/io/micronaut/micronaut-function-web/4.0.0-RC5/micronaut-function-web-4.0.0-RC5.jar", "digest": { - "sha256": "42f197199dc444a18acbfeb6009b95f3943f6b977317a9bec3a0d816d20a49ac" + "sha256": "cfcf4066f68b5bc0e458a8e2b50666814a18f1ab8867f576b6d031963ced80cf" } }, { - "name": "build/repo/io/micronaut/micronaut-core-reactive/3.9.3/micronaut-core-reactive-3.9.3-javadoc.jar", + "name": "build/repo/io/micronaut/micronaut-function-web/4.0.0-RC5/micronaut-function-web-4.0.0-RC5.pom", "digest": { - "sha256": "d16ca4608cc978542703510dace16beb6e6c06bb932f8c2050f17d05fa103ede" + "sha256": "ec3d58f851336bbcf37b288678f11679df75ab12a95aeaa6d966e953310d6b74" } }, { - "name": "build/repo/io/micronaut/micronaut-core-reactive/3.9.3/micronaut-core-reactive-3.9.3-sources.jar", + "name": "build/repo/io/micronaut/micronaut-function/4.0.0-RC5/micronaut-function-4.0.0-RC5.jar", "digest": { - "sha256": "070adb6c3721885acebe184085c94ce08a7e1b78d4ff2d52465662c6104ddd46" + "sha256": "22a2dbce5eee597be0e248736f8235e5846527a7e2f05e23fd48093a8dec3e04" } }, { - "name": "build/repo/io/micronaut/micronaut-core-reactive/3.9.3/micronaut-core-reactive-3.9.3.jar", + "name": "build/repo/io/micronaut/micronaut-function/4.0.0-RC5/micronaut-function-4.0.0-RC5.pom", "digest": { - "sha256": "93126ed363aee45e3869eb4fbbd1784e7de8b9a164b586a5ffe7c98e6ea45da3" + "sha256": "b6802d87c77fdf83da7a6974c98e9fe22883599ed3d61b803fdf563cecfd7aef" } }, { - "name": "build/repo/io/micronaut/micronaut-core-reactive/3.9.3/micronaut-core-reactive-3.9.3.module", + "name": "build/repo/io/micronaut/micronaut-graal/4.0.0-RC5/micronaut-graal-4.0.0-RC5.jar", "digest": { - "sha256": "ef929ef2646a6bf8416666b535b23cc4b30e919e84f668e905963bd44ea79f38" + "sha256": "91936348d741f625d7ae1c588a81dd7ce2862a0b00454d99c79aa1847cce8902" } }, { - "name": "build/repo/io/micronaut/micronaut-core-reactive/3.9.3/micronaut-core-reactive-3.9.3.pom", + "name": "build/repo/io/micronaut/micronaut-graal/4.0.0-RC5/micronaut-graal-4.0.0-RC5.pom", "digest": { - "sha256": "a85ab70aa0d088d99e69875c18920b425ebd50a9e099bc806c5119b97896c2bc" + "sha256": "e9ae61efd427f823a0f3d257292c6fc1dfb3af35202ffd354f34b08b840ecaa9" } }, { - "name": "build/repo/io/micronaut/micronaut-core/3.9.3/micronaut-core-3.9.3-javadoc.jar", + "name": "build/repo/io/micronaut/micronaut-http-client-core/4.0.0-RC5/micronaut-http-client-core-4.0.0-RC5.jar", "digest": { - "sha256": "3c7cbc75df966af1816f9575179cff0d696963d0cc471518f7eb3a6fd4e41405" + "sha256": 
"c923a05bbf55d1c2284d17c4090981dc3d3fa60d2e322cf1d2fb6f1765bbff90" } }, { - "name": "build/repo/io/micronaut/micronaut-core/3.9.3/micronaut-core-3.9.3-sources.jar", + "name": "build/repo/io/micronaut/micronaut-http-client-core/4.0.0-RC5/micronaut-http-client-core-4.0.0-RC5.pom", "digest": { - "sha256": "f0d1f2537cadb9ed72470323ca166bfc785856dff95d6c49692c8fb36eddc5e8" + "sha256": "28e22585b3b9949f00a626ff8f9ff7c1e94ae3a975d94a7a8c1e1a6ff15275bd" } }, { - "name": "build/repo/io/micronaut/micronaut-core/3.9.3/micronaut-core-3.9.3.jar", + "name": "build/repo/io/micronaut/micronaut-http-client-jdk/4.0.0-RC5/micronaut-http-client-jdk-4.0.0-RC5.jar", "digest": { - "sha256": "2f520f14ed27db7b5d355be7bfc97d13d8829a73b4c11c97ef12a23b5fa10467" + "sha256": "f23dfe8e3ca296baca472b7c3b08cb7eeb2d401720ae3f6512859a65d1a87293" } }, { - "name": "build/repo/io/micronaut/micronaut-core/3.9.3/micronaut-core-3.9.3.pom", + "name": "build/repo/io/micronaut/micronaut-http-client-jdk/4.0.0-RC5/micronaut-http-client-jdk-4.0.0-RC5.pom", "digest": { - "sha256": "2ae3510173aa21d1675b0403cae0c242e334026e54f1452ad61264de7d69753f" + "sha256": "bbaf41d0bf849fd3c86f1a2dceb6e700080ae18771721c5b66f0af6235eea992" } }, { - "name": "build/repo/io/micronaut/micronaut-function-client/3.9.3/micronaut-function-client-3.9.3-all.jar", + "name": "build/repo/io/micronaut/micronaut-http-client-tck/4.0.0-RC5/micronaut-http-client-tck-4.0.0-RC5.jar", "digest": { - "sha256": "5266aecccb31d9caea87af259c66bb903be37cd7544ebf7ba0eb45bcef1fc31c" + "sha256": "565acd56712ca41bc735e32a35dc8dc699df9a940a29ddb3a4a74070c272d35f" } }, { - "name": "build/repo/io/micronaut/micronaut-function-client/3.9.3/micronaut-function-client-3.9.3-javadoc.jar", + "name": "build/repo/io/micronaut/micronaut-http-client-tck/4.0.0-RC5/micronaut-http-client-tck-4.0.0-RC5.pom", "digest": { - "sha256": "133e44956c7a06642a1f54a254ec98ec2d3a9d81463c7c12243c4dbf82ddfec2" + "sha256": "91e6315305d6e25f093ca542efc6932cbaf597d4af754ef0195a69c0c2a21ae4" } }, { - "name": "build/repo/io/micronaut/micronaut-function-client/3.9.3/micronaut-function-client-3.9.3-sources.jar", + "name": "build/repo/io/micronaut/micronaut-http-client/4.0.0-RC5/micronaut-http-client-4.0.0-RC5.jar", "digest": { - "sha256": "8945d204d343fd91eaac7c02dda6b547ec883dbf0a2615f37469536592a0fe8a" + "sha256": "8d2c671bb8e3376dade2fe3dd2a17a07dde001b33d9af5f822e8152d528ff39b" } }, { - "name": "build/repo/io/micronaut/micronaut-function-client/3.9.3/micronaut-function-client-3.9.3.jar", + "name": "build/repo/io/micronaut/micronaut-http-client/4.0.0-RC5/micronaut-http-client-4.0.0-RC5.pom", "digest": { - "sha256": "fcc3515d7b1f3e8dd0660ddc1937131eb123350ac5293e3257c04beb6cc43886" + "sha256": "4d98f855295f211949adb0474bc58d62da6ad21ff1209cd9299ddf82fa954011" } }, { - "name": "build/repo/io/micronaut/micronaut-function-client/3.9.3/micronaut-function-client-3.9.3.module", + "name": "build/repo/io/micronaut/micronaut-http-netty/4.0.0-RC5/micronaut-http-netty-4.0.0-RC5.jar", "digest": { - "sha256": "e4ff5b7ae19ae82c0777bb67471f8adeac8487b5c34f3744d8fa5896ac7a1d1d" + "sha256": "5833edc1d02d0d1835865a0c7a555db7d9a27bd91cc0c8f21de66e1526fa95e4" } }, { - "name": "build/repo/io/micronaut/micronaut-function-client/3.9.3/micronaut-function-client-3.9.3.pom", + "name": "build/repo/io/micronaut/micronaut-http-netty/4.0.0-RC5/micronaut-http-netty-4.0.0-RC5.pom", "digest": { - "sha256": "a618f1a17d729b4a7f9d5a3681af6557e4d3496be81633dccbcd0e915dfba809" + "sha256": "970e4f4a09664da90a5bdd5d1d16dc45c970260a4d87267a6704ba8c78e9740e" } 
}, { - "name": "build/repo/io/micronaut/micronaut-function-web/3.9.3/micronaut-function-web-3.9.3-all.jar", + "name": "build/repo/io/micronaut/micronaut-http-server-netty/4.0.0-RC5/micronaut-http-server-netty-4.0.0-RC5.jar", "digest": { - "sha256": "4c44fe5d97b2f740f0b6bd067d34dc0723cf0c63bb093fb8ddd23a2654410119" + "sha256": "70c17686fd31b7e706089efaa06a1524685548469c9ff0f71af94bae86b3bfc9" } }, { - "name": "build/repo/io/micronaut/micronaut-function-web/3.9.3/micronaut-function-web-3.9.3-javadoc.jar", + "name": "build/repo/io/micronaut/micronaut-http-server-netty/4.0.0-RC5/micronaut-http-server-netty-4.0.0-RC5.pom", "digest": { - "sha256": "32e50e8b937119207e34231091934c8a0bfca58b5979322cccaa0fd37e123f14" + "sha256": "90df32f7abb9b9aab9cf6be77d959e16fb3c80ffaf32c9b2ee5131da88526c2d" } }, { - "name": "build/repo/io/micronaut/micronaut-function-web/3.9.3/micronaut-function-web-3.9.3-sources.jar", + "name": "build/repo/io/micronaut/micronaut-http-server-tck/4.0.0-RC5/micronaut-http-server-tck-4.0.0-RC5.jar", "digest": { - "sha256": "26f499d74ac5afeca5d85795d6a4bacdd3392fe568ab3a0560513010c95e1f99" + "sha256": "1ce5f3e23038c79e8c17e07640f681db4333fd695639bda3b1f0e438a2ddec35" } }, { - "name": "build/repo/io/micronaut/micronaut-function-web/3.9.3/micronaut-function-web-3.9.3.jar", + "name": "build/repo/io/micronaut/micronaut-http-server-tck/4.0.0-RC5/micronaut-http-server-tck-4.0.0-RC5.pom", "digest": { - "sha256": "0d004f6852cbffb75803d2376c9891c8bb2e40592c7cbcad026df68e97deddc5" + "sha256": "1ae3712dc4f225dc9d9a7df589b772025316c842b3941fa9b5c5518ea6587b10" } }, { - "name": "build/repo/io/micronaut/micronaut-function-web/3.9.3/micronaut-function-web-3.9.3.module", + "name": "build/repo/io/micronaut/micronaut-http-server/4.0.0-RC5/micronaut-http-server-4.0.0-RC5.jar", "digest": { - "sha256": "2ed7e574b50a4de88e17b8d293768bfd120e6d911e98935d5ae1f192bb9877c7" + "sha256": "00e15f6ac19a5611229e9871304af48b7c504bd5920284d402b033e7e6c55395" } }, { - "name": "build/repo/io/micronaut/micronaut-function-web/3.9.3/micronaut-function-web-3.9.3.pom", + "name": "build/repo/io/micronaut/micronaut-http-server/4.0.0-RC5/micronaut-http-server-4.0.0-RC5.pom", "digest": { - "sha256": "c32f68b245ce6c887538d9103588178bec144077f38aa5948a9c9eb5f108aa79" + "sha256": "a4659a29ee319bad9b6a988f2ecd77f5e9e5857eddb43527c38fecfd47a816d4" } }, { - "name": "build/repo/io/micronaut/micronaut-function/3.9.3/micronaut-function-3.9.3-all.jar", + "name": "build/repo/io/micronaut/micronaut-http-tck/4.0.0-RC5/micronaut-http-tck-4.0.0-RC5.jar", "digest": { - "sha256": "d146611619bba2f51821f907d085811ca97789df5d88302d773bd99ced8b107c" + "sha256": "fde78743e5aa56dee7743e0e43f6ef5a4c350fb7d3b72be2757fa05183b912a8" } }, { - "name": "build/repo/io/micronaut/micronaut-function/3.9.3/micronaut-function-3.9.3-javadoc.jar", + "name": "build/repo/io/micronaut/micronaut-http-tck/4.0.0-RC5/micronaut-http-tck-4.0.0-RC5.pom", "digest": { - "sha256": "09373942aa7075c30dfb68a1b2f961340b6ca60e8dc7248140a37d5fa823b583" + "sha256": "07d3594834b5dd6ff911ac501d5e61c1439a99d28fd16856cb6315bf33337c56" } }, { - "name": "build/repo/io/micronaut/micronaut-function/3.9.3/micronaut-function-3.9.3-sources.jar", + "name": "build/repo/io/micronaut/micronaut-http-validation/4.0.0-RC5/micronaut-http-validation-4.0.0-RC5.jar", "digest": { - "sha256": "a29b0c3fc5e6dda131fe8084b03257bde78ebddeb3749de57f4062efb9435ad4" + "sha256": "a93dd3c02f30abec9689c7efb309178effeec18017b389b92dc1cf8707d37691" } }, { - "name": 
"build/repo/io/micronaut/micronaut-function/3.9.3/micronaut-function-3.9.3.jar", + "name": "build/repo/io/micronaut/micronaut-http-validation/4.0.0-RC5/micronaut-http-validation-4.0.0-RC5.pom", "digest": { - "sha256": "7640c9e1a2896e82a924ca93853a83054d7012f3d17e630e27328661ae8ab487" + "sha256": "7aaa3042954d30107db2c5a05449d895fbeb35f21bed20a8d939ae3acaec954d" } }, { - "name": "build/repo/io/micronaut/micronaut-function/3.9.3/micronaut-function-3.9.3.module", + "name": "build/repo/io/micronaut/micronaut-http/4.0.0-RC5/micronaut-http-4.0.0-RC5.jar", "digest": { - "sha256": "69e39532995b6a088bdf1749a784d511c51900e46d417e64aad36dc5a71621a9" + "sha256": "0aa185755c5fb07b480ee67b0c159e2c485e86f0cb3ce3f17dd6c8bd667d763c" } }, { - "name": "build/repo/io/micronaut/micronaut-function/3.9.3/micronaut-function-3.9.3.pom", + "name": "build/repo/io/micronaut/micronaut-http/4.0.0-RC5/micronaut-http-4.0.0-RC5.pom", "digest": { - "sha256": "eed21ba4e1d3c62a1b8272a156ff76a6f3939ebd43b9068407bdb23eb0999950" + "sha256": "3341efa6b3dde441c9eca29c639cb5faf65b010f552e31b8d0565cd95110e66d" } }, { - "name": "build/repo/io/micronaut/micronaut-graal/3.9.3/micronaut-graal-3.9.3-javadoc.jar", + "name": "build/repo/io/micronaut/micronaut-inject-groovy-test/4.0.0-RC5/micronaut-inject-groovy-test-4.0.0-RC5.jar", "digest": { - "sha256": "5bb8692380fa478f34f3f032b9edb197425d367ec9ff5c38521964b78613ff92" + "sha256": "fa3c62dd11de5df4acdbc1f33a2065936791b10b720cde45481b7cdea5c6a88b" } }, { - "name": "build/repo/io/micronaut/micronaut-graal/3.9.3/micronaut-graal-3.9.3-sources.jar", + "name": "build/repo/io/micronaut/micronaut-inject-groovy-test/4.0.0-RC5/micronaut-inject-groovy-test-4.0.0-RC5.pom", "digest": { - "sha256": "869321450f9222257946a9206f1e58e44c92865c8bda9833f49754a3c6a13d99" + "sha256": "ab4e2ab48d01aaaa4635731da9d88675f33fa52e2509c9aed82c90445e27ced2" } }, { - "name": "build/repo/io/micronaut/micronaut-graal/3.9.3/micronaut-graal-3.9.3.jar", + "name": "build/repo/io/micronaut/micronaut-inject-groovy/4.0.0-RC5/micronaut-inject-groovy-4.0.0-RC5.jar", "digest": { - "sha256": "98e5b4d0ef54aaa1c538b6ad8b6a9b2788d9abad7d768ea8cc8698113e8a8557" + "sha256": "7721f712b997b38aef10360674a432023acfe5e840dd2d0cd788e9d14b868a14" } }, { - "name": "build/repo/io/micronaut/micronaut-graal/3.9.3/micronaut-graal-3.9.3.pom", + "name": "build/repo/io/micronaut/micronaut-inject-groovy/4.0.0-RC5/micronaut-inject-groovy-4.0.0-RC5.pom", "digest": { - "sha256": "76410a8b0f9d0554a162bf0ae51ec676073a57b9eaaab67df4de8bb7c5e6378e" + "sha256": "fcafb0aab4242f74010aca94f7e6cb0ed4438d3e793b80f11898af8d844548e2" } }, { - "name": "build/repo/io/micronaut/micronaut-http-client-core/3.9.3/micronaut-http-client-core-3.9.3-all.jar", + "name": "build/repo/io/micronaut/micronaut-inject-java-test/4.0.0-RC5/micronaut-inject-java-test-4.0.0-RC5.jar", "digest": { - "sha256": "604c332aa26353e982c31b25d24f3e16042619a39e08b9f452eb576785f182df" + "sha256": "9d416cc35858064d97ddc8235ae92d1c3f4acd0db753655660bcd18d7cd92389" } }, { - "name": "build/repo/io/micronaut/micronaut-http-client-core/3.9.3/micronaut-http-client-core-3.9.3-javadoc.jar", + "name": "build/repo/io/micronaut/micronaut-inject-java-test/4.0.0-RC5/micronaut-inject-java-test-4.0.0-RC5.pom", "digest": { - "sha256": "732728c98a64b869c13f8dd6824196025e291a8f642523260a7dca20938da722" + "sha256": "7b2a8ba8a352955b6f6b67ac428404f99d6c4b4b3c6e5b7c45e42a27685a050c" } }, { - "name": "build/repo/io/micronaut/micronaut-http-client-core/3.9.3/micronaut-http-client-core-3.9.3-sources.jar", + "name": 
"build/repo/io/micronaut/micronaut-inject-java/4.0.0-RC5/micronaut-inject-java-4.0.0-RC5.jar", "digest": { - "sha256": "2400991d67acd31c91edc0ef1b751304f902642e5e4a293669c5c066cd2ef10f" + "sha256": "d2e19535dc4c0d16692ba7770e485f38668beb21cefcb1ef938658a897763908" } }, { - "name": "build/repo/io/micronaut/micronaut-http-client-core/3.9.3/micronaut-http-client-core-3.9.3.jar", + "name": "build/repo/io/micronaut/micronaut-inject-java/4.0.0-RC5/micronaut-inject-java-4.0.0-RC5.pom", "digest": { - "sha256": "c81c12f15a45d8c86bf8cb22a977b38f64dce8a607d5cf88dd117a136263a7c1" + "sha256": "a4f0c4ac912cf21ac4f5cc8a7a1d716ec1af000d12a7189860bc89b00592f846" } }, { - "name": "build/repo/io/micronaut/micronaut-http-client-core/3.9.3/micronaut-http-client-core-3.9.3.module", + "name": "build/repo/io/micronaut/micronaut-inject-kotlin-test/4.0.0-RC5/micronaut-inject-kotlin-test-4.0.0-RC5.jar", "digest": { - "sha256": "106e14f04304eba164b807ad6946204e0d310a994e825c2c2088cb853e6385a5" + "sha256": "74e57557e71d24c58757d1b721b34c89ae03ee78b66045e805de29a0491896b7" } }, { - "name": "build/repo/io/micronaut/micronaut-http-client-core/3.9.3/micronaut-http-client-core-3.9.3.pom", + "name": "build/repo/io/micronaut/micronaut-inject-kotlin-test/4.0.0-RC5/micronaut-inject-kotlin-test-4.0.0-RC5.pom", "digest": { - "sha256": "cdae597ffdff6772b9d596f7a4250b0e2cd4dc2788f5973e8a2ed59214f26604" + "sha256": "d5e50842e13761b3a2ef6883c232b49d8626227852a7b2a5cdc8f18cd3afd6a0" } }, { - "name": "build/repo/io/micronaut/micronaut-http-client/3.9.3/micronaut-http-client-3.9.3-all.jar", + "name": "build/repo/io/micronaut/micronaut-inject-kotlin/4.0.0-RC5/micronaut-inject-kotlin-4.0.0-RC5.jar", "digest": { - "sha256": "18ad4a6e64a483a3011dbad4839abe2b302abd7fa1e4e832900fa4b3406fed16" + "sha256": "ce7c444748f4784e57ddc00fb35058e0e46a58965c188bea2f51a3f37a1c4be4" } }, { - "name": "build/repo/io/micronaut/micronaut-http-client/3.9.3/micronaut-http-client-3.9.3-javadoc.jar", + "name": "build/repo/io/micronaut/micronaut-inject-kotlin/4.0.0-RC5/micronaut-inject-kotlin-4.0.0-RC5.pom", "digest": { - "sha256": "d2f67e2e68790ca93e0cf3faabfc70de2609e002fb813fe9e8e04d4d2400f587" + "sha256": "1c49d799df91bf1741e8bd26d5f8bfc38d59ffdbad167e92986008b627dbd670" } }, { - "name": "build/repo/io/micronaut/micronaut-http-client/3.9.3/micronaut-http-client-3.9.3-sources.jar", + "name": "build/repo/io/micronaut/micronaut-inject/4.0.0-RC5/micronaut-inject-4.0.0-RC5.jar", "digest": { - "sha256": "ba5b511686ec8997f4ae91b035a5307f3ce24a9cb25b21d7785f2f3e95e02048" + "sha256": "634032462b55dc462949a53e90fa2d137d6246960f35d49fa6ab6ef194e09dc1" } }, { - "name": "build/repo/io/micronaut/micronaut-http-client/3.9.3/micronaut-http-client-3.9.3.jar", + "name": "build/repo/io/micronaut/micronaut-inject/4.0.0-RC5/micronaut-inject-4.0.0-RC5.pom", "digest": { - "sha256": "8468579bf913faf14c4bd0b704854bd6e2fe41aedbd943d6f9f2f92280a056b8" + "sha256": "8d0b0e651e375113b23aa437d9b9203d6b866ced981c58c7261c37a7cefdbfb6" } }, { - "name": "build/repo/io/micronaut/micronaut-http-client/3.9.3/micronaut-http-client-3.9.3.module", + "name": "build/repo/io/micronaut/micronaut-jackson-core/4.0.0-RC5/micronaut-jackson-core-4.0.0-RC5.jar", "digest": { - "sha256": "0dc41aefec207e0e7162a540423b0214567069dabaed6ee5edce64b5981d978e" + "sha256": "04b936cce487200ec5b7d92db25e922c5c44f8b54851999376455c25b69ae924" } }, { - "name": "build/repo/io/micronaut/micronaut-http-client/3.9.3/micronaut-http-client-3.9.3.pom", + "name": 
"build/repo/io/micronaut/micronaut-jackson-core/4.0.0-RC5/micronaut-jackson-core-4.0.0-RC5.pom", "digest": { - "sha256": "ff46ee2aa91fb3f09f264ce0ca2ad89414b4b334bae1b6428660ee2dfdb9fafe" + "sha256": "8bf7dbd70c5fb09c0a54a56cc4cf480d3720eefa609f749d0efb9f4c447fb5e5" } }, { - "name": "build/repo/io/micronaut/micronaut-http-netty/3.9.3/micronaut-http-netty-3.9.3-all.jar", + "name": "build/repo/io/micronaut/micronaut-jackson-databind/4.0.0-RC5/micronaut-jackson-databind-4.0.0-RC5.jar", "digest": { - "sha256": "cfea6246ec49c137c610088f38b82b502ba16c9924d4048561357977accfce89" + "sha256": "7bf1e9a2d996633cbfc0a630378d58c7c0b3d49b859cb8c1e0b888d85b8738e4" } }, { - "name": "build/repo/io/micronaut/micronaut-http-netty/3.9.3/micronaut-http-netty-3.9.3-javadoc.jar", + "name": "build/repo/io/micronaut/micronaut-jackson-databind/4.0.0-RC5/micronaut-jackson-databind-4.0.0-RC5.pom", "digest": { - "sha256": "0c055e7b6937aaeb6e241a367dc8a2e64b650adf7e5106182c5c66ec934f6339" + "sha256": "6d0efd68476e5b268439a17124ac575c5f7480f0990cbd5ebc6a03f6a83c9c8f" } }, { - "name": "build/repo/io/micronaut/micronaut-http-netty/3.9.3/micronaut-http-netty-3.9.3-sources.jar", + "name": "build/repo/io/micronaut/micronaut-json-core/4.0.0-RC5/micronaut-json-core-4.0.0-RC5.jar", "digest": { - "sha256": "bfca34e88c9b48552a7a08ba6aace4a435b40d83a0a60965ae4b3b327ed257a3" + "sha256": "e652e41aa28b5bbd75b703ec7e076ced217cbbb5ff25a3abf6732028814f0460" } }, { - "name": "build/repo/io/micronaut/micronaut-http-netty/3.9.3/micronaut-http-netty-3.9.3.jar", + "name": "build/repo/io/micronaut/micronaut-json-core/4.0.0-RC5/micronaut-json-core-4.0.0-RC5.pom", "digest": { - "sha256": "e33d12d3ace2b0099cf01824922421e805b62e5eac5809ddc20b51b6aabc8ec4" + "sha256": "8dbfb5ae7b4fc603fb7477fa844c8c5a46ec8180f0812f474cc97761ed0c76c0" } }, { - "name": "build/repo/io/micronaut/micronaut-http-netty/3.9.3/micronaut-http-netty-3.9.3.module", + "name": "build/repo/io/micronaut/micronaut-management/4.0.0-RC5/micronaut-management-4.0.0-RC5.jar", "digest": { - "sha256": "de8885925e4d72852b713c1c230d4d8d21755610dc658674cfb2cf843193c3ac" + "sha256": "bcb95d4f1de068897435599204e97a065373982822a9077610eedb9e46d22c25" } }, { - "name": "build/repo/io/micronaut/micronaut-http-netty/3.9.3/micronaut-http-netty-3.9.3.pom", + "name": "build/repo/io/micronaut/micronaut-management/4.0.0-RC5/micronaut-management-4.0.0-RC5.pom", "digest": { - "sha256": "81f04a617f3cc6dd62e6e8d35e2c4f0e6bfe10b98aa145d4d747876f41dc5b67" + "sha256": "726bbf8b88651e614a897dc8130c58afcaefebda1742629c4117cfc903adf78a" } }, { - "name": "build/repo/io/micronaut/micronaut-http-server-netty/3.9.3/micronaut-http-server-netty-3.9.3-all.jar", + "name": "build/repo/io/micronaut/micronaut-messaging/4.0.0-RC5/micronaut-messaging-4.0.0-RC5.jar", "digest": { - "sha256": "66d373b069d2d2c1fefc6de80e1a303f19714711fa4d7c11d99b611a8cd1b71c" + "sha256": "4fe01ddba472754bf19190e2ce98ef83c5a0cbc8490da14cf1430969b2f6b916" } }, { - "name": "build/repo/io/micronaut/micronaut-http-server-netty/3.9.3/micronaut-http-server-netty-3.9.3-javadoc.jar", + "name": "build/repo/io/micronaut/micronaut-messaging/4.0.0-RC5/micronaut-messaging-4.0.0-RC5.pom", "digest": { - "sha256": "09bd27eb376d6ad9bdbd6205d6639d294c03ffda14c6a3d79d978ccd43d88e56" + "sha256": "758cc5c219c39956f261f667758c4b4fbd9d23628bf3b679e670ee11d4199bdd" } }, { - "name": "build/repo/io/micronaut/micronaut-http-server-netty/3.9.3/micronaut-http-server-netty-3.9.3-sources.jar", + "name": 
"build/repo/io/micronaut/micronaut-retry/4.0.0-RC5/micronaut-retry-4.0.0-RC5.jar", "digest": { - "sha256": "d2a37823099296b3af5c8d852b484b82026c99570565ed5d115ccc3ff4a73d3d" + "sha256": "3c38f47fe976b5177baa7d2416957cb0e240e68dfa0e9b5388621b1dc62e36f1" } }, { - "name": "build/repo/io/micronaut/micronaut-http-server-netty/3.9.3/micronaut-http-server-netty-3.9.3.jar", + "name": "build/repo/io/micronaut/micronaut-retry/4.0.0-RC5/micronaut-retry-4.0.0-RC5.pom", "digest": { - "sha256": "10b7a66dd2c887ef77009268f86c6fd897337fab0e3f58448acbb146513f9349" + "sha256": "17d5b16b66af914aa456e8323fe69ce329704275f3bfa7cca59aa3c74d4dd300" } }, { - "name": "build/repo/io/micronaut/micronaut-http-server-netty/3.9.3/micronaut-http-server-netty-3.9.3.module", + "name": "build/repo/io/micronaut/micronaut-router/4.0.0-RC5/micronaut-router-4.0.0-RC5.jar", "digest": { - "sha256": "10f8a5681ca47c7168764c560e05ef1a4a8c3a104a2b00f97461aeb2155dc650" + "sha256": "53c2ac2a841a0d71b94b700b6dc0125c56f07a75552820324b2f8db5ca3c7004" } }, { - "name": "build/repo/io/micronaut/micronaut-http-server-netty/3.9.3/micronaut-http-server-netty-3.9.3.pom", + "name": "build/repo/io/micronaut/micronaut-router/4.0.0-RC5/micronaut-router-4.0.0-RC5.pom", "digest": { - "sha256": "b1f12a6f0527745e33eed089d6105dd3cf6e660b4d062b73858fe860f10dcf25" + "sha256": "e4b841fe554a880506aeaf249b161193f10604a1cb946b18cbd7acf8bc88d851" } }, { - "name": "build/repo/io/micronaut/micronaut-http-server-tck/3.9.3/micronaut-http-server-tck-3.9.3-all.jar", + "name": "build/repo/io/micronaut/micronaut-runtime-osx/4.0.0-RC5/micronaut-runtime-osx-4.0.0-RC5.jar", "digest": { - "sha256": "4dc93c49aa11bf29be95edf49fd4da93219828d335d3d7cfde519e22870c818c" + "sha256": "d107c6b971a9409115a3380a87ed0d0f4f59744a2e8bfd9e724475668f0d598c" } }, { - "name": "build/repo/io/micronaut/micronaut-http-server-tck/3.9.3/micronaut-http-server-tck-3.9.3-javadoc.jar", + "name": "build/repo/io/micronaut/micronaut-runtime-osx/4.0.0-RC5/micronaut-runtime-osx-4.0.0-RC5.pom", "digest": { - "sha256": "016b486ce602bc5b7898c0f2121193598c2e213ebcff8c93b66c42ee11e8f182" + "sha256": "810a954ebd4efbd10b841a6991ee7ad8cdc033315aa62e600c34fd46cb4d4539" } }, { - "name": "build/repo/io/micronaut/micronaut-http-server-tck/3.9.3/micronaut-http-server-tck-3.9.3-sources.jar", + "name": "build/repo/io/micronaut/micronaut-runtime/4.0.0-RC5/micronaut-runtime-4.0.0-RC5.jar", "digest": { - "sha256": "a85595d12b4f0fa767b238b53507d81b6efd2b10a80f2aedd1b09ec697b2400a" + "sha256": "621ea91b4a532276b8b0419bebacb76599a2240c385f48e7f315387b8c64e173" } }, { - "name": "build/repo/io/micronaut/micronaut-http-server-tck/3.9.3/micronaut-http-server-tck-3.9.3.jar", + "name": "build/repo/io/micronaut/micronaut-runtime/4.0.0-RC5/micronaut-runtime-4.0.0-RC5.pom", "digest": { - "sha256": "83aedef04f63a91cb5ccd592b92f2dc92e2698df7d160ca085f198f4d42a1c47" + "sha256": "e96b9174af2110956ba9a580c13e9ad6596d9c64b2066a1a233d721ccbc431ae" } }, { - "name": "build/repo/io/micronaut/micronaut-http-server-tck/3.9.3/micronaut-http-server-tck-3.9.3.module", + "name": "build/repo/io/micronaut/micronaut-websocket/4.0.0-RC5/micronaut-websocket-4.0.0-RC5.jar", "digest": { - "sha256": "35807608a88115726fca3842d6ee8dba9c371fba6c22e2caa39b80016d026bba" + "sha256": "d48b8f5b92fe411d51f39edb7af908064b21c8cc8e02ce3ee9bce8fc9077717f" } }, { - "name": "build/repo/io/micronaut/micronaut-http-server-tck/3.9.3/micronaut-http-server-tck-3.9.3.pom", + "name": "build/repo/io/micronaut/micronaut-websocket/4.0.0-RC5/micronaut-websocket-4.0.0-RC5.pom", 
"digest": { - "sha256": "4ce1c90d946862aa44154cfcf67fc84874757053f0bf5cd37599a8f31cda3bfb" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http-server/3.9.3/micronaut-http-server-3.9.3-all.jar", - "digest": { - "sha256": "225047e6b106f09b260ed82923905719414bb9270c191aeca262cee4a83770f3" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http-server/3.9.3/micronaut-http-server-3.9.3-javadoc.jar", - "digest": { - "sha256": "4ce869deb7a8346bfab67be2357a7b8189b7727a058ec60facbf9f425f7620db" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http-server/3.9.3/micronaut-http-server-3.9.3-sources.jar", - "digest": { - "sha256": "d2b0277173f147725d26ebd57e1298e604f1e6510d318910495135d26e35cf38" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http-server/3.9.3/micronaut-http-server-3.9.3.jar", - "digest": { - "sha256": "b339268d6ff0fc458b39c1071af7ad06f175c230a6047bf5360edc172607f492" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http-server/3.9.3/micronaut-http-server-3.9.3.module", - "digest": { - "sha256": "f2d25b89da0598982324b152b5b6e7e0bd520c8a5731a2ac88deb53382e76f65" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http-server/3.9.3/micronaut-http-server-3.9.3.pom", - "digest": { - "sha256": "84e0d6d23445167aed4e49b50f563b746533288649c9e10b892edd5a1bfcf9bb" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http-validation/3.9.3/micronaut-http-validation-3.9.3-all.jar", - "digest": { - "sha256": "8f1364bd4c3b98312a04a32361952427e84b55d2f48b4f1c3b00b98004791825" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http-validation/3.9.3/micronaut-http-validation-3.9.3-javadoc.jar", - "digest": { - "sha256": "b1037bfd929f292c4102ba0f80cfa8731ff93f3368a334c522bf78c221be5e0a" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http-validation/3.9.3/micronaut-http-validation-3.9.3-sources.jar", - "digest": { - "sha256": "1c7c000f87f7be4b67b5ed15b20ed3d3526fa3fcf37b9849596bbe11d8e10686" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http-validation/3.9.3/micronaut-http-validation-3.9.3.jar", - "digest": { - "sha256": "7e4d73f99e95f1e5a548d2c8cb1fd2c7e99e4d395d612dfe09dd1c4c52258d63" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http-validation/3.9.3/micronaut-http-validation-3.9.3.module", - "digest": { - "sha256": "88cf24dfe5fa2f750485b7cb8372a437f2a1adb80d5ff125938e05ac713d7a8e" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http-validation/3.9.3/micronaut-http-validation-3.9.3.pom", - "digest": { - "sha256": "7fe0410ddd972fb0d0e346ca8f5dbacc3a6c82d9991b3d585c4db3c2b869c2a2" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http/3.9.3/micronaut-http-3.9.3-all.jar", - "digest": { - "sha256": "0e5f01ff65b7fdcf08970e801cf51088b7426d181b6e44a278401eb765675c8e" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http/3.9.3/micronaut-http-3.9.3-javadoc.jar", - "digest": { - "sha256": "a66d58221320094fa8daed0a845d80543ba817a999b817c39451b1cd5e9d1365" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http/3.9.3/micronaut-http-3.9.3-sources.jar", - "digest": { - "sha256": "1bb64700f4b035f1cfcb04072ffd02d80228053a75a1eb02e4f9a5168d9f0248" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http/3.9.3/micronaut-http-3.9.3.jar", - "digest": { - "sha256": "010eb7823872abce004ede25252f496a43fd96ee9300c0818efb6d9658994f94" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http/3.9.3/micronaut-http-3.9.3.module", - "digest": { - "sha256": 
"d6d169dcc9aa381d6bc34af0e9f299e4ab0ca8728419e15e3346d2109420f5ee" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-http/3.9.3/micronaut-http-3.9.3.pom", - "digest": { - "sha256": "4d93abed78cdafbc645e0bcee47e7afec4e14181c48c562c10cd98c991b88438" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-groovy-test/3.9.3/micronaut-inject-groovy-test-3.9.3-all.jar", - "digest": { - "sha256": "a2cfbe0a4d78bbea1c22173da3a200ce8fc128f26a33dd31790fa319f523518a" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-groovy-test/3.9.3/micronaut-inject-groovy-test-3.9.3-javadoc.jar", - "digest": { - "sha256": "b0a8ec42e66b156e558f05016594465e37f892935f235ee756e48348c94b222b" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-groovy-test/3.9.3/micronaut-inject-groovy-test-3.9.3-sources.jar", - "digest": { - "sha256": "14e258ea39929baebe1808a5f474163572d9b4110e829cfd4c8f05ef0781842f" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-groovy-test/3.9.3/micronaut-inject-groovy-test-3.9.3.jar", - "digest": { - "sha256": "392c2fadc98307379ab0a354b385fe5f59dd793f1853204a2a8d7dccdbd8c5f4" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-groovy-test/3.9.3/micronaut-inject-groovy-test-3.9.3.module", - "digest": { - "sha256": "e7032f091f233d181e1e0ad252e38b9c526a64df99d45ccf90ce04a2bd5b56c7" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-groovy-test/3.9.3/micronaut-inject-groovy-test-3.9.3.pom", - "digest": { - "sha256": "891462dbaaa764c96317975dcafd0cebbd7e1667cc62a42cabf0bed9103c484f" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-groovy/3.9.3/micronaut-inject-groovy-3.9.3-all.jar", - "digest": { - "sha256": "8bd910f06c3877f4ef030fcf2c873e195e749e4aefb296b1aea679a99b821870" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-groovy/3.9.3/micronaut-inject-groovy-3.9.3-javadoc.jar", - "digest": { - "sha256": "e1bf42e66557db1bcf75112d467b56d26421db56e3695ef0eb1d87b7354c5456" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-groovy/3.9.3/micronaut-inject-groovy-3.9.3-sources.jar", - "digest": { - "sha256": "dac0fe4aca7b9f6ff32e97983052ed156ec58194bc6d881b7545b7ac22a0abc3" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-groovy/3.9.3/micronaut-inject-groovy-3.9.3.jar", - "digest": { - "sha256": "30a5929514517121d0ad2c1115493b3283c801745ce67b3622f41d1b225a08b9" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-groovy/3.9.3/micronaut-inject-groovy-3.9.3.module", - "digest": { - "sha256": "6b47ad9225d483543c69803b8f13230156732d92d66da9efcf0442dbbce78a5d" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-groovy/3.9.3/micronaut-inject-groovy-3.9.3.pom", - "digest": { - "sha256": "799f31190c6c16ca14b9e68f52f7f2972c3d4cb9b86d5c3c457de45e755eacea" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-java-test/3.9.3/micronaut-inject-java-test-3.9.3-all.jar", - "digest": { - "sha256": "70c0a9b525761eeef14bc082020177da89540ccfe8c5eecd56d613a0a8c51efe" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-java-test/3.9.3/micronaut-inject-java-test-3.9.3-javadoc.jar", - "digest": { - "sha256": "65637187f26328a795a1d5ea919dfc372c7e58b3ac84022c3a93c0ceb7c807e6" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-java-test/3.9.3/micronaut-inject-java-test-3.9.3-sources.jar", - "digest": { - "sha256": "11a6677f21df331268d22f5908fa70ee4e2e983c92c122c2471a3417b680905f" - } - }, - { - "name": 
"build/repo/io/micronaut/micronaut-inject-java-test/3.9.3/micronaut-inject-java-test-3.9.3.jar", - "digest": { - "sha256": "e0e4135a57cb84589af2c329c8b5b959083640f2a4e5d4a404411044751819e3" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-java-test/3.9.3/micronaut-inject-java-test-3.9.3.module", - "digest": { - "sha256": "653234c3e63110a451dc179b7684ecfc42dc1bac03ceb6b1fb04c6cf1ddfd8c9" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-java-test/3.9.3/micronaut-inject-java-test-3.9.3.pom", - "digest": { - "sha256": "4c6f8d676b1e356a2747ec3442d04af5084e4ab13484d5f54e4e43946c64edcf" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-java/3.9.3/micronaut-inject-java-3.9.3-all.jar", - "digest": { - "sha256": "2fb94b5c520d19c7450c15d458ea326b157c5844d516a7f6ceef31190e6bb1b8" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-java/3.9.3/micronaut-inject-java-3.9.3-javadoc.jar", - "digest": { - "sha256": "559c5bfef7020faaca0acdcbfcea52093994871f09de9597623ef7e098ff6496" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-java/3.9.3/micronaut-inject-java-3.9.3-sources.jar", - "digest": { - "sha256": "742be340f1d4b39b6cb5b009e3d190a3525b4d6f40bc00be78f2a8eb4746b7b2" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-java/3.9.3/micronaut-inject-java-3.9.3.jar", - "digest": { - "sha256": "4730123516fb67816327b3c5ecedd4122c6297e1fa1d732b69a6ee94a83f4bee" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-java/3.9.3/micronaut-inject-java-3.9.3.module", - "digest": { - "sha256": "a88c261d4b186bd78430623f207d2e9f5d620788806fa354dadc36f4dd9c778a" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-java/3.9.3/micronaut-inject-java-3.9.3.pom", - "digest": { - "sha256": "b1bbc72aa6d8f2e7246dc6a60e25d1799da7254073f4194247ba2dafd0965b65" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-kotlin-test/3.9.3/micronaut-inject-kotlin-test-3.9.3-all.jar", - "digest": { - "sha256": "0f717b4399ba573721dff89ddbd6973c7e782fbc658be9cc301d14a46e3a9f66" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-kotlin-test/3.9.3/micronaut-inject-kotlin-test-3.9.3-javadoc.jar", - "digest": { - "sha256": "bf8c3f40044c79eb526baa6ad04d2aa5f4a4d9585b063eb5aec2bfc8cac207fc" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-kotlin-test/3.9.3/micronaut-inject-kotlin-test-3.9.3-sources.jar", - "digest": { - "sha256": "833b0a5f04a7e5b4a0189c3ffe66e2b2e55752048153540f7dcb3f144f5aac05" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-kotlin-test/3.9.3/micronaut-inject-kotlin-test-3.9.3.jar", - "digest": { - "sha256": "268ec614170a6254e2d4f9d256b6b88486f3bc7b80eb156688309b24b96219f3" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-kotlin-test/3.9.3/micronaut-inject-kotlin-test-3.9.3.module", - "digest": { - "sha256": "0fb5702799dd7f0f7c9afc0d712c5739b8ace25fbb0daab6b28ceac6b5d8d75c" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject-kotlin-test/3.9.3/micronaut-inject-kotlin-test-3.9.3.pom", - "digest": { - "sha256": "edf35b8cb6f5ae996d1fb4994a26aa31580c7c20d9f4d5ee6c04484a747437bb" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject/3.9.3/micronaut-inject-3.9.3-javadoc.jar", - "digest": { - "sha256": "f08e6367be09cbdca9c36613a5317c3071f76b2fc95a2caa65351078a257d419" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject/3.9.3/micronaut-inject-3.9.3-sources.jar", - "digest": { - "sha256": 
"09c7ab9226d3142ec65cf5e0bd2abe19359cc762beebf5252ecc05a05c29abae" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject/3.9.3/micronaut-inject-3.9.3.jar", - "digest": { - "sha256": "9e06194ecff21f58f82d3a5ff8a2700c9c20ac0fca7c0ec9e8eda12bd488059f" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-inject/3.9.3/micronaut-inject-3.9.3.pom", - "digest": { - "sha256": "df637e3f999ce3c4ae84c89cd211c0d8a9db2f435c2ad7dc75386ddf3684124c" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-jackson-core/3.9.3/micronaut-jackson-core-3.9.3-all.jar", - "digest": { - "sha256": "fc90b18c49ce870906c9e4670afc57f4cc0eee2f9f5cd1539bd8c75ad4bb4fbc" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-jackson-core/3.9.3/micronaut-jackson-core-3.9.3-javadoc.jar", - "digest": { - "sha256": "6cbf97d31874e8a14073df6c4efdf55490d2436a3719c2fa3d18a8663cbe111d" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-jackson-core/3.9.3/micronaut-jackson-core-3.9.3-sources.jar", - "digest": { - "sha256": "0089154508694db94331948228c9140b3c4afb7e4af066f770a1166d45776c36" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-jackson-core/3.9.3/micronaut-jackson-core-3.9.3.jar", - "digest": { - "sha256": "004ccc227cae95a22cdfc9eabef83b58ed52e1725751ca942622e71e9141458a" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-jackson-core/3.9.3/micronaut-jackson-core-3.9.3.module", - "digest": { - "sha256": "602d40404d4aef18c76333330b8509c7dc1a6bcf2498a1566403eab34d35ae7e" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-jackson-core/3.9.3/micronaut-jackson-core-3.9.3.pom", - "digest": { - "sha256": "6da7240fec15d4dacf22827e58dbe37efc682fc75f37977977d6ce0063c75b2d" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-jackson-databind/3.9.3/micronaut-jackson-databind-3.9.3-all.jar", - "digest": { - "sha256": "b839f2f6add1bb2a9f0f5bcefddac0cd491f675e2e8c7bdb3fcec8aff6da686f" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-jackson-databind/3.9.3/micronaut-jackson-databind-3.9.3-javadoc.jar", - "digest": { - "sha256": "8e6df2858efcaae3e876c949457ca7afddc64a0b75dadb8efe96d4f38713590e" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-jackson-databind/3.9.3/micronaut-jackson-databind-3.9.3-sources.jar", - "digest": { - "sha256": "4adfbd5e6a995169cd156775ca737f63c09ff665c04f1ffea4c4ffefbd14574a" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-jackson-databind/3.9.3/micronaut-jackson-databind-3.9.3.jar", - "digest": { - "sha256": "22e1e744f52cad75c06f3bbc4d3a567d78a6b438f3f04b28a626a4162d78d8fe" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-jackson-databind/3.9.3/micronaut-jackson-databind-3.9.3.module", - "digest": { - "sha256": "9720a6363c78826ead01faf259dbf8404ae424e35293fa593c13594526d00eda" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-jackson-databind/3.9.3/micronaut-jackson-databind-3.9.3.pom", - "digest": { - "sha256": "0381d00f460c86277dc135a1dea2690cf17f9d657ac18a6616ea86df56e4b94c" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-json-core/3.9.3/micronaut-json-core-3.9.3-all.jar", - "digest": { - "sha256": "a7745dc876940d7e766b6dc38fea7ddbe1a0430c2028e05837f6b063bfb11727" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-json-core/3.9.3/micronaut-json-core-3.9.3-javadoc.jar", - "digest": { - "sha256": "da32d5c2419cd73a03092ecbffd246c8022bab25562e61b3cb7e46470e3a7cc4" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-json-core/3.9.3/micronaut-json-core-3.9.3-sources.jar", - "digest": { - 
"sha256": "d91234d69e099da22b012d1f17ee86efba36dc4e315f7b786596e89fb10956e8" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-json-core/3.9.3/micronaut-json-core-3.9.3.jar", - "digest": { - "sha256": "b12ca6e7977a82835732b9327d9e9e72358c997c19e0143251dc1609b0643b98" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-json-core/3.9.3/micronaut-json-core-3.9.3.module", - "digest": { - "sha256": "0f0c6e981095b2d158b053df7c261ec296a507b660aba41b194ba8769aaac9f8" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-json-core/3.9.3/micronaut-json-core-3.9.3.pom", - "digest": { - "sha256": "23d0f89d5907035ce8a2b9242a282f9586e6051f4866e55494d6e70f0f5addf7" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-management/3.9.3/micronaut-management-3.9.3-all.jar", - "digest": { - "sha256": "d0d939a19d55fc75be472b3eb7c24a0db1a84f0482c799746decd9ee4288561a" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-management/3.9.3/micronaut-management-3.9.3-javadoc.jar", - "digest": { - "sha256": "b1d286953deda77ef0e2756421bb76e28e107112f678435ca96c3d49d9b00eef" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-management/3.9.3/micronaut-management-3.9.3-sources.jar", - "digest": { - "sha256": "ef8398fded497e25e04e5c1b5a272514bce93dbb61fa2d851d79642fcc3a647c" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-management/3.9.3/micronaut-management-3.9.3.jar", - "digest": { - "sha256": "aa0f634375768bdaf1cd711e13202d05e662826cddf51fbc343279afd5df90f2" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-management/3.9.3/micronaut-management-3.9.3.module", - "digest": { - "sha256": "fb28bd6534029751fb50faba1f48ac89dc0ea5aaeb489051d035156ea5cb1c77" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-management/3.9.3/micronaut-management-3.9.3.pom", - "digest": { - "sha256": "cc48c8625d3b3d68e966c3680793baa0dd4575114f061c28da20590a932cc61e" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-messaging/3.9.3/micronaut-messaging-3.9.3-all.jar", - "digest": { - "sha256": "985a524f8975d704d2835cd214dd89f433ae66d2e04e944ef35617ace4fa8eb0" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-messaging/3.9.3/micronaut-messaging-3.9.3-javadoc.jar", - "digest": { - "sha256": "f5689a5917442d70bebb6aaaf897bdcc5e55aecbf415beca524627fa79d7a72d" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-messaging/3.9.3/micronaut-messaging-3.9.3-sources.jar", - "digest": { - "sha256": "cea869dcc77abac7bcfdfb0ad5f0bbe8d490283d8565e48aef8cdb9de8e914a3" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-messaging/3.9.3/micronaut-messaging-3.9.3.jar", - "digest": { - "sha256": "79d65004e6b2709b4ccffb0da12c1b7bbb241a918c377ec9adc8d605c950d3c9" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-messaging/3.9.3/micronaut-messaging-3.9.3.module", - "digest": { - "sha256": "2b1f1a07d3db33e9c7a3d8f6e5f882c0820070b7b5c60893a197f8c4c888e164" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-messaging/3.9.3/micronaut-messaging-3.9.3.pom", - "digest": { - "sha256": "b428bc2b2e1b2a842a00a7ea600734e15c15ce28581ce0a79cb7f6f4f273c780" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-parent/3.9.3/micronaut-parent-3.9.3.pom", - "digest": { - "sha256": "f2e2678b31f7deeff6f2aab7bf9f33404736a93ebabad294e046184fdd0a081d" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-router/3.9.3/micronaut-router-3.9.3-all.jar", - "digest": { - "sha256": "fbde6b3d1a6023f157a09d297af91f385794939d89bdc1689393a536067e78ca" - } - }, - { - "name": 
"build/repo/io/micronaut/micronaut-router/3.9.3/micronaut-router-3.9.3-javadoc.jar", - "digest": { - "sha256": "13a7118e6aebf5d0633dffbd4305c3a8e9a8545faa7f159947dc78378115dcfc" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-router/3.9.3/micronaut-router-3.9.3-sources.jar", - "digest": { - "sha256": "b5914dc2d367fd493b9cff07a7defb12e5010bf14698bc771dd8b81d412e1c1c" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-router/3.9.3/micronaut-router-3.9.3.jar", - "digest": { - "sha256": "daa113b67d7449ae6ce7fd55bfb372a89b5a81188af72d67233c923fd0d21183" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-router/3.9.3/micronaut-router-3.9.3.module", - "digest": { - "sha256": "23550912ea273ea200c324558eaf88bead1ef11f69a6fc0e766967570c634abc" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-router/3.9.3/micronaut-router-3.9.3.pom", - "digest": { - "sha256": "b4e7ecda17b8ee1d6eb14cb300b19ba66fadbd39fef8ff3fc833da6670991868" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-runtime-osx/3.9.3/micronaut-runtime-osx-3.9.3-all.jar", - "digest": { - "sha256": "6c0f187977930a269c9c93b36636fca45c2d235171dec6bcb8585853feb5b647" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-runtime-osx/3.9.3/micronaut-runtime-osx-3.9.3-javadoc.jar", - "digest": { - "sha256": "53125fd4411e8ba2c4a903d1d1c264a249dafaebaba225586b0e7fdd731a7029" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-runtime-osx/3.9.3/micronaut-runtime-osx-3.9.3-sources.jar", - "digest": { - "sha256": "063aef11419dd5e1f3bf53da08cd0cd944967b183adab38b567d618cde6f6107" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-runtime-osx/3.9.3/micronaut-runtime-osx-3.9.3.jar", - "digest": { - "sha256": "9548f6a125f4c96d26922d32b686b2e158d931036c6e4a27d35a182ad74d47ee" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-runtime-osx/3.9.3/micronaut-runtime-osx-3.9.3.module", - "digest": { - "sha256": "161591a0326bf6e6f3a4e1f1a2657dca994d8193e9c59d3dc6c676c099f952e9" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-runtime-osx/3.9.3/micronaut-runtime-osx-3.9.3.pom", - "digest": { - "sha256": "d230ce5a7c842c18d874d06e00efbe0d006f3d6ebf098f5e8f910c1f6022ddb7" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-runtime/3.9.3/micronaut-runtime-3.9.3-javadoc.jar", - "digest": { - "sha256": "7b093c6f1040e717878a09eabad9d1279c5fdddc91d93245352c1af17f048e74" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-runtime/3.9.3/micronaut-runtime-3.9.3-sources.jar", - "digest": { - "sha256": "d900dfd247e75c428c0d26ad6f79aedd2ebf249fca91dc2bc420fb0c38a85891" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-runtime/3.9.3/micronaut-runtime-3.9.3.jar", - "digest": { - "sha256": "81ec81a162c38068ca2c9320b99ccf6bdd5c2694d2662a2eb29431813c2c3793" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-runtime/3.9.3/micronaut-runtime-3.9.3.pom", - "digest": { - "sha256": "bc1828ca2a0438a62954a15de4c9ed348ab84bc0517bdc0174c84b9cb833e2a8" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-session/3.9.3/micronaut-session-3.9.3-javadoc.jar", - "digest": { - "sha256": "43f7defe750b91b36d7adfb2cd00e10b7bbfada35572fe9a8ab2d465cff96dfa" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-session/3.9.3/micronaut-session-3.9.3-sources.jar", - "digest": { - "sha256": "045f9c19a6f55aefb5086b07a5195536f51f8d5fc951596d3c77e64a26938021" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-session/3.9.3/micronaut-session-3.9.3.jar", - "digest": { - "sha256": 
"40c4b955956db6f7a4ddeca7c78a18fa2d531dd4ec634dcd6121c8c505084db6" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-session/3.9.3/micronaut-session-3.9.3.pom", - "digest": { - "sha256": "9bbfece0aa13a0efa40f9492a2b785c6cf007fd87f5a7eaf975920939836aaea" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-validation/3.9.3/micronaut-validation-3.9.3-all.jar", - "digest": { - "sha256": "37966e7c2cbb47bce9daac602f91dab7cf97c0b32f11a0ae8d3bc17b47cb9f4d" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-validation/3.9.3/micronaut-validation-3.9.3-javadoc.jar", - "digest": { - "sha256": "3b5de180273e2a15f39a8f71ee6506c9c6068c12ac5fbd1336484582e756c1cb" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-validation/3.9.3/micronaut-validation-3.9.3-sources.jar", - "digest": { - "sha256": "24d95b353ba5c60bba01eeea940307812d80b810665f1b01434f75a474b7c2b9" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-validation/3.9.3/micronaut-validation-3.9.3.jar", - "digest": { - "sha256": "af1f1685c95dc93dddc305d97f9d7b21396afacb112293dc46216cc75fc83db6" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-validation/3.9.3/micronaut-validation-3.9.3.module", - "digest": { - "sha256": "2bdfff4de5b57ee2c5f233ad79f65a7e8130e6ea33d39299c724486033f681e9" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-validation/3.9.3/micronaut-validation-3.9.3.pom", - "digest": { - "sha256": "a92b68661ba89ae54249cd60d4ad0648954fabf34c6d52da9c11cd0f6d55ca8c" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-websocket/3.9.3/micronaut-websocket-3.9.3-all.jar", - "digest": { - "sha256": "05c9ff6abffcba7198bc7071e747822fc3778612c90540495b3143f58012c744" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-websocket/3.9.3/micronaut-websocket-3.9.3-javadoc.jar", - "digest": { - "sha256": "bccbb573b6949b870c2f83deb43491cc52200219e556efabbb5320f256e81b1b" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-websocket/3.9.3/micronaut-websocket-3.9.3-sources.jar", - "digest": { - "sha256": "a51b33ad8811e2b469037cc177e93be83bf6b543df1973fb8277d951ab66a763" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-websocket/3.9.3/micronaut-websocket-3.9.3.jar", - "digest": { - "sha256": "610f0df4e8957d67d920d7146afb5c2c34f13e5ddf7caec848ea9d470ba09bd0" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-websocket/3.9.3/micronaut-websocket-3.9.3.module", - "digest": { - "sha256": "b05cf732a1d5fc75ecf372d2ad3c6846ef866a1f7c3ee351c86e294f8a3a4071" - } - }, - { - "name": "build/repo/io/micronaut/micronaut-websocket/3.9.3/micronaut-websocket-3.9.3.pom", - "digest": { - "sha256": "b9800fbe85adf2a0d2151140076b8f41b124ceb9ed1e662e0280f0ed8a20bbf3" + "sha256": "bfd407165ce21778c8ab390879e44c171a64e6fff022508f968e29ddd2fe7415" } } ], "predicate": { "builder": { - "id": "https://github.com/slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@refs/tags/v1.4.0" + "id": "https://github.com/slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@refs/tags/v1.7.0" }, "buildType": "https://github.com/slsa-framework/slsa-github-generator/generic@v1", "invocation": { "configSource": { - "uri": "git+https://github.com/micronaut-projects/micronaut-core@refs/tags/v3.9.3", + "uri": "git+https://github.com/micronaut-projects/micronaut-core@refs/tags/v4.0.0-RC5", "digest": { - "sha1": "2308675f75f2f9659fc53f3f0a1d9440c9421a78" + "sha1": "7d4ee93144d12094b0e4f7dad46cae4f67ff0e48" }, "entryPoint": 
".github/workflows/release.yml" }, @@ -1233,7 +543,7 @@ }, "release": { "assets": [], - "assets_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/releases/107597724/assets", + "assets_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/releases/111202087/assets", "author": { "avatar_url": "https://avatars.githubusercontent.com/u/864788?v=4", "events_url": "https://api.github.com/users/sdelamo/events{/privacy}", @@ -1254,22 +564,22 @@ "type": "User", "url": "https://api.github.com/users/sdelamo" }, - "body": "\r\n\r\n## What's Changed\r\n### Dependency Upgrades \ud83d\ude80\r\n* Update netty monorepo to v4.1.92.Final by @sdelamo in https://github.com/micronaut-projects/micronaut-core/pull/9277\r\n* Bump micronaut-maven-plugin to 3.5.4 (#9370)\r\n* Bump micronaut-servlet to 3.3.7 (#9398)\r\n* Bump micronaut-aws to 3.17.3 (#9368)\r\n\r\n### Tests \u2705\r\n* test: Writable in Controller and Filter by @sdelamo in https://github.com/micronaut-projects/micronaut-core/pull/9286\r\n* TCK Test for JSON additional types codec by @sdelamo in https://github.com/micronaut-projects/micronaut-core/pull/9272\r\n* Remove test limitation for TCK by @timyates in https://github.com/micronaut-projects/micronaut-core/pull/9317\r\n* Add test for `@Body` not being required in 3.9.x by @timyates in https://github.com/micronaut-projects/micronaut-core/pull/9318\r\n* TCK tests for boolean textplain and default media type for String return type by @sdelamo in https://github.com/micronaut-projects/micronaut-core/pull/9314\r\n\r\n**Full Changelog**: https://github.com/micronaut-projects/micronaut-core/compare/v3.9.2...v3.9.3", - "created_at": "2023-06-06T15:01:43Z", + "body": "\r\n\r\n## What's Changed\r\n### Breaking Changes \ud83d\udee0\r\n* Change annotation-based CORS to match configuration-based defaults by @wetted in https://github.com/micronaut-projects/micronaut-core/pull/9509\r\n### Bug Fixes \ud83d\udc1e\r\n* KSP: Properly map suspended function class by @dstepanov in https://github.com/micronaut-projects/micronaut-core/pull/9520\r\n* use binary name to store annotation names in metadata for KSP by @graemerocher in https://github.com/micronaut-projects/micronaut-core/pull/9536\r\n* Fixed incorrect ability to disable slf4j by @altro3 in https://github.com/micronaut-projects/micronaut-core/pull/9532\r\n* Fix KSP nullability handling by @graemerocher in https://github.com/micronaut-projects/micronaut-core/pull/9538\r\n* fix: txt/plain possible for BigDecimal by @sdelamo in https://github.com/micronaut-projects/micronaut-core/pull/9535\r\n\r\n### Dependency updates \ud83d\ude80\r\n* Update dependency io.micronaut.build.internal:micronaut-gradle-plugins to v6.5.1 by @renovate in https://github.com/micronaut-projects/micronaut-core/pull/9521\r\n* Update dependency io.micronaut.rxjava2:micronaut-rxjava2-bom to v2.0.0-M6 by @renovate in https://github.com/micronaut-projects/micronaut-core/pull/9488\r\n* Update dependency io.micronaut.groovy:micronaut-runtime-groovy to v4.0.0-M4 by @renovate in https://github.com/micronaut-projects/micronaut-core/pull/9475\r\n* chore(deps): update dependency io.micronaut.build.internal:micronaut-gradle-plugins to v6.5.3 by @renovate in https://github.com/micronaut-projects/micronaut-core/pull/9540\r\n* fix(deps): update dependency com.github.javaparser:javaparser-symbol-solver-core to v3.25.4 by @renovate in 
https://github.com/micronaut-projects/micronaut-core/pull/9533\r\n\r\n### Build \ud83d\udc18\r\n* core: enable binary compatability check by @wetted in https://github.com/micronaut-projects/micronaut-core/pull/9505\r\n* Manual sync of graalvm workflows by @msupic in https://github.com/micronaut-projects/micronaut-core/pull/9539\r\n\r\n\r\n**Full Changelog**: https://github.com/micronaut-projects/micronaut-core/compare/v4.0.0-RC4...v4.0.0-RC5", + "created_at": "2023-07-06T03:49:56Z", "draft": false, - "html_url": "https://github.com/micronaut-projects/micronaut-core/releases/tag/v3.9.3", - "id": 107597724, - "mentions_count": 2, - "name": "Micronaut Framework 3.9.3", - "node_id": "RE_kwDOB2eaPM4Gac-c", - "prerelease": false, - "published_at": "2023-06-07T01:34:50Z", - "tag_name": "v3.9.3", - "tarball_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/tarball/v3.9.3", - "target_commitish": "3.9.x", - "upload_url": "https://uploads.github.com/repos/micronaut-projects/micronaut-core/releases/107597724/assets{?name,label}", - "url": "https://api.github.com/repos/micronaut-projects/micronaut-core/releases/107597724", - "zipball_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/zipball/v3.9.3" + "html_url": "https://github.com/micronaut-projects/micronaut-core/releases/tag/v4.0.0-RC5", + "id": 111202087, + "mentions_count": 7, + "name": "Micronaut Core 4.0.0-RC5", + "node_id": "RE_kwDOB2eaPM4GoM8n", + "prerelease": true, + "published_at": "2023-07-06T05:04:16Z", + "tag_name": "v4.0.0-RC5", + "tarball_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/tarball/v4.0.0-RC5", + "target_commitish": "4.0.x", + "upload_url": "https://uploads.github.com/repos/micronaut-projects/micronaut-core/releases/111202087/assets{?name,label}", + "url": "https://api.github.com/repos/micronaut-projects/micronaut-core/releases/111202087", + "zipball_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/zipball/v4.0.0-RC5" }, "repository": { "allow_forking": true, @@ -1293,8 +603,8 @@ "downloads_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/downloads", "events_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/events", "fork": false, - "forks": 980, - "forks_count": 980, + "forks": 984, + "forks_count": 984, "forks_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/forks", "full_name": "micronaut-projects/micronaut-core", "git_commits_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/git/commits{/sha}", @@ -1332,8 +642,8 @@ "name": "micronaut-core", "node_id": "MDEwOlJlcG9zaXRvcnkxMjQyMzAyMDQ=", "notifications_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/notifications{?since,all,participating}", - "open_issues": 587, - "open_issues_count": 587, + "open_issues": 596, + "open_issues_count": 596, "owner": { "avatar_url": "https://avatars.githubusercontent.com/u/36880643?v=4", "events_url": "https://api.github.com/users/micronaut-projects/events{/privacy}", @@ -1356,11 +666,11 @@ }, "private": false, "pulls_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/pulls{/number}", - "pushed_at": "2023-06-07T01:34:50Z", + "pushed_at": "2023-07-06T05:04:16Z", "releases_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/releases{/id}", - "size": 97477, + "size": 98079, "ssh_url": "git@github.com:micronaut-projects/micronaut-core.git", - "stargazers_count": 5712, + "stargazers_count": 5744, 
"stargazers_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/stargazers", "statuses_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/statuses/{sha}", "subscribers_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/subscribers", @@ -1377,11 +687,11 @@ "serverless" ], "trees_url": "https://api.github.com/repos/micronaut-projects/micronaut-core/git/trees{/sha}", - "updated_at": "2023-06-06T23:55:12Z", + "updated_at": "2023-07-05T23:14:03Z", "url": "https://api.github.com/repos/micronaut-projects/micronaut-core", "visibility": "public", - "watchers": 5712, - "watchers_count": 5712, + "watchers": 5744, + "watchers_count": 5744, "web_commit_signoff_required": false }, "sender": { @@ -1406,19 +716,19 @@ } }, "github_head_ref": "", - "github_ref": "refs/tags/v3.9.3", + "github_ref": "refs/tags/v4.0.0-RC5", "github_ref_type": "tag", "github_repository_id": "124230204", "github_repository_owner": "micronaut-projects", "github_repository_owner_id": "36880643", "github_run_attempt": "1", - "github_run_id": "5195022256", - "github_run_number": "135", - "github_sha1": "2308675f75f2f9659fc53f3f0a1d9440c9421a78" + "github_run_id": "5471746976", + "github_run_number": "144", + "github_sha1": "7d4ee93144d12094b0e4f7dad46cae4f67ff0e48" } }, "metadata": { - "buildInvocationID": "5195022256-1", + "buildInvocationID": "5471746976-1", "completeness": { "parameters": true, "environment": false, @@ -1428,9 +738,9 @@ }, "materials": [ { - "uri": "git+https://github.com/micronaut-projects/micronaut-core@refs/tags/v3.9.3", + "uri": "git+https://github.com/micronaut-projects/micronaut-core@refs/tags/v4.0.0-RC5", "digest": { - "sha1": "2308675f75f2f9659fc53f3f0a1d9440c9421a78" + "sha1": "7d4ee93144d12094b0e4f7dad46cae4f67ff0e48" } } ] @@ -1442,7 +752,7 @@ "checks": { "summary": { "DISABLED": 0, - "FAILED": 1, + "FAILED": 2, "PASSED": 6, "SKIPPED": 0, "UNKNOWN": 1 @@ -1467,10 +777,10 @@ ], "justification": [ { - "The target repository uses build tool gradle to deploy": "https://github.com/micronaut-projects/micronaut-core/blob/68f9bb0a78fa930865d37fca39252b9ec66e4a43/.github/workflows/gradle.yml", - "The build is triggered by": "https://github.com/micronaut-projects/micronaut-core/blob/68f9bb0a78fa930865d37fca39252b9ec66e4a43/.github/workflows/gradle.yml" + "The target repository uses build tool gradle to deploy": "https://github.com/micronaut-projects/micronaut-core/blob/68f9bb0a78fa930865d37fca39252b9ec66e4a43/.github/workflows/central-sync.yml", + "The build is triggered by": "https://github.com/micronaut-projects/micronaut-core/blob/68f9bb0a78fa930865d37fca39252b9ec66e4a43/.github/workflows/central-sync.yml" }, - "Deploy command: ['./gradlew', 'publishToSonatype', 'docs', '--no-daemon']", + "Deploy command: ['./gradlew', 'publishToSonatype', 'closeAndReleaseSonatypeStagingRepository']", "However, could not find a passing workflow run." 
], "result_type": "PASSED" @@ -1523,10 +833,24 @@ ], "justification": [ "Successfully verified level 3: ", - "verify passed : build/repo/micronaut-aop/3.9.3/micronaut-aop-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-aop/3.9.3/micronaut-aop-3.9.3-sources.jar,verify passed : build/repo/micronaut-aop/3.9.3/micronaut-aop-3.9.3.jar,verify passed : build/repo/micronaut-aop/3.9.3/micronaut-aop-3.9.3.pom,verify passed : build/repo/micronaut-bom/3.9.3/micronaut-bom-3.9.3.module,verify passed : build/repo/micronaut-bom/3.9.3/micronaut-bom-3.9.3.pom,verify passed : build/repo/micronaut-bom/3.9.3/micronaut-bom-3.9.3.toml,verify passed : build/repo/micronaut-buffer-netty/3.9.3/micronaut-buffer-netty-3.9.3-all.jar,verify passed : build/repo/micronaut-buffer-netty/3.9.3/micronaut-buffer-netty-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-buffer-netty/3.9.3/micronaut-buffer-netty-3.9.3-sources.jar,verify passed : build/repo/micronaut-buffer-netty/3.9.3/micronaut-buffer-netty-3.9.3.jar,verify passed : build/repo/micronaut-buffer-netty/3.9.3/micronaut-buffer-netty-3.9.3.module,verify passed : build/repo/micronaut-buffer-netty/3.9.3/micronaut-buffer-netty-3.9.3.pom,verify passed : build/repo/micronaut-context/3.9.3/micronaut-context-3.9.3-all.jar,verify passed : build/repo/micronaut-context/3.9.3/micronaut-context-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-context/3.9.3/micronaut-context-3.9.3-sources.jar,verify passed : build/repo/micronaut-context/3.9.3/micronaut-context-3.9.3.jar,verify passed : build/repo/micronaut-context/3.9.3/micronaut-context-3.9.3.module,verify passed : build/repo/micronaut-context/3.9.3/micronaut-context-3.9.3.pom,verify passed : build/repo/micronaut-core-reactive/3.9.3/micronaut-core-reactive-3.9.3-all.jar,verify passed : build/repo/micronaut-core-reactive/3.9.3/micronaut-core-reactive-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-core-reactive/3.9.3/micronaut-core-reactive-3.9.3-sources.jar,verify passed : build/repo/micronaut-core-reactive/3.9.3/micronaut-core-reactive-3.9.3.jar,verify passed : build/repo/micronaut-core-reactive/3.9.3/micronaut-core-reactive-3.9.3.module,verify passed : build/repo/micronaut-core-reactive/3.9.3/micronaut-core-reactive-3.9.3.pom,verify passed : build/repo/micronaut-core/3.9.3/micronaut-core-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-core/3.9.3/micronaut-core-3.9.3-sources.jar,verify passed : build/repo/micronaut-core/3.9.3/micronaut-core-3.9.3.jar,verify passed : build/repo/micronaut-core/3.9.3/micronaut-core-3.9.3.pom,verify passed : build/repo/micronaut-function-client/3.9.3/micronaut-function-client-3.9.3-all.jar,verify passed : build/repo/micronaut-function-client/3.9.3/micronaut-function-client-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-function-client/3.9.3/micronaut-function-client-3.9.3-sources.jar,verify passed : build/repo/micronaut-function-client/3.9.3/micronaut-function-client-3.9.3.jar,verify passed : build/repo/micronaut-function-client/3.9.3/micronaut-function-client-3.9.3.module,verify passed : build/repo/micronaut-function-client/3.9.3/micronaut-function-client-3.9.3.pom,verify passed : build/repo/micronaut-function-web/3.9.3/micronaut-function-web-3.9.3-all.jar,verify passed : build/repo/micronaut-function-web/3.9.3/micronaut-function-web-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-function-web/3.9.3/micronaut-function-web-3.9.3-sources.jar,verify passed : build/repo/micronaut-function-web/3.9.3/micronaut-function-web-3.9.3.jar,verify passed : 
build/repo/micronaut-function-web/3.9.3/micronaut-function-web-3.9.3.module,verify passed : build/repo/micronaut-function-web/3.9.3/micronaut-function-web-3.9.3.pom,verify passed : build/repo/micronaut-function/3.9.3/micronaut-function-3.9.3-all.jar,verify passed : build/repo/micronaut-function/3.9.3/micronaut-function-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-function/3.9.3/micronaut-function-3.9.3-sources.jar,verify passed : build/repo/micronaut-function/3.9.3/micronaut-function-3.9.3.jar,verify passed : build/repo/micronaut-function/3.9.3/micronaut-function-3.9.3.module,verify passed : build/repo/micronaut-function/3.9.3/micronaut-function-3.9.3.pom,verify passed : build/repo/micronaut-graal/3.9.3/micronaut-graal-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-graal/3.9.3/micronaut-graal-3.9.3-sources.jar,verify passed : build/repo/micronaut-graal/3.9.3/micronaut-graal-3.9.3.jar,verify passed : build/repo/micronaut-graal/3.9.3/micronaut-graal-3.9.3.pom,verify passed : build/repo/micronaut-http-client-core/3.9.3/micronaut-http-client-core-3.9.3-all.jar,verify passed : build/repo/micronaut-http-client-core/3.9.3/micronaut-http-client-core-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-http-client-core/3.9.3/micronaut-http-client-core-3.9.3-sources.jar,verify passed : build/repo/micronaut-http-client-core/3.9.3/micronaut-http-client-core-3.9.3.jar,verify passed : build/repo/micronaut-http-client-core/3.9.3/micronaut-http-client-core-3.9.3.module,verify passed : build/repo/micronaut-http-client-core/3.9.3/micronaut-http-client-core-3.9.3.pom,verify passed : build/repo/micronaut-http-client/3.9.3/micronaut-http-client-3.9.3-all.jar,verify passed : build/repo/micronaut-http-client/3.9.3/micronaut-http-client-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-http-client/3.9.3/micronaut-http-client-3.9.3-sources.jar,verify passed : build/repo/micronaut-http-client/3.9.3/micronaut-http-client-3.9.3.jar,verify passed : build/repo/micronaut-http-client/3.9.3/micronaut-http-client-3.9.3.module,verify passed : build/repo/micronaut-http-client/3.9.3/micronaut-http-client-3.9.3.pom,verify passed : build/repo/micronaut-http-netty/3.9.3/micronaut-http-netty-3.9.3-all.jar,verify passed : build/repo/micronaut-http-netty/3.9.3/micronaut-http-netty-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-http-netty/3.9.3/micronaut-http-netty-3.9.3-sources.jar,verify passed : build/repo/micronaut-http-netty/3.9.3/micronaut-http-netty-3.9.3.jar,verify passed : build/repo/micronaut-http-netty/3.9.3/micronaut-http-netty-3.9.3.module,verify passed : build/repo/micronaut-http-netty/3.9.3/micronaut-http-netty-3.9.3.pom,verify passed : build/repo/micronaut-http-server-netty/3.9.3/micronaut-http-server-netty-3.9.3-all.jar,verify passed : build/repo/micronaut-http-server-netty/3.9.3/micronaut-http-server-netty-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-http-server-netty/3.9.3/micronaut-http-server-netty-3.9.3-sources.jar,verify passed : build/repo/micronaut-http-server-netty/3.9.3/micronaut-http-server-netty-3.9.3.jar,verify passed : build/repo/micronaut-http-server-netty/3.9.3/micronaut-http-server-netty-3.9.3.module,verify passed : build/repo/micronaut-http-server-netty/3.9.3/micronaut-http-server-netty-3.9.3.pom,verify passed : build/repo/micronaut-http-server-tck/3.9.3/micronaut-http-server-tck-3.9.3-all.jar,verify passed : build/repo/micronaut-http-server-tck/3.9.3/micronaut-http-server-tck-3.9.3-javadoc.jar,verify passed : 
build/repo/micronaut-http-server-tck/3.9.3/micronaut-http-server-tck-3.9.3-sources.jar,verify passed : build/repo/micronaut-http-server-tck/3.9.3/micronaut-http-server-tck-3.9.3.jar,verify passed : build/repo/micronaut-http-server-tck/3.9.3/micronaut-http-server-tck-3.9.3.module,verify passed : build/repo/micronaut-http-server-tck/3.9.3/micronaut-http-server-tck-3.9.3.pom,verify passed : build/repo/micronaut-http-server/3.9.3/micronaut-http-server-3.9.3-all.jar,verify passed : build/repo/micronaut-http-server/3.9.3/micronaut-http-server-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-http-server/3.9.3/micronaut-http-server-3.9.3-sources.jar,verify passed : build/repo/micronaut-http-server/3.9.3/micronaut-http-server-3.9.3.jar,verify passed : build/repo/micronaut-http-server/3.9.3/micronaut-http-server-3.9.3.module,verify passed : build/repo/micronaut-http-server/3.9.3/micronaut-http-server-3.9.3.pom,verify passed : build/repo/micronaut-http-validation/3.9.3/micronaut-http-validation-3.9.3-all.jar,verify passed : build/repo/micronaut-http-validation/3.9.3/micronaut-http-validation-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-http-validation/3.9.3/micronaut-http-validation-3.9.3-sources.jar,verify passed : build/repo/micronaut-http-validation/3.9.3/micronaut-http-validation-3.9.3.jar,verify passed : build/repo/micronaut-http-validation/3.9.3/micronaut-http-validation-3.9.3.module,verify passed : build/repo/micronaut-http-validation/3.9.3/micronaut-http-validation-3.9.3.pom,verify passed : build/repo/micronaut-http/3.9.3/micronaut-http-3.9.3-all.jar,verify passed : build/repo/micronaut-http/3.9.3/micronaut-http-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-http/3.9.3/micronaut-http-3.9.3-sources.jar,verify passed : build/repo/micronaut-http/3.9.3/micronaut-http-3.9.3.jar,verify passed : build/repo/micronaut-http/3.9.3/micronaut-http-3.9.3.module,verify passed : build/repo/micronaut-http/3.9.3/micronaut-http-3.9.3.pom,verify passed : build/repo/micronaut-inject-groovy-test/3.9.3/micronaut-inject-groovy-test-3.9.3-all.jar,verify passed : build/repo/micronaut-inject-groovy-test/3.9.3/micronaut-inject-groovy-test-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-inject-groovy-test/3.9.3/micronaut-inject-groovy-test-3.9.3-sources.jar,verify passed : build/repo/micronaut-inject-groovy-test/3.9.3/micronaut-inject-groovy-test-3.9.3.jar,verify passed : build/repo/micronaut-inject-groovy-test/3.9.3/micronaut-inject-groovy-test-3.9.3.module,verify passed : build/repo/micronaut-inject-groovy-test/3.9.3/micronaut-inject-groovy-test-3.9.3.pom,verify passed : build/repo/micronaut-inject-groovy/3.9.3/micronaut-inject-groovy-3.9.3-all.jar,verify passed : build/repo/micronaut-inject-groovy/3.9.3/micronaut-inject-groovy-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-inject-groovy/3.9.3/micronaut-inject-groovy-3.9.3-sources.jar,verify passed : build/repo/micronaut-inject-groovy/3.9.3/micronaut-inject-groovy-3.9.3.jar,verify passed : build/repo/micronaut-inject-groovy/3.9.3/micronaut-inject-groovy-3.9.3.module,verify passed : build/repo/micronaut-inject-groovy/3.9.3/micronaut-inject-groovy-3.9.3.pom,verify passed : build/repo/micronaut-inject-java-test/3.9.3/micronaut-inject-java-test-3.9.3-all.jar,verify passed : build/repo/micronaut-inject-java-test/3.9.3/micronaut-inject-java-test-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-inject-java-test/3.9.3/micronaut-inject-java-test-3.9.3-sources.jar,verify passed : 
build/repo/micronaut-inject-java-test/3.9.3/micronaut-inject-java-test-3.9.3.jar,verify passed : build/repo/micronaut-inject-java-test/3.9.3/micronaut-inject-java-test-3.9.3.module,verify passed : build/repo/micronaut-inject-java-test/3.9.3/micronaut-inject-java-test-3.9.3.pom,verify passed : build/repo/micronaut-inject-java/3.9.3/micronaut-inject-java-3.9.3-all.jar,verify passed : build/repo/micronaut-inject-java/3.9.3/micronaut-inject-java-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-inject-java/3.9.3/micronaut-inject-java-3.9.3-sources.jar,verify passed : build/repo/micronaut-inject-java/3.9.3/micronaut-inject-java-3.9.3.jar,verify passed : build/repo/micronaut-inject-java/3.9.3/micronaut-inject-java-3.9.3.module,verify passed : build/repo/micronaut-inject-java/3.9.3/micronaut-inject-java-3.9.3.pom,verify passed : build/repo/micronaut-inject-kotlin-test/3.9.3/micronaut-inject-kotlin-test-3.9.3-all.jar,verify passed : build/repo/micronaut-inject-kotlin-test/3.9.3/micronaut-inject-kotlin-test-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-inject-kotlin-test/3.9.3/micronaut-inject-kotlin-test-3.9.3-sources.jar,verify passed : build/repo/micronaut-inject-kotlin-test/3.9.3/micronaut-inject-kotlin-test-3.9.3.jar,verify passed : build/repo/micronaut-inject-kotlin-test/3.9.3/micronaut-inject-kotlin-test-3.9.3.module,verify passed : build/repo/micronaut-inject-kotlin-test/3.9.3/micronaut-inject-kotlin-test-3.9.3.pom,verify passed : build/repo/micronaut-inject/3.9.3/micronaut-inject-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-inject/3.9.3/micronaut-inject-3.9.3-sources.jar,verify passed : build/repo/micronaut-inject/3.9.3/micronaut-inject-3.9.3.jar,verify passed : build/repo/micronaut-inject/3.9.3/micronaut-inject-3.9.3.pom,verify passed : build/repo/micronaut-jackson-core/3.9.3/micronaut-jackson-core-3.9.3-all.jar,verify passed : build/repo/micronaut-jackson-core/3.9.3/micronaut-jackson-core-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-jackson-core/3.9.3/micronaut-jackson-core-3.9.3-sources.jar,verify passed : build/repo/micronaut-jackson-core/3.9.3/micronaut-jackson-core-3.9.3.jar,verify passed : build/repo/micronaut-jackson-core/3.9.3/micronaut-jackson-core-3.9.3.module,verify passed : build/repo/micronaut-jackson-core/3.9.3/micronaut-jackson-core-3.9.3.pom,verify passed : build/repo/micronaut-jackson-databind/3.9.3/micronaut-jackson-databind-3.9.3-all.jar,verify passed : build/repo/micronaut-jackson-databind/3.9.3/micronaut-jackson-databind-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-jackson-databind/3.9.3/micronaut-jackson-databind-3.9.3-sources.jar,verify passed : build/repo/micronaut-jackson-databind/3.9.3/micronaut-jackson-databind-3.9.3.jar,verify passed : build/repo/micronaut-jackson-databind/3.9.3/micronaut-jackson-databind-3.9.3.module,verify passed : build/repo/micronaut-jackson-databind/3.9.3/micronaut-jackson-databind-3.9.3.pom,verify passed : build/repo/micronaut-json-core/3.9.3/micronaut-json-core-3.9.3-all.jar,verify passed : build/repo/micronaut-json-core/3.9.3/micronaut-json-core-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-json-core/3.9.3/micronaut-json-core-3.9.3-sources.jar,verify passed : build/repo/micronaut-json-core/3.9.3/micronaut-json-core-3.9.3.jar,verify passed : build/repo/micronaut-json-core/3.9.3/micronaut-json-core-3.9.3.module,verify passed : build/repo/micronaut-json-core/3.9.3/micronaut-json-core-3.9.3.pom,verify passed : build/repo/micronaut-management/3.9.3/micronaut-management-3.9.3-all.jar,verify 
passed : build/repo/micronaut-management/3.9.3/micronaut-management-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-management/3.9.3/micronaut-management-3.9.3-sources.jar,verify passed : build/repo/micronaut-management/3.9.3/micronaut-management-3.9.3.jar,verify passed : build/repo/micronaut-management/3.9.3/micronaut-management-3.9.3.module,verify passed : build/repo/micronaut-management/3.9.3/micronaut-management-3.9.3.pom,verify passed : build/repo/micronaut-messaging/3.9.3/micronaut-messaging-3.9.3-all.jar,verify passed : build/repo/micronaut-messaging/3.9.3/micronaut-messaging-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-messaging/3.9.3/micronaut-messaging-3.9.3-sources.jar,verify passed : build/repo/micronaut-messaging/3.9.3/micronaut-messaging-3.9.3.jar,verify passed : build/repo/micronaut-messaging/3.9.3/micronaut-messaging-3.9.3.module,verify passed : build/repo/micronaut-messaging/3.9.3/micronaut-messaging-3.9.3.pom,verify passed : build/repo/micronaut-parent/3.9.3/micronaut-parent-3.9.3.pom,verify passed : build/repo/micronaut-router/3.9.3/micronaut-router-3.9.3-all.jar,verify passed : build/repo/micronaut-router/3.9.3/micronaut-router-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-router/3.9.3/micronaut-router-3.9.3-sources.jar,verify passed : build/repo/micronaut-router/3.9.3/micronaut-router-3.9.3.jar,verify passed : build/repo/micronaut-router/3.9.3/micronaut-router-3.9.3.module,verify passed : build/repo/micronaut-router/3.9.3/micronaut-router-3.9.3.pom,verify passed : build/repo/micronaut-runtime-osx/3.9.3/micronaut-runtime-osx-3.9.3-all.jar,verify passed : build/repo/micronaut-runtime-osx/3.9.3/micronaut-runtime-osx-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-runtime-osx/3.9.3/micronaut-runtime-osx-3.9.3-sources.jar,verify passed : build/repo/micronaut-runtime-osx/3.9.3/micronaut-runtime-osx-3.9.3.jar,verify passed : build/repo/micronaut-runtime-osx/3.9.3/micronaut-runtime-osx-3.9.3.module,verify passed : build/repo/micronaut-runtime-osx/3.9.3/micronaut-runtime-osx-3.9.3.pom,verify passed : build/repo/micronaut-runtime/3.9.3/micronaut-runtime-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-runtime/3.9.3/micronaut-runtime-3.9.3-sources.jar,verify passed : build/repo/micronaut-runtime/3.9.3/micronaut-runtime-3.9.3.jar,verify passed : build/repo/micronaut-runtime/3.9.3/micronaut-runtime-3.9.3.pom,verify passed : build/repo/micronaut-session/3.9.3/micronaut-session-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-session/3.9.3/micronaut-session-3.9.3-sources.jar,verify passed : build/repo/micronaut-session/3.9.3/micronaut-session-3.9.3.jar,verify passed : build/repo/micronaut-session/3.9.3/micronaut-session-3.9.3.pom,verify passed : build/repo/micronaut-validation/3.9.3/micronaut-validation-3.9.3-all.jar,verify passed : build/repo/micronaut-validation/3.9.3/micronaut-validation-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-validation/3.9.3/micronaut-validation-3.9.3-sources.jar,verify passed : build/repo/micronaut-validation/3.9.3/micronaut-validation-3.9.3.jar,verify passed : build/repo/micronaut-validation/3.9.3/micronaut-validation-3.9.3.module,verify passed : build/repo/micronaut-validation/3.9.3/micronaut-validation-3.9.3.pom,verify passed : build/repo/micronaut-websocket/3.9.3/micronaut-websocket-3.9.3-all.jar,verify passed : build/repo/micronaut-websocket/3.9.3/micronaut-websocket-3.9.3-javadoc.jar,verify passed : build/repo/micronaut-websocket/3.9.3/micronaut-websocket-3.9.3-sources.jar,verify passed : 
build/repo/micronaut-websocket/3.9.3/micronaut-websocket-3.9.3.jar,verify passed : build/repo/micronaut-websocket/3.9.3/micronaut-websocket-3.9.3.module,verify passed : build/repo/micronaut-websocket/3.9.3/micronaut-websocket-3.9.3.pom" + "verify passed: build/repo/micronaut-aop/4.0.0-RC5/micronaut-aop-4.0.0-RC5.jar,verify passed: build/repo/micronaut-aop/4.0.0-RC5/micronaut-aop-4.0.0-RC5.pom,verify passed: build/repo/micronaut-buffer-netty/4.0.0-RC5/micronaut-buffer-netty-4.0.0-RC5.jar,verify passed: build/repo/micronaut-buffer-netty/4.0.0-RC5/micronaut-buffer-netty-4.0.0-RC5.pom,verify passed: build/repo/micronaut-context-propagation/4.0.0-RC5/micronaut-context-propagation-4.0.0-RC5.jar,verify passed: build/repo/micronaut-context-propagation/4.0.0-RC5/micronaut-context-propagation-4.0.0-RC5.pom,verify passed: build/repo/micronaut-context/4.0.0-RC5/micronaut-context-4.0.0-RC5.jar,verify passed: build/repo/micronaut-context/4.0.0-RC5/micronaut-context-4.0.0-RC5.pom,verify passed: build/repo/micronaut-core-bom/4.0.0-RC5/micronaut-core-bom-4.0.0-RC5.pom,verify passed: build/repo/micronaut-core-processor/4.0.0-RC5/micronaut-core-processor-4.0.0-RC5.jar,verify passed: build/repo/micronaut-core-processor/4.0.0-RC5/micronaut-core-processor-4.0.0-RC5.pom,verify passed: build/repo/micronaut-core-reactive/4.0.0-RC5/micronaut-core-reactive-4.0.0-RC5.jar,verify passed: build/repo/micronaut-core-reactive/4.0.0-RC5/micronaut-core-reactive-4.0.0-RC5.pom,verify passed: build/repo/micronaut-core/4.0.0-RC5/micronaut-core-4.0.0-RC5.jar,verify passed: build/repo/micronaut-core/4.0.0-RC5/micronaut-core-4.0.0-RC5.pom,verify passed: build/repo/micronaut-discovery-core/4.0.0-RC5/micronaut-discovery-core-4.0.0-RC5.jar,verify passed: build/repo/micronaut-discovery-core/4.0.0-RC5/micronaut-discovery-core-4.0.0-RC5.pom,verify passed: build/repo/micronaut-function-client/4.0.0-RC5/micronaut-function-client-4.0.0-RC5.jar,verify passed: build/repo/micronaut-function-client/4.0.0-RC5/micronaut-function-client-4.0.0-RC5.pom,verify passed: build/repo/micronaut-function-web/4.0.0-RC5/micronaut-function-web-4.0.0-RC5.jar,verify passed: build/repo/micronaut-function-web/4.0.0-RC5/micronaut-function-web-4.0.0-RC5.pom,verify passed: build/repo/micronaut-function/4.0.0-RC5/micronaut-function-4.0.0-RC5.jar,verify passed: build/repo/micronaut-function/4.0.0-RC5/micronaut-function-4.0.0-RC5.pom,verify passed: build/repo/micronaut-graal/4.0.0-RC5/micronaut-graal-4.0.0-RC5.jar,verify passed: build/repo/micronaut-graal/4.0.0-RC5/micronaut-graal-4.0.0-RC5.pom,verify passed: build/repo/micronaut-http-client-core/4.0.0-RC5/micronaut-http-client-core-4.0.0-RC5.jar,verify passed: build/repo/micronaut-http-client-core/4.0.0-RC5/micronaut-http-client-core-4.0.0-RC5.pom,verify passed: build/repo/micronaut-http-client-jdk/4.0.0-RC5/micronaut-http-client-jdk-4.0.0-RC5.jar,verify passed: build/repo/micronaut-http-client-jdk/4.0.0-RC5/micronaut-http-client-jdk-4.0.0-RC5.pom,verify passed: build/repo/micronaut-http-client-tck/4.0.0-RC5/micronaut-http-client-tck-4.0.0-RC5.jar,verify passed: build/repo/micronaut-http-client-tck/4.0.0-RC5/micronaut-http-client-tck-4.0.0-RC5.pom,verify passed: build/repo/micronaut-http-client/4.0.0-RC5/micronaut-http-client-4.0.0-RC5.jar,verify passed: build/repo/micronaut-http-client/4.0.0-RC5/micronaut-http-client-4.0.0-RC5.pom,verify passed: build/repo/micronaut-http-netty/4.0.0-RC5/micronaut-http-netty-4.0.0-RC5.jar,verify passed: build/repo/micronaut-http-netty/4.0.0-RC5/micronaut-http-netty-4.0.0-RC5.pom,verify 
passed: build/repo/micronaut-http-server-netty/4.0.0-RC5/micronaut-http-server-netty-4.0.0-RC5.jar,verify passed: build/repo/micronaut-http-server-netty/4.0.0-RC5/micronaut-http-server-netty-4.0.0-RC5.pom,verify passed: build/repo/micronaut-http-server-tck/4.0.0-RC5/micronaut-http-server-tck-4.0.0-RC5.jar,verify passed: build/repo/micronaut-http-server-tck/4.0.0-RC5/micronaut-http-server-tck-4.0.0-RC5.pom,verify passed: build/repo/micronaut-http-server/4.0.0-RC5/micronaut-http-server-4.0.0-RC5.jar,verify passed: build/repo/micronaut-http-server/4.0.0-RC5/micronaut-http-server-4.0.0-RC5.pom,verify passed: build/repo/micronaut-http-tck/4.0.0-RC5/micronaut-http-tck-4.0.0-RC5.jar,verify passed: build/repo/micronaut-http-tck/4.0.0-RC5/micronaut-http-tck-4.0.0-RC5.pom,verify passed: build/repo/micronaut-http-validation/4.0.0-RC5/micronaut-http-validation-4.0.0-RC5.jar,verify passed: build/repo/micronaut-http-validation/4.0.0-RC5/micronaut-http-validation-4.0.0-RC5.pom,verify passed: build/repo/micronaut-http/4.0.0-RC5/micronaut-http-4.0.0-RC5.jar,verify passed: build/repo/micronaut-http/4.0.0-RC5/micronaut-http-4.0.0-RC5.pom,verify passed: build/repo/micronaut-inject-groovy-test/4.0.0-RC5/micronaut-inject-groovy-test-4.0.0-RC5.jar,verify passed: build/repo/micronaut-inject-groovy-test/4.0.0-RC5/micronaut-inject-groovy-test-4.0.0-RC5.pom,verify passed: build/repo/micronaut-inject-groovy/4.0.0-RC5/micronaut-inject-groovy-4.0.0-RC5.jar,verify passed: build/repo/micronaut-inject-groovy/4.0.0-RC5/micronaut-inject-groovy-4.0.0-RC5.pom,verify passed: build/repo/micronaut-inject-java-test/4.0.0-RC5/micronaut-inject-java-test-4.0.0-RC5.jar,verify passed: build/repo/micronaut-inject-java-test/4.0.0-RC5/micronaut-inject-java-test-4.0.0-RC5.pom,verify passed: build/repo/micronaut-inject-java/4.0.0-RC5/micronaut-inject-java-4.0.0-RC5.jar,verify passed: build/repo/micronaut-inject-java/4.0.0-RC5/micronaut-inject-java-4.0.0-RC5.pom,verify passed: build/repo/micronaut-inject-kotlin-test/4.0.0-RC5/micronaut-inject-kotlin-test-4.0.0-RC5.jar,verify passed: build/repo/micronaut-inject-kotlin-test/4.0.0-RC5/micronaut-inject-kotlin-test-4.0.0-RC5.pom,verify passed: build/repo/micronaut-inject-kotlin/4.0.0-RC5/micronaut-inject-kotlin-4.0.0-RC5.jar,verify passed: build/repo/micronaut-inject-kotlin/4.0.0-RC5/micronaut-inject-kotlin-4.0.0-RC5.pom,verify passed: build/repo/micronaut-inject/4.0.0-RC5/micronaut-inject-4.0.0-RC5.jar,verify passed: build/repo/micronaut-inject/4.0.0-RC5/micronaut-inject-4.0.0-RC5.pom,verify passed: build/repo/micronaut-jackson-core/4.0.0-RC5/micronaut-jackson-core-4.0.0-RC5.jar,verify passed: build/repo/micronaut-jackson-core/4.0.0-RC5/micronaut-jackson-core-4.0.0-RC5.pom,verify passed: build/repo/micronaut-jackson-databind/4.0.0-RC5/micronaut-jackson-databind-4.0.0-RC5.jar,verify passed: build/repo/micronaut-jackson-databind/4.0.0-RC5/micronaut-jackson-databind-4.0.0-RC5.pom,verify passed: build/repo/micronaut-json-core/4.0.0-RC5/micronaut-json-core-4.0.0-RC5.jar,verify passed: build/repo/micronaut-json-core/4.0.0-RC5/micronaut-json-core-4.0.0-RC5.pom,verify passed: build/repo/micronaut-management/4.0.0-RC5/micronaut-management-4.0.0-RC5.jar,verify passed: build/repo/micronaut-management/4.0.0-RC5/micronaut-management-4.0.0-RC5.pom,verify passed: build/repo/micronaut-messaging/4.0.0-RC5/micronaut-messaging-4.0.0-RC5.jar,verify passed: build/repo/micronaut-messaging/4.0.0-RC5/micronaut-messaging-4.0.0-RC5.pom,verify passed: 
build/repo/micronaut-retry/4.0.0-RC5/micronaut-retry-4.0.0-RC5.jar,verify passed: build/repo/micronaut-retry/4.0.0-RC5/micronaut-retry-4.0.0-RC5.pom,verify passed: build/repo/micronaut-router/4.0.0-RC5/micronaut-router-4.0.0-RC5.jar,verify passed: build/repo/micronaut-router/4.0.0-RC5/micronaut-router-4.0.0-RC5.pom,verify passed: build/repo/micronaut-runtime-osx/4.0.0-RC5/micronaut-runtime-osx-4.0.0-RC5.jar,verify passed: build/repo/micronaut-runtime-osx/4.0.0-RC5/micronaut-runtime-osx-4.0.0-RC5.pom,verify passed: build/repo/micronaut-runtime/4.0.0-RC5/micronaut-runtime-4.0.0-RC5.jar,verify passed: build/repo/micronaut-runtime/4.0.0-RC5/micronaut-runtime-4.0.0-RC5.pom,verify passed: build/repo/micronaut-websocket/4.0.0-RC5/micronaut-websocket-4.0.0-RC5.jar,verify passed: build/repo/micronaut-websocket/4.0.0-RC5/micronaut-websocket-4.0.0-RC5.pom" ], "result_type": "PASSED" }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "check_description": "Check whether the target has a level-1 witness provenance.", + "slsa_requirements": [ + "Provenance - Available - SLSA Level 1", + "Provenance content - Identifies build instructions - SLSA Level 1", + "Provenance content - Identifies artifacts - SLSA Level 1", + "Provenance content - Identifies builder - SLSA Level 1" + ], + "justification": [ + "Failed to discover any witness provenance." + ], + "result_type": "FAILED" + }, { "check_id": "mcn_version_control_system_1", "check_description": "Check whether the target repo uses a version control system.", @@ -1589,6 +913,10 @@ "check_id": "mcn_provenance_level_three_1", "num_deps_pass": 0 }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "num_deps_pass": 0 + }, { "check_id": "mcn_build_service_1", "num_deps_pass": 2 diff --git a/tests/e2e/expected_results/micronaut-core/slf4j.json b/tests/e2e/expected_results/micronaut-core/slf4j.json index e9a18ed6c..0093d3f8a 100644 --- a/tests/e2e/expected_results/micronaut-core/slf4j.json +++ b/tests/e2e/expected_results/micronaut-core/slf4j.json @@ -1,6 +1,6 @@ { "metadata": { - "timestamps": "2023-06-14 13:20:54" + "timestamps": "2023-07-08 03:33:32" }, "target": { "info": { @@ -61,7 +61,7 @@ "checks": { "summary": { "DISABLED": 0, - "FAILED": 5, + "FAILED": 6, "PASSED": 3, "SKIPPED": 0, "UNKNOWN": 0 @@ -134,7 +134,7 @@ "Provenance conforms with expectations - SLSA Level 3" ], "justification": [ - "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_level_three_1 FAILED." + "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_available_1 FAILED." ], "result_type": "FAILED" }, @@ -152,6 +152,20 @@ ], "result_type": "FAILED" }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "check_description": "Check whether the target has a level-1 witness provenance.", + "slsa_requirements": [ + "Provenance - Available - SLSA Level 1", + "Provenance content - Identifies build instructions - SLSA Level 1", + "Provenance content - Identifies artifacts - SLSA Level 1", + "Provenance content - Identifies builder - SLSA Level 1" + ], + "justification": [ + "Check mcn_provenance_witness_level_one_1 is set to FAILED because mcn_provenance_available_1 FAILED." 
+ ], + "result_type": "FAILED" + }, { "check_id": "mcn_trusted_builder_level_three_1", "check_description": "Check whether the target uses a trusted SLSA level 3 builder.", @@ -201,6 +215,10 @@ "check_id": "mcn_provenance_level_three_1", "num_deps_pass": 0 }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "num_deps_pass": 0 + }, { "check_id": "mcn_build_service_1", "num_deps_pass": 0 diff --git a/tests/e2e/expected_results/multibuild_test/multibuild_test.json b/tests/e2e/expected_results/multibuild_test/multibuild_test.json index 2d7320580..92db0ee92 100644 --- a/tests/e2e/expected_results/multibuild_test/multibuild_test.json +++ b/tests/e2e/expected_results/multibuild_test/multibuild_test.json @@ -1,6 +1,6 @@ { "metadata": { - "timestamps": "2023-06-18 21:51:40" + "timestamps": "2023-07-08 03:15:03" }, "target": { "info": { @@ -61,7 +61,7 @@ "checks": { "summary": { "DISABLED": 0, - "FAILED": 4, + "FAILED": 5, "PASSED": 4, "SKIPPED": 0, "UNKNOWN": 0 @@ -141,7 +141,7 @@ "Provenance conforms with expectations - SLSA Level 3" ], "justification": [ - "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_level_three_1 FAILED." + "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_available_1 FAILED." ], "result_type": "FAILED" }, @@ -159,6 +159,20 @@ ], "result_type": "FAILED" }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "check_description": "Check whether the target has a level-1 witness provenance.", + "slsa_requirements": [ + "Provenance - Available - SLSA Level 1", + "Provenance content - Identifies build instructions - SLSA Level 1", + "Provenance content - Identifies artifacts - SLSA Level 1", + "Provenance content - Identifies builder - SLSA Level 1" + ], + "justification": [ + "Check mcn_provenance_witness_level_one_1 is set to FAILED because mcn_provenance_available_1 FAILED." + ], + "result_type": "FAILED" + }, { "check_id": "mcn_trusted_builder_level_three_1", "check_description": "Check whether the target uses a trusted SLSA level 3 builder.", @@ -208,6 +222,10 @@ "check_id": "mcn_provenance_level_three_1", "num_deps_pass": 0 }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "num_deps_pass": 0 + }, { "check_id": "mcn_provenance_expectation_1", "num_deps_pass": 0 diff --git a/tests/e2e/expected_results/plot-plugin/plot-plugin.json b/tests/e2e/expected_results/plot-plugin/plot-plugin.json index 7b8f04ff8..bd52af38e 100644 --- a/tests/e2e/expected_results/plot-plugin/plot-plugin.json +++ b/tests/e2e/expected_results/plot-plugin/plot-plugin.json @@ -1,6 +1,6 @@ { "metadata": { - "timestamps": "2023-06-14 11:15:48" + "timestamps": "2023-07-08 03:11:33" }, "target": { "info": { @@ -103,7 +103,7 @@ "checks": { "summary": { "DISABLED": 0, - "FAILED": 5, + "FAILED": 6, "PASSED": 3, "SKIPPED": 0, "UNKNOWN": 0 @@ -176,7 +176,7 @@ "Provenance conforms with expectations - SLSA Level 3" ], "justification": [ - "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_level_three_1 FAILED." + "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_available_1 FAILED." 
], "result_type": "FAILED" }, @@ -194,6 +194,20 @@ ], "result_type": "FAILED" }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "check_description": "Check whether the target has a level-1 witness provenance.", + "slsa_requirements": [ + "Provenance - Available - SLSA Level 1", + "Provenance content - Identifies build instructions - SLSA Level 1", + "Provenance content - Identifies artifacts - SLSA Level 1", + "Provenance content - Identifies builder - SLSA Level 1" + ], + "justification": [ + "Check mcn_provenance_witness_level_one_1 is set to FAILED because mcn_provenance_available_1 FAILED." + ], + "result_type": "FAILED" + }, { "check_id": "mcn_trusted_builder_level_three_1", "check_description": "Check whether the target uses a trusted SLSA level 3 builder.", @@ -246,6 +260,10 @@ { "check_id": "mcn_build_service_1", "num_deps_pass": 0 + }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "num_deps_pass": 0 } ], "dep_status": [] diff --git a/tests/e2e/expected_results/slsa-verifier/slsa-verifier_cue_PASS.json b/tests/e2e/expected_results/slsa-verifier/slsa-verifier_cue_PASS.json index 46028fa42..a86c796eb 100644 --- a/tests/e2e/expected_results/slsa-verifier/slsa-verifier_cue_PASS.json +++ b/tests/e2e/expected_results/slsa-verifier/slsa-verifier_cue_PASS.json @@ -1,6 +1,6 @@ { "metadata": { - "timestamps": "2023-06-14 14:13:33" + "timestamps": "2023-07-08 21:09:08" }, "target": { "info": { @@ -1683,7 +1683,7 @@ "checks": { "summary": { "DISABLED": 0, - "FAILED": 0, + "FAILED": 1, "PASSED": 8, "SKIPPED": 0, "UNKNOWN": 0 @@ -1768,6 +1768,20 @@ ], "result_type": "PASSED" }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "check_description": "Check whether the target has a level-1 witness provenance.", + "slsa_requirements": [ + "Provenance - Available - SLSA Level 1", + "Provenance content - Identifies build instructions - SLSA Level 1", + "Provenance content - Identifies artifacts - SLSA Level 1", + "Provenance content - Identifies builder - SLSA Level 1" + ], + "justification": [ + "Failed to discover any witness provenance." + ], + "result_type": "FAILED" + }, { "check_id": "mcn_trusted_builder_level_three_1", "check_description": "Check whether the target uses a trusted SLSA level 3 builder.", @@ -1836,6 +1850,10 @@ { "check_id": "mcn_build_service_1", "num_deps_pass": 0 + }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "num_deps_pass": 0 } ], "dep_status": [] diff --git a/tests/e2e/expected_results/tinyMediaManager/tinyMediaManager.json b/tests/e2e/expected_results/tinyMediaManager/tinyMediaManager.json index b2855cb14..26c5745eb 100644 --- a/tests/e2e/expected_results/tinyMediaManager/tinyMediaManager.json +++ b/tests/e2e/expected_results/tinyMediaManager/tinyMediaManager.json @@ -1,6 +1,6 @@ { "metadata": { - "timestamps": "2023-06-20 18:27:38" + "timestamps": "2023-07-08 12:28:33" }, "target": { "info": { @@ -61,7 +61,7 @@ "checks": { "summary": { "DISABLED": 0, - "FAILED": 6, + "FAILED": 7, "PASSED": 2, "SKIPPED": 0, "UNKNOWN": 0 @@ -109,7 +109,8 @@ "Build service - SLSA Level 2" ], "justification": [ - "The target repository does not have a build service." + "The target repository does not have a build service for maven.", + "The target repository does not have a build service for at least one build tool." ], "result_type": "FAILED" }, @@ -134,7 +135,7 @@ "Provenance conforms with expectations - SLSA Level 3" ], "justification": [ - "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_level_three_1 FAILED." 
+ "Check mcn_provenance_expectation_1 is set to FAILED because mcn_provenance_available_1 FAILED." ], "result_type": "FAILED" }, @@ -152,6 +153,20 @@ ], "result_type": "FAILED" }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "check_description": "Check whether the target has a level-1 witness provenance.", + "slsa_requirements": [ + "Provenance - Available - SLSA Level 1", + "Provenance content - Identifies build instructions - SLSA Level 1", + "Provenance content - Identifies artifacts - SLSA Level 1", + "Provenance content - Identifies builder - SLSA Level 1" + ], + "justification": [ + "Check mcn_provenance_witness_level_one_1 is set to FAILED because mcn_provenance_available_1 FAILED." + ], + "result_type": "FAILED" + }, { "check_id": "mcn_trusted_builder_level_three_1", "check_description": "Check whether the target uses a trusted SLSA level 3 builder.", @@ -197,6 +212,10 @@ "check_id": "mcn_provenance_available_1", "num_deps_pass": 0 }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "num_deps_pass": 0 + }, { "check_id": "mcn_provenance_expectation_1", "num_deps_pass": 0 diff --git a/tests/e2e/expected_results/urllib3/urllib3.json b/tests/e2e/expected_results/urllib3/urllib3.json index 0166186a7..9fab9f184 100644 --- a/tests/e2e/expected_results/urllib3/urllib3.json +++ b/tests/e2e/expected_results/urllib3/urllib3.json @@ -1,6 +1,6 @@ { "metadata": { - "timestamps": "2023-06-14 11:34:11" + "timestamps": "2023-07-08 03:12:00" }, "target": { "info": { @@ -271,7 +271,7 @@ "checks": { "summary": { "DISABLED": 0, - "FAILED": 1, + "FAILED": 2, "PASSED": 7, "SKIPPED": 0, "UNKNOWN": 0 @@ -352,10 +352,24 @@ ], "justification": [ "Successfully verified level 3: ", - "verify passed : urllib3-2.0.3-py3-none-any.whl,verify passed : urllib3-2.0.3.tar.gz" + "verify passed: urllib3-2.0.3-py3-none-any.whl,verify passed: urllib3-2.0.3.tar.gz" ], "result_type": "PASSED" }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "check_description": "Check whether the target has a level-1 witness provenance.", + "slsa_requirements": [ + "Provenance - Available - SLSA Level 1", + "Provenance content - Identifies build instructions - SLSA Level 1", + "Provenance content - Identifies artifacts - SLSA Level 1", + "Provenance content - Identifies builder - SLSA Level 1" + ], + "justification": [ + "Failed to discover any witness provenance." 
+ ], + "result_type": "FAILED" + }, { "check_id": "mcn_version_control_system_1", "check_description": "Check whether the target repo uses a version control system.", @@ -421,6 +435,10 @@ { "check_id": "mcn_build_service_1", "num_deps_pass": 0 + }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "num_deps_pass": 0 } ], "dep_status": [] diff --git a/tests/e2e/expected_results/urllib3/urllib3_cue_invalid.json b/tests/e2e/expected_results/urllib3/urllib3_cue_invalid.json index 3a1865bfc..5e087a0fc 100644 --- a/tests/e2e/expected_results/urllib3/urllib3_cue_invalid.json +++ b/tests/e2e/expected_results/urllib3/urllib3_cue_invalid.json @@ -1,6 +1,6 @@ { "metadata": { - "timestamps": "2023-06-16 20:26:58" + "timestamps": "2023-07-08 21:42:11" }, "target": { "info": { @@ -271,7 +271,7 @@ "checks": { "summary": { "DISABLED": 0, - "FAILED": 1, + "FAILED": 2, "PASSED": 6, "SKIPPED": 0, "UNKNOWN": 1 @@ -356,6 +356,20 @@ ], "result_type": "PASSED" }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "check_description": "Check whether the target has a level-1 witness provenance.", + "slsa_requirements": [ + "Provenance - Available - SLSA Level 1", + "Provenance content - Identifies build instructions - SLSA Level 1", + "Provenance content - Identifies artifacts - SLSA Level 1", + "Provenance content - Identifies builder - SLSA Level 1" + ], + "justification": [ + "Failed to discover any witness provenance." + ], + "result_type": "FAILED" + }, { "check_id": "mcn_version_control_system_1", "check_description": "Check whether the target repo uses a version control system.", @@ -421,6 +435,10 @@ { "check_id": "mcn_build_service_1", "num_deps_pass": 0 + }, + { + "check_id": "mcn_provenance_witness_level_one_1", + "num_deps_pass": 0 } ], "dep_status": [] diff --git a/tests/slsa_analyzer/build_tool/test_gradle.py b/tests/slsa_analyzer/build_tool/test_gradle.py index a2f094767..908034fc2 100644 --- a/tests/slsa_analyzer/build_tool/test_gradle.py +++ b/tests/slsa_analyzer/build_tool/test_gradle.py @@ -37,3 +37,52 @@ def test_gradle_build_tool(gradle_tool: Gradle, macaron_path: str, mock_repo: st base_dir = Path(__file__).parent ctx = prepare_repo_for_testing(mock_repo, macaron_path, base_dir) assert gradle_tool.is_detected(ctx.component.repository.fs_path) == expected_value + + +def test_get_group_ids_nested_projects(tmp_path: Path, gradle_tool: Gradle) -> None: + """Test the ``get_group_ids`` method in case there are Gradle projects nested under a root project. + + In this case, we should only obtain the group id of the root project, making the assumption + that all subprojects under it have the same group id. + + This is consistent with the behavior of the ``get_build_dirs`` method. 
+ """ + repo_dir = tmp_path.joinpath("repo") + subproject_a_dir = repo_dir.joinpath("subprojecta") + subproject_b_dir = repo_dir.joinpath("subprojectb") + + subproject_a_dir.mkdir(parents=True) + subproject_b_dir.mkdir(parents=True) + + with open(repo_dir.joinpath("build.gradle"), "w", encoding="utf-8") as file: + file.write('group = "io.micronaut"') + with open(subproject_a_dir.joinpath("build.gradle"), "w", encoding="utf-8") as file: + file.write('group = "io.micronaut.foo"') + with open(subproject_b_dir.joinpath("build.gradle"), "w", encoding="utf-8") as file: + file.write('group = "io.micronaut.bar"') + + assert set(gradle_tool.get_group_ids(str(repo_dir))) == {"io.micronaut"} + + +def test_get_group_ids_separate_projects(tmp_path: Path, gradle_tool: Gradle) -> None: + """Test the ``get_group_ids`` method in case there are multiple separate Gradle projects in a repo. + + "Separate projects" means they are in different directories in the repo. + """ + repo_dir = tmp_path.joinpath("repo") + + project_a_dir = repo_dir.joinpath("subprojecta") + project_b_dir = repo_dir.joinpath("subprojectb") + + project_a_dir.mkdir(parents=True) + project_b_dir.mkdir(parents=True) + + with open(project_a_dir.joinpath("build.gradle"), "w", encoding="utf-8") as file: + file.write('group = "io.micronaut.foo"') + with open(project_b_dir.joinpath("build.gradle"), "w", encoding="utf-8") as file: + file.write('group = "io.micronaut.bar"') + + assert set(gradle_tool.get_group_ids(str(repo_dir))) == { + "io.micronaut.foo", + "io.micronaut.bar", + } diff --git a/tests/slsa_analyzer/checks/test_provenance_available_check.py b/tests/slsa_analyzer/checks/test_provenance_available_check.py index 9f6a84b7f..2dc182d14 100644 --- a/tests/slsa_analyzer/checks/test_provenance_available_check.py +++ b/tests/slsa_analyzer/checks/test_provenance_available_check.py @@ -35,17 +35,14 @@ def __init__(self, profile: dict): super().__init__(profile) self.release = { "assets": [ - {"name": "attestation.intoto.jsonl", "url": "URL", "size": "10"}, - {"name": "artifact.txt", "url": "URL", "size": "10"}, + {"name": "attestation.intoto.jsonl", "url": "URL", "size": 10}, + {"name": "artifact.txt", "url": "URL", "size": 10}, ] } def get_latest_release(self, full_name: str) -> dict: return self.release - def get_assets(self, release: dict, name: str = "", ext: str = "") -> list[dict]: - return [item for item in self.release["assets"] if item["name"] == name or item["name"].endswith(ext)] - def download_asset(self, url: str, download_path: str) -> bool: return False @@ -85,10 +82,10 @@ def test_provenance_available_check(self) -> None: assert check.run_check(ctx, check_result) == CheckResultType.PASSED # Repo doesn't have a provenance. - api_client.release = {"assets": [{"name": "attestation.intoto", "url": "URL", "size": "10"}]} + api_client.release = {"assets": [{"name": "attestation.intoto", "url": "URL", "size": 10}]} assert check.run_check(ctx, check_result) == CheckResultType.FAILED - api_client.release = {"assets": [{"name": "attestation.intoto.jsonl", "url": "URL", "size": "10"}]} + api_client.release = {"assets": [{"name": "attestation.intoto.jsonl", "url": "URL", "size": 10}]} # Test Jenkins. 
ci_info["service"] = jenkins diff --git a/tests/slsa_analyzer/checks/test_provenance_l3_check.py b/tests/slsa_analyzer/checks/test_provenance_l3_check.py index 72626712f..de8cb528a 100644 --- a/tests/slsa_analyzer/checks/test_provenance_l3_check.py +++ b/tests/slsa_analyzer/checks/test_provenance_l3_check.py @@ -12,7 +12,7 @@ from macaron.slsa_analyzer.ci_service.gitlab_ci import GitLabCI from macaron.slsa_analyzer.ci_service.jenkins import Jenkins from macaron.slsa_analyzer.ci_service.travis import Travis -from macaron.slsa_analyzer.git_service.api_client import GhAPIClient +from macaron.slsa_analyzer.git_service.api_client import GhAPIClient, GitHubReleaseAsset from macaron.slsa_analyzer.specs.ci_spec import CIInfo from tests.conftest import MockAnalyzeContext @@ -43,9 +43,6 @@ def __init__(self, profile: dict): def get_latest_release(self, full_name: str) -> dict: return self.release - def get_assets(self, release: dict, name: str = "", ext: str = "") -> list[dict]: - return [item for item in self.release["assets"] if item["name"] == name or item["name"].endswith(ext)] - def download_asset(self, url: str, download_path: str) -> bool: return True @@ -80,11 +77,21 @@ def test_provenance_l3_check(self) -> None: ) # Repo has provenances but no downloaded files. - ci_info["provenance_assets"] = [{"name": "attestation.intoto.jsonl", "url": "URL", "size": "10"}] + ci_info["provenance_assets"] = [] + ci_info["provenance_assets"].extend( + [ + GitHubReleaseAsset( + name="attestation.intoto.jsonl", + url="URL", + size_in_bytes=10, + api_client=api_client, + ) + ] + ) ci_info["latest_release"] = { "assets": [ - {"name": "attestation.intoto.jsonl", "url": "URL", "size": "10"}, - {"name": "artifact.txt", "url": "URL", "size": "10"}, + {"name": "attestation.intoto.jsonl", "url": "URL", "size": 10}, + {"name": "artifact.txt", "url": "URL", "size": 10}, ] } ctx = MockAnalyzeContext(macaron_path=MacaronTestCase.macaron_path, output_dir="") @@ -92,11 +99,21 @@ def test_provenance_l3_check(self) -> None: assert check.run_check(ctx, check_result) == CheckResultType.FAILED # Attestation size is too large. 
- ci_info["provenance_assets"] = [{"name": "attestation.intoto.jsonl", "url": "URL", "size": "100000000"}] + ci_info["provenance_assets"] = [] + ci_info["provenance_assets"].extend( + [ + GitHubReleaseAsset( + name="attestation.intoto.jsonl", + url="URL", + size_in_bytes=100_000_000, + api_client=api_client, + ) + ] + ) ci_info["latest_release"] = { "assets": [ - {"name": "attestation.intoto.jsonl", "url": "URL", "size": "100000000"}, - {"name": "artifact.txt", "url": "URL", "size": "10"}, + {"name": "attestation.intoto.jsonl", "url": "URL", "size": 100_000_000}, + {"name": "artifact.txt", "url": "URL", "size": 10}, ] } assert check.run_check(ctx, check_result) == CheckResultType.FAILED @@ -105,14 +122,24 @@ def test_provenance_l3_check(self) -> None: ci_info["provenance_assets"] = [] ci_info["latest_release"] = { "assets": [ - {"name": "attestation.intoto.jsonl", "url": "URL", "size": "10"}, - {"name": "artifact.txt", "url": "URL", "size": "10"}, + {"name": "attestation.intoto.jsonl", "url": "URL", "size": 10}, + {"name": "artifact.txt", "url": "URL", "size": 10}, ] } assert check.run_check(ctx, check_result) == CheckResultType.FAILED # No release available - ci_info["provenance_assets"] = [{"name": "attestation.intoto.jsonl", "url": "URL", "size": "10"}] + ci_info["provenance_assets"] = [] + ci_info["provenance_assets"].extend( + [ + GitHubReleaseAsset( + name="attestation.intoto.jsonl", + url="URL", + size_in_bytes=10, + api_client=api_client, + ) + ] + ) ci_info["latest_release"] = {} assert check.run_check(ctx, check_result) == CheckResultType.FAILED diff --git a/tests/slsa_analyzer/checks/test_provenance_l3_content_check.py b/tests/slsa_analyzer/checks/test_provenance_l3_content_check.py index f919e5ed4..296d2dac2 100644 --- a/tests/slsa_analyzer/checks/test_provenance_l3_content_check.py +++ b/tests/slsa_analyzer/checks/test_provenance_l3_content_check.py @@ -15,7 +15,7 @@ from macaron.slsa_analyzer.ci_service.jenkins import Jenkins from macaron.slsa_analyzer.ci_service.travis import Travis from macaron.slsa_analyzer.git_service.api_client import GhAPIClient -from macaron.slsa_analyzer.provenance.loader import ProvPayloadLoader +from macaron.slsa_analyzer.provenance.loader import load_provenance_payload from macaron.slsa_analyzer.specs.ci_spec import CIInfo from tests.conftest import MockAnalyzeContext @@ -46,9 +46,6 @@ def __init__(self, profile: dict): def get_latest_release(self, full_name: str) -> dict: return self.release - def get_assets(self, release: dict, name: str = "", ext: str = "") -> list[dict]: - return [item for item in self.release["assets"] if item["name"] == name or item["name"].endswith(ext)] - def download_asset(self, url: str, download_path: str) -> bool: return True @@ -97,7 +94,7 @@ def test_expectation_check(self) -> None: # Repo has a provenance, but no expectation. ci_info["provenances"] = [ - ProvPayloadLoader.load(os.path.join(prov_dir, "slsa-verifier-linux-amd64.intoto.jsonl")) + load_provenance_payload(os.path.join(prov_dir, "slsa-verifier-linux-amd64.intoto.jsonl")), ] ctx.dynamic_data["is_inferred_prov"] = False ctx.dynamic_data["expectation"] = None diff --git a/tests/slsa_analyzer/checks/test_provenance_witness_l1_check.py b/tests/slsa_analyzer/checks/test_provenance_witness_l1_check.py new file mode 100644 index 000000000..ed13760b8 --- /dev/null +++ b/tests/slsa_analyzer/checks/test_provenance_witness_l1_check.py @@ -0,0 +1,4 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. 
+# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""Test the check ``provenance_witness_l1_check``.""" diff --git a/tests/slsa_analyzer/checks/test_vcs_check.py b/tests/slsa_analyzer/checks/test_vcs_check.py index 92faae552..5dc99c804 100644 --- a/tests/slsa_analyzer/checks/test_vcs_check.py +++ b/tests/slsa_analyzer/checks/test_vcs_check.py @@ -36,6 +36,7 @@ def __init__(self) -> None: ci_services=[], is_inferred_prov=True, expectation=None, + package_registries=[], ) self.wrapper_path = "" self.output_dir = "" diff --git a/tests/slsa_analyzer/package_registry/__init__.py b/tests/slsa_analyzer/package_registry/__init__.py new file mode 100644 index 000000000..19aeac023 --- /dev/null +++ b/tests/slsa_analyzer/package_registry/__init__.py @@ -0,0 +1,2 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. diff --git a/tests/slsa_analyzer/package_registry/test_jfrog_maven_registry.py b/tests/slsa_analyzer/package_registry/test_jfrog_maven_registry.py new file mode 100644 index 000000000..eca170319 --- /dev/null +++ b/tests/slsa_analyzer/package_registry/test_jfrog_maven_registry.py @@ -0,0 +1,556 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""Tests for the ``JFrogMavenRegistry`` class.""" + +import os +from pathlib import Path + +import pytest + +from macaron.config.defaults import load_defaults +from macaron.errors import ConfigurationError +from macaron.slsa_analyzer.build_tool.base_build_tool import BaseBuildTool +from macaron.slsa_analyzer.build_tool.gradle import Gradle +from macaron.slsa_analyzer.build_tool.maven import Maven +from macaron.slsa_analyzer.build_tool.pip import Pip +from macaron.slsa_analyzer.build_tool.poetry import Poetry +from macaron.slsa_analyzer.package_registry.jfrog_maven_registry import JFrogMavenAssetMetadata, JFrogMavenRegistry + + +@pytest.fixture(name="jfrog_maven") +def jfrog_maven_instance() -> JFrogMavenRegistry: + """Provide a default ``JFrogMavenRegistry`` object used in the tests below.""" + return JFrogMavenRegistry( + hostname="registry.jfrog.com", + repo="repo", + enabled=True, + ) + + +def test_load_defaults(tmp_path: Path) -> None: + """Test the ``load_defaults`` method.""" + user_config_path = os.path.join(tmp_path, "config.ini") + user_config_input = """ + [package_registry.jfrog.maven] + hostname = jfrog.registry.xyz + repo = prod-repo + download_timeout = 300 + """ + with open(user_config_path, "w", encoding="utf-8") as user_config_file: + user_config_file.write(user_config_input) + + # We don't have to worry about modifying the ``defaults`` object causing test + # pollution here, since we reload the ``defaults`` object before every test with the + # ``setup_test`` fixture. 
+ load_defaults(user_config_path) + + jfrog_maven = JFrogMavenRegistry() + jfrog_maven.load_defaults() + assert jfrog_maven.hostname == "jfrog.registry.xyz" + assert jfrog_maven.repo == "prod-repo" + assert jfrog_maven.download_timeout == 300 + + +def test_load_defaults_without_jfrog_maven_config() -> None: + """Test the ``load_defaults`` method in trivial case when no config is given.""" + jfrog_maven = JFrogMavenRegistry() + jfrog_maven.load_defaults() + + +@pytest.mark.parametrize( + ("user_config_input"), + [ + pytest.param( + """ + [package_registry.jfrog.maven] + repo = prod-repo + """, + id="Missing hostname", + ), + pytest.param( + """ + [package_registry.jfrog.maven] + hostname = jfrog.registry.xyz + """, + id="Missing repo", + ), + pytest.param( + """ + [package_registry.jfrog.maven] + hostname = jfrog.registry.xyz + repo = prod-repo + download_timeout = foo + """, + id="Invalid value for download_timeout", + ), + pytest.param( + """ + [requests] + timeout = foo + + [package_registry.jfrog.maven] + hostname = jfrog.registry.xyz + repo = prod-repo + download_timeout = foo + """, + id="Invalid value for request timeout", + ), + ], +) +def test_load_defaults_with_invalid_config(tmp_path: Path, user_config_input: str) -> None: + """Test the ``load_defaults`` method in case the config is invalid.""" + user_config_path = os.path.join(tmp_path, "config.ini") + with open(user_config_path, "w", encoding="utf-8") as user_config_file: + user_config_file.write(user_config_input) + + # We don't have to worry about modifying the ``defaults`` object causing test + # pollution here, since we reload the ``defaults`` object before every test with the + # ``setup_test`` fixture. + load_defaults(user_config_path) + + jfrog_maven = JFrogMavenRegistry() + with pytest.raises(ConfigurationError): + jfrog_maven.load_defaults() + + +@pytest.mark.parametrize( + ("build_tool", "expected_result"), + [ + (Maven(), True), + (Gradle(), True), + (Pip(), False), + (Poetry(), False), + ], +) +def test_is_detected( + jfrog_maven: JFrogMavenRegistry, + build_tool: BaseBuildTool, + expected_result: bool, +) -> None: + """Test the ``is_detected`` method.""" + assert jfrog_maven.is_detected(build_tool) == expected_result + + # The method always returns False when the jfrog_maven instance is not enabled + # (in the ini config). 
+ jfrog_maven.enabled = False + assert jfrog_maven.is_detected(build_tool) is False + + +@pytest.mark.parametrize( + ("args", "expected_path"), + [ + pytest.param( + { + "group_id": "io.micronaut", + }, + "io/micronaut", + id="Only group_id 1", + ), + pytest.param( + { + "group_id": "com.fasterxml.jackson.core", + }, + "com/fasterxml/jackson/core", + id="Only group_id 2", + ), + pytest.param( + { + "group_id": "com.fasterxml.jackson.core", + "artifact_id": "jackson-annotations", + }, + "com/fasterxml/jackson/core/jackson-annotations", + id="group_id and artifact_id", + ), + pytest.param( + { + "group_id": "com.fasterxml.jackson.core", + "artifact_id": "jackson-annotations", + "version": "2.9.9", + }, + "com/fasterxml/jackson/core/jackson-annotations/2.9.9", + id="group_id and artifact_id and version", + ), + pytest.param( + { + "group_id": "com.fasterxml.jackson.core", + "artifact_id": "jackson-annotations", + "version": "2.9.9", + "asset_name": "jackson-annotations-2.9.9.jar", + }, + "com/fasterxml/jackson/core/jackson-annotations/2.9.9/jackson-annotations-2.9.9.jar", + id="group_id and artifact_id and version and asset_name,", + ), + ], +) +def test_construct_maven_repository_path( + jfrog_maven: JFrogMavenRegistry, + args: dict, + expected_path: str, +) -> None: + """Test the ``construct_maven_repository_path`` method.""" + assert jfrog_maven.construct_maven_repository_path(**args) == expected_path + + +@pytest.mark.parametrize( + ("group_id", "expected_group_path"), + [ + ( + "io.micronaut", + "io/micronaut", + ), + ( + "com.fasterxml.jackson.core", + "com/fasterxml/jackson/core", + ), + ], +) +def test_to_group_folder_path( + jfrog_maven: JFrogMavenRegistry, + group_id: str, + expected_group_path: str, +) -> None: + """Test the ``to_gorup_folder_path`` method.""" + assert jfrog_maven.construct_maven_repository_path(group_id) == expected_group_path + + +@pytest.mark.parametrize( + ("folder_path", "expected_url"), + [ + ( + "io/micronaut/micronaut-jdbc", + "https://registry.jfrog.com/api/storage/repo/io/micronaut/micronaut-jdbc", + ), + ( + "com/fasterxml/jackson/core/jackson-annotations", + "https://registry.jfrog.com/api/storage/repo/com/fasterxml/jackson/core/jackson-annotations", + ), + ], +) +def test_construct_folder_info_url( + jfrog_maven: JFrogMavenRegistry, + folder_path: str, + expected_url: str, +) -> None: + """Test the ``construct_folder_info_url`` method.""" + assert jfrog_maven.construct_folder_info_url(folder_path) == expected_url + + +@pytest.mark.parametrize( + ("file_path", "expected_url"), + [ + ( + "com/fasterxml/jackson/core/jackson-annotations/2.9.9/jackson-annotations-2.9.9.jar", + ( + "https://registry.jfrog.com/api/storage/repo/" + "com/fasterxml/jackson/core/jackson-annotations/2.9.9/jackson-annotations-2.9.9.jar" + ), + ), + ( + "com/fasterxml/jackson/core/jackson-annotations", + "https://registry.jfrog.com/api/storage/repo/com/fasterxml/jackson/core/jackson-annotations", + ), + ], +) +def test_construct_file_info_url( + jfrog_maven: JFrogMavenRegistry, + file_path: str, + expected_url: str, +) -> None: + """Test the ``construct_file_info_url`` method.""" + assert jfrog_maven.construct_file_info_url(file_path) == expected_url + + +@pytest.mark.parametrize( + ("args", "expected_url"), + [ + pytest.param( + { + "group_id": "io.micronaut", + "artifact_id": "micronaut-jdbc", + }, + "https://registry.jfrog.com/api/search/latestVersion?repos=repo&g=io.micronaut&a=micronaut-jdbc", + ), + pytest.param( + { + "group_id": "com.fasterxml.jackson.core", + "artifact_id": 
"jackson-annotations", + }, + "https://registry.jfrog.com/api/search/latestVersion?repos=repo&g=com.fasterxml.jackson.core&a=jackson-annotations", # noqa: B950 + ), + ], +) +def test_construct_latest_version_url( + jfrog_maven: JFrogMavenRegistry, + args: dict, + expected_url: str, +) -> None: + """Test the ``construct_latest_version_url`` method.""" + assert jfrog_maven.construct_latest_version_url(**args) == expected_url + + +@pytest.mark.parametrize( + ("folder_info_payload", "expected_folder_names"), + [ + pytest.param( + """ + { + "children": [ + { + "uri": "/child1", + "folder": true + }, + { + "uri": "/child2", + "folder": false + } + ] + } + """, + ["child1"], + id="Payload with both files and folders", + ), + pytest.param( + """ + { + "children": [ + { + "uri": "/jackson-annotations", + "folder": true + }, + { + "uri": "/jackson-core", + "folder": true + } + ] + } + """, + ["jackson-annotations", "jackson-core"], + id="Payload with folders only", + ), + ], +) +def test_extract_folder_names_from_folder_info_payload( + jfrog_maven: JFrogMavenRegistry, + folder_info_payload: str, + expected_folder_names: list[str], +) -> None: + """Test the ``extract_folder_names_from_folder_info_payload`` method.""" + assert jfrog_maven.extract_folder_names_from_folder_info_payload(folder_info_payload) == expected_folder_names + + +@pytest.mark.parametrize( + ("args", "expected_file_names"), + [ + pytest.param( + { + "folder_info_payload": """ + { + "children": [ + { + "uri": "/child1", + "folder": true + }, + { + "uri": "/child2", + "folder": false + } + ] + } + """ + }, + ["child2"], + id="Payload with both files and folders", + ), + pytest.param( + { + "folder_info_payload": """ + { + "children": [ + { + "uri": "/jackson-databind-2.9.9.jar", + "folder": false + }, + { + "uri": "/jackson-databind-2.9.9.jar.asc", + "folder": false + }, + { + "uri": "/jackson-databind-2.9.9.jar.md5", + "folder": false + }, + { + "uri": "/jackson-databind-2.9.9.jar.sha1", + "folder": false + }, + { + "uri": "/multiple.intoto.jsonl", + "folder": false + } + ] + } + """, + "extensions": ["jar"], + }, + ["jackson-databind-2.9.9.jar"], + id="One allowed extension 1", + ), + pytest.param( + { + "folder_info_payload": """ + { + "children": [ + { + "uri": "/jackson-databind-2.9.9.jar", + "folder": false + }, + { + "uri": "/jackson-databind-2.9.9.jar.md5", + "folder": false + }, + { + "uri": "/jackson-databind-2.9.9-javadoc.jar", + "folder": false + }, + { + "uri": "/jackson-databind-2.9.9-javadoc.jar.md5", + "folder": false + }, + { + "uri": "/jackson-databind-2.9.9-sources.jar", + "folder": false + }, + { + "uri": "/jackson-databind-2.9.9-sources.jar.md5", + "folder": false + }, + { + "uri": "/multiple.intoto.jsonl", + "folder": false + } + ] + } + """, + "extensions": ["jar"], + }, + [ + "jackson-databind-2.9.9.jar", + "jackson-databind-2.9.9-javadoc.jar", + "jackson-databind-2.9.9-sources.jar", + ], + id="One allowed extension 2", + ), + pytest.param( + { + "folder_info_payload": """ + { + "children": [ + { + "uri": "/jackson-databind-2.9.9.jar", + "folder": false + }, + { + "uri": "/jackson-databind-2.9.9.jar.asc", + "folder": false + }, + { + "uri": "/jackson-databind-2.9.9.jar.md5", + "folder": false + }, + { + "uri": "/jackson-databind-2.9.9.jar.sha1", + "folder": false + }, + { + "uri": "/multiple.intoto.jsonl", + "folder": false + } + ] + } + """, + "extensions": ["jar", "intoto.jsonl"], + }, + ["jackson-databind-2.9.9.jar", "multiple.intoto.jsonl"], + id="Multiple allowed extensions", + ), + 
pytest.param({"folder_info_payload": "{}"}, [], id="Malformed payload 1"), + pytest.param( + { + "folder_info_payload": """ + { + "children": {} + } + """, + }, + [], + id="Malformed payload 2", + ), + pytest.param( + { + "folder_info_payload": """ + { + "children": [ + { + "uri": "/jackson-databind-2.9.9.jar", + "folder": false + }, + { + "uri": {}, + "folder": false + }, + { + "uri": "/foo" + }, + { + "uri": "/multiple.intoto.jsonl", + "folder": false + } + ] + } + """, + }, + ["jackson-databind-2.9.9.jar", "multiple.intoto.jsonl"], + id="Malformed payload 3", + ), + ], +) +def test_extract_file_names_from_folder_info_payload( + jfrog_maven: JFrogMavenRegistry, + args: dict, + expected_file_names: list[str], +) -> None: + """Test the ``extract_file_names_from_folder_info_payload`` method.""" + assert jfrog_maven.extract_file_names_from_folder_info_payload(**args) == expected_file_names + + +@pytest.mark.parametrize( + ("file_info_payload", "expected_metadata"), + [ + pytest.param( + """ + { + "size": "66897", + "checksums": { + "sha1": "d735e01f9d6e3f31166a6783903a400faaf30376", + "md5": "bcdc3d1df2197c73fcc95189372a1247", + "sha256": "17918b3097285da88371fac925922902a9fe60f075237e76f406c09234c8d614" + }, + "downloadUri": "https://registry.jfrog.com/repo/com/fasterxml/jackson/core/jackson-annotations/2.9.9/jackson-annotations-2.9.9.jar" + } + """, # noqa: B950 + JFrogMavenAssetMetadata( + size_in_bytes=66897, + sha256_digest="17918b3097285da88371fac925922902a9fe60f075237e76f406c09234c8d614", + download_uri="https://registry.jfrog.com/repo/com/fasterxml/jackson/core/jackson-annotations/2.9.9/jackson-annotations-2.9.9.jar", # noqa: B950 + ), + id="Valid", + ), + ], +) +def test_extract_asset_metadata_from_file_info_payload( + jfrog_maven: JFrogMavenRegistry, + file_info_payload: str, + expected_metadata: JFrogMavenAssetMetadata, +) -> None: + """Test the ``extract_asset_metadata_from_file_info_payload`` method.""" + assert jfrog_maven.extract_asset_metadata_from_file_info_payload(file_info_payload) == expected_metadata diff --git a/tests/slsa_analyzer/provenance/intoto/__init__.py b/tests/slsa_analyzer/provenance/intoto/__init__.py new file mode 100644 index 000000000..19aeac023 --- /dev/null +++ b/tests/slsa_analyzer/provenance/intoto/__init__.py @@ -0,0 +1,2 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. diff --git a/tests/slsa_analyzer/provenance/intoto/v01/__init__.py b/tests/slsa_analyzer/provenance/intoto/v01/__init__.py new file mode 100644 index 000000000..19aeac023 --- /dev/null +++ b/tests/slsa_analyzer/provenance/intoto/v01/__init__.py @@ -0,0 +1,2 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. diff --git a/tests/slsa_analyzer/provenance/intoto/v01/test_validate.py b/tests/slsa_analyzer/provenance/intoto/v01/test_validate.py new file mode 100644 index 000000000..2e438a484 --- /dev/null +++ b/tests/slsa_analyzer/provenance/intoto/v01/test_validate.py @@ -0,0 +1,196 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. 
+ +"""Tests for validation of in-toto attestation version 0.1.""" + +import pytest + +from macaron.slsa_analyzer.provenance.intoto.errors import ValidateInTotoPayloadError +from macaron.slsa_analyzer.provenance.intoto.v01 import validate_intoto_statement, validate_intoto_subject +from macaron.util import JsonType + + +@pytest.mark.parametrize( + ("payload"), + [ + pytest.param( + { + "_type": "https://in-toto.io/Statement/v0.1", + "subject": [ + { + "name": "foo.txt", + "digest": {"sha256": "abcxyz123456"}, + }, + ], + "predicateType": "https://slsa.dev/provenance/v0.2", + }, + id="Without predicate", + ), + pytest.param( + { + "_type": "https://in-toto.io/Statement/v0.1", + "subject": [ + { + "name": "foo.txt", + "digest": {"sha256": "abcxyz123456"}, + }, + ], + "predicateType": "https://slsa.dev/provenance/v0.2", + "predicate": { + "builder": { + "id": "https://github.com/slsa-framework/slsa-github-generator/.github/workflows/builder_go_slsa3.yml@refs/tags/v1.5.0" # noqa: B950 + }, + "buildType": "https://github.com/slsa-framework/slsa-github-generator/go@v1", + }, + }, + id="With predicate", + ), + ], +) +def test_validate_valid_intoto_statement( + payload: dict[str, JsonType], +) -> None: + """Test validating valid in-toto statements.""" + assert validate_intoto_statement(payload) is True + + +@pytest.mark.parametrize( + ("payload"), + [ + pytest.param( + { + "subject": [ + { + "name": "foo.txt", + "digest": {"sha256": "abcxyz123456"}, + }, + ], + "predicateType": "https://slsa.dev/provenance/v0.2", + }, + id="Missing '_type'", + ), + pytest.param( + { + "_type": {}, + "subject": [ + { + "name": "foo.txt", + "digest": {"sha256": "abcxyz123456"}, + }, + ], + "predicateType": "https://slsa.dev/provenance/v0.2", + }, + id="Invalid '_type'", + ), + pytest.param( + { + "_type": "https://in-toto.io/Statement/v0.1", + "predicateType": "https://slsa.dev/provenance/v0.2", + }, + id="Missing 'subject'", + ), + pytest.param( + { + "_type": "https://in-toto.io/Statement/v0.1", + "subject": "subject", + "predicateType": "https://slsa.dev/provenance/v0.2", + }, + id="Invalid 'subject'", + ), + pytest.param( + { + "_type": "https://in-toto.io/Statement/v0.1", + "subject": [ + { + "name": "foo.txt", + "digest": {"sha256": "abcxyz123456"}, + }, + ], + }, + id="Missing 'predicateType'", + ), + pytest.param( + { + "_type": "https://in-toto.io/Statement/v0.1", + "subject": [ + { + "name": "foo.txt", + "digest": {"sha256": "abcxyz123456"}, + }, + ], + "predicateType": {}, + }, + id="Invalid 'predicateType'", + ), + pytest.param( + { + "_type": "https://in-toto.io/Statement/v0.1", + "subject": [ + { + "name": "foo.txt", + "digest": {"sha256": "abcxyz123456"}, + }, + ], + "predicateType": "https://slsa.dev/provenance/v0.2", + "predicate": [], + }, + id="Invalid 'predicate'", + ), + ], +) +def test_validate_invalid_intoto_statement( + payload: dict[str, JsonType], +) -> None: + """Test validating invalid in-toto statements.""" + with pytest.raises(ValidateInTotoPayloadError): + validate_intoto_statement(payload) + + +@pytest.mark.parametrize( + ("subject_json"), + [ + pytest.param( + [], + id="Invalid subject entry", + ), + pytest.param( + { + "digest": {"sha256": "abcxyz123456"}, + }, + id="Missing 'name'", + ), + pytest.param( + { + "name": {}, + "digest": {"sha256": "abcxyz123456"}, + }, + id="Invalid 'name'", + ), + pytest.param( + { + "name": "foo.txt", + }, + id="Missing 'digest'", + ), + pytest.param( + { + "name": "foo.txt", + "digest": "digest", + }, + id="Invalid 'digest' 1", + ), + 
pytest.param( + { + "name": "foo.txt", + "digest": {"sha256": {}}, + }, + id="Invalid 'digest' 2", + ), + ], +) +def test_validate_invalid_subject( + subject_json: JsonType, +) -> None: + """Test validating invalid in-toto subjects.""" + with pytest.raises(ValidateInTotoPayloadError): + validate_intoto_subject(subject_json) diff --git a/tests/slsa_analyzer/provenance/test_witness_provenance.py b/tests/slsa_analyzer/provenance/test_witness_provenance.py new file mode 100644 index 000000000..de718a482 --- /dev/null +++ b/tests/slsa_analyzer/provenance/test_witness_provenance.py @@ -0,0 +1,180 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""Tests for witness provenance.""" + +import json +import os +from pathlib import Path + +import pytest + +from macaron.config.defaults import load_defaults +from macaron.slsa_analyzer.provenance.intoto import InTotoV01Payload, v01 +from macaron.slsa_analyzer.provenance.witness import ( + WitnessProvenanceSubject, + WitnessVerifierConfig, + extract_witness_provenance_subjects, + is_witness_provenance_payload, + load_witness_verifier_config, +) + + +@pytest.mark.parametrize( + ("user_config_input", "expected_verifier_config"), + [ + pytest.param( + "", + WitnessVerifierConfig( + predicate_types={"https://witness.testifysec.com/attestation-collection/v0.1"}, + artifact_extensions={"jar"}, + ), + id="Default config", + ), + pytest.param( + """ + [provenance.witness] + predicate_types = + https://witness.testifysec.com/attestation-collection/v0.2 + https://witness.testifysec.com/attestation-collection/v0.3 + artifact_extensions = + jar + bom + """, + WitnessVerifierConfig( + predicate_types={ + "https://witness.testifysec.com/attestation-collection/v0.2", + "https://witness.testifysec.com/attestation-collection/v0.3", + }, + artifact_extensions={"jar", "bom"}, + ), + id="Valid config", + ), + ], +) +def test_load_witness_predicate_types( + tmp_path: Path, + user_config_input: str, + expected_verifier_config: WitnessVerifierConfig, +) -> None: + """Test the ``load_witness_predicate_types`` function.""" + user_config_path = os.path.join(tmp_path, "config.ini") + with open(user_config_path, "w", encoding="utf-8") as user_config_file: + user_config_file.write(user_config_input) + load_defaults(user_config_path) + assert load_witness_verifier_config() == expected_verifier_config + + +@pytest.mark.parametrize( + ("payload_json", "predicate_types", "expected_result"), + [ + pytest.param( + json.loads( + """ + { "predicateType": "https://witness.testifysec.com/attestation-collection/v0.1" } + """ + ), + {"https://witness.testifysec.com/attestation-collection/v0.1"}, + True, + id="Valid predicateType", + ), + pytest.param( + json.loads( + """ + { "predicateType": "https://witness.net/attestation-collection/v0.1" } + """ + ), + {"https://witness.testifysec.com/attestation-collection/v0.1"}, + False, + id="Invalid predicateType", + ), + ], +) +def test_is_witness_provenance_payload( + payload_json: v01.InTotoStatement, + predicate_types: set[str], + expected_result: bool, +) -> None: + """Test the ``is_witness_provenance_payload`` function.""" + payload = InTotoV01Payload(statement=payload_json) + assert is_witness_provenance_payload(payload, predicate_types) == expected_result + + +@pytest.mark.parametrize( + ("payload_json", "expected_subjects"), + [ + pytest.param( + json.loads( + """ +{ + "subject": [ + { + "name": 
"https://witness.dev/attestations/product/v0.1/file:target/jackson-annotations-2.9.9.jar", + "digest": { + "sha256": "6f97fe2094bd50435d6fbb7a2f6c2638fe44e6af17cfff98ce111d0abfffe17e" + } + }, + { + "name": "https://witness.dev/attestations/product/v0.1/file:foo/bar/baz.txt", + "digest": { + "sha256": "cbc8f554dbfa17e5c5873c425a09cb1488c2f784ac52340747a92b7ec0aaefba" + } + } + ] +} + """ + ), + { + WitnessProvenanceSubject( + subject_name=( + "https://witness.dev/attestations/product/v0.1/file:target/jackson-annotations-2.9.9.jar" + ), + sha256_digest="6f97fe2094bd50435d6fbb7a2f6c2638fe44e6af17cfff98ce111d0abfffe17e", + ), + WitnessProvenanceSubject( + subject_name="https://witness.dev/attestations/product/v0.1/file:foo/bar/baz.txt", + sha256_digest="cbc8f554dbfa17e5c5873c425a09cb1488c2f784ac52340747a92b7ec0aaefba", + ), + }, + id="Valid payload", + ), + pytest.param( + json.loads( + """ +{ + "subject": [ + { + "name": "https://witness.dev/attestations/product/v0.1/file:target/jackson-annotations-2.9.9.jar", + "digest": { + "sha256": "6f97fe2094bd50435d6fbb7a2f6c2638fe44e6af17cfff98ce111d0abfffe17e" + } + }, + { + "name": "https://witness.dev/attestations/product/v0.1/file:foo/bar/baz.txt", + "digest": { + "sha1": "cbc8f554dbfa17e5c5873c425a09cb1488c2f784ac52340747a92b7ec0aaefba" + } + } + ] +} + """ + ), + { + WitnessProvenanceSubject( + subject_name=( + "https://witness.dev/attestations/product/v0.1/file:target/jackson-annotations-2.9.9.jar" + ), + sha256_digest="6f97fe2094bd50435d6fbb7a2f6c2638fe44e6af17cfff98ce111d0abfffe17e", + ), + }, + id="Missing sha256", + ), + ], +) +def test_extract_witness_provenances_subjects( + payload_json: v01.InTotoStatement, + expected_subjects: set[WitnessProvenanceSubject], +) -> None: + """Test the ``extract_witness_provenance_subjects`` function.""" + payload = InTotoV01Payload(statement=payload_json) + assert extract_witness_provenance_subjects(payload) == expected_subjects diff --git a/tests/slsa_analyzer/test_analyze_context.py b/tests/slsa_analyzer/test_analyze_context.py index d519d7abe..f2adeacdf 100644 --- a/tests/slsa_analyzer/test_analyze_context.py +++ b/tests/slsa_analyzer/test_analyze_context.py @@ -11,8 +11,10 @@ from macaron.code_analyzer.call_graph import BaseNode, CallGraph from macaron.slsa_analyzer.ci_service.github_actions import GitHubActions from macaron.slsa_analyzer.levels import SLSALevels +from macaron.slsa_analyzer.provenance.intoto import validate_intoto_payload from macaron.slsa_analyzer.slsa_req import Category, ReqName, SLSAReq from macaron.slsa_analyzer.specs.ci_spec import CIInfo +from macaron.util import JsonType from tests.conftest import MockAnalyzeContext @@ -78,12 +80,15 @@ def test_update_req_status(self) -> None: def test_provenances(self) -> None: """Test getting the provenances data from an AnalyzeContext instance.""" - expected_provenance = { - "predicate": "sample-predicate", + expected_provenance: dict[str, JsonType] = { "_type": "https://in-toto.io/Statement/v0.1", + "subject": [], "predicateType": "https://slsa.dev/provenance/v0.2", + "predicate": {}, } + expected_payload = validate_intoto_payload(expected_provenance) + gh_actions = GitHubActions() gh_actions_ci_info = CIInfo( @@ -92,7 +97,7 @@ def test_provenances(self) -> None: callgraph=CallGraph(BaseNode(), ""), provenance_assets=[], latest_release={}, - provenances=[expected_provenance], + provenances=[expected_payload], ) self.analyze_ctx.dynamic_data["ci_services"].append(gh_actions_ci_info)