diff --git a/README.md b/README.md index fb2e5da65..6897e6f1f 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ Macaron is a supply chain security analysis tool from [Oracle Labs](https://labs.oracle.com/pls/apex/r/labs/labs/intro), which focuses on the build integrity of an artifact and the artifact dependencies. It is based on the [Supply chain Levels for Software Artifacts (SLSA)](https://slsa.dev/) specification, which aims at preventing some of the software supply chain attacks as the systems get more complex, especially with respect to the use of open-source third-party code in applications. Attacks include stealing credentials, injecting malicious code etc., and it is critical to have security assurance on the third-party code to guarantee that the integrity of the code has not been compromised. -Macaron uses [SLSA requirements specifications v0.1](https://slsa.dev/spec/v0.1/requirements) to define concrete rules for protecting software integrity that can be checked for compliance requirements automatically. Macaron provides a customizable checker platform that makes it easy to define checks that depend on each other. This is particularly useful for implementing checks for SLSA levels. In addition, Macaron also checks a user-specified policy for the repository to detect unexpected behavior in the build process. Macaron is a work-in-progress project and currently supports Maven and Gradle Java build systems only. We plan to support build systems for other languages, such as Python in future. +Macaron uses [SLSA requirements specifications v0.1](https://slsa.dev/spec/v0.1/requirements) to define concrete rules for protecting software integrity that can be checked for compliance requirements automatically. Macaron provides a customizable checker platform that makes it easy to define checks that depend on each other. This is particularly useful for implementing checks for SLSA levels. 
In addition, Macaron also checks a user-specified policy for the repository to detect unexpected behavior in the build process. Macaron is a work-in-progress project and currently supports Maven and Gradle Java build systems. Support has also been added for Python projects that use Pip or Poetry as their package managers, minus dependency analysis. We plan to support build systems for other languages in future. ## Table of Contents diff --git a/src/macaron/config/defaults.ini b/src/macaron/config/defaults.ini index 50b716559..8a7507ab2 100644 --- a/src/macaron/config/defaults.ini +++ b/src/macaron/config/defaults.ini @@ -211,6 +211,59 @@ jenkins = gradle-git-publish gitPublishPush +# This is the spec for trusted Pip packaging tools. +[builder.pip] +entry_conf = +build_configs = + setup.py + setup.cfg + pyproject.toml +packager = + pip + pip3 + flit + conda +publisher = + twine + flit + conda +# These are the Python interpreters that may be used to load modules. +interpreter = + python + python3 +interpreter_flag = + -m +build_arg = + install + build + setup.py +deploy_arg = + publish + upload +[builder.pip.ci.deploy] +github_actions = pypa/gh-action-pypi-publish + +# This is the spec for trusted Poetry packaging tools. +[builder.poetry] +entry_conf = +build_configs = pyproject.toml +package_lock = poetry.lock +builder = + poetry + poetry-core +# These are the Python interpreters that may be used to load modules. +interpreter = + python + python3 +interpreter_flag = + -m +build_arg = + build +deploy_arg = + publish +[builder.poetry.ci.deploy] +github_actions = pypa/gh-action-pypi-publish + # This is the spec for GitHub Actions CI. 
[ci.github_actions] entry_conf = diff --git a/src/macaron/slsa_analyzer/build_tool/__init__.py b/src/macaron/slsa_analyzer/build_tool/__init__.py index 4fede9843..c80581919 100644 --- a/src/macaron/slsa_analyzer/build_tool/__init__.py +++ b/src/macaron/slsa_analyzer/build_tool/__init__.py @@ -1,4 +1,4 @@ -# Copyright (c) 2022 - 2022, Oracle and/or its affiliates. All rights reserved. +# Copyright (c) 2022 - 2023, Oracle and/or its affiliates. All rights reserved. # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. """The build_tool package contains the supported build tools for Macaron.""" @@ -6,7 +6,9 @@ from .base_build_tool import BaseBuildTool from .gradle import Gradle from .maven import Maven +from .pip import Pip +from .poetry import Poetry # The list of supported build tools. The order of the list determine the order # in which each build tool is checked against the target repository. -BUILD_TOOLS: list[BaseBuildTool] = [Gradle(), Maven()] +BUILD_TOOLS: list[BaseBuildTool] = [Gradle(), Maven(), Poetry(), Pip()] diff --git a/src/macaron/slsa_analyzer/build_tool/base_build_tool.py b/src/macaron/slsa_analyzer/build_tool/base_build_tool.py index 81fc728a8..2235b8e5d 100644 --- a/src/macaron/slsa_analyzer/build_tool/base_build_tool.py +++ b/src/macaron/slsa_analyzer/build_tool/base_build_tool.py @@ -55,7 +55,12 @@ def __init__(self, name: str) -> None: self.name = name self.entry_conf: list[str] = [] self.build_configs: list[str] = [] + self.package_lock: list[str] = [] self.builder: list[str] = [] + self.packager: list[str] = [] + self.publisher: list[str] = [] + self.interpreter: list[str] = [] + self.interpreter_flag: list[str] = [] self.build_arg: list[str] = [] self.deploy_arg: list[str] = [] self.ci_build_kws: dict[str, list[str]] = { diff --git a/src/macaron/slsa_analyzer/build_tool/pip.py b/src/macaron/slsa_analyzer/build_tool/pip.py new file mode 100644 index 000000000..4abdbd09b --- /dev/null +++ 
b/src/macaron/slsa_analyzer/build_tool/pip.py @@ -0,0 +1,89 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""This module contains the Pip class which inherits BaseBuildTool. + +This module is used to work with repositories that use pip for dependency management. +""" + +import logging + +from macaron.config.defaults import defaults +from macaron.dependency_analyzer import DependencyAnalyzer, NoneDependencyAnalyzer +from macaron.slsa_analyzer.build_tool.base_build_tool import BaseBuildTool, file_exists + +logger: logging.Logger = logging.getLogger(__name__) + + +class Pip(BaseBuildTool): + """This class contains the information of the pip build tool.""" + + def __init__(self) -> None: + """Initialize instance.""" + super().__init__(name="pip") + + def load_defaults(self) -> None: + """Load the default values from defaults.ini.""" + if "builder.pip" in defaults: + for item in defaults["builder.pip"]: + if hasattr(self, item): + setattr(self, item, defaults.get_list("builder.pip", item)) + + if "builder.pip.ci.deploy" in defaults: + for item in defaults["builder.pip.ci.deploy"]: + if item in self.ci_deploy_kws: + self.ci_deploy_kws[item] = defaults.get_list("builder.pip.ci.deploy", item) + + def is_detected(self, repo_path: str) -> bool: + """Return True if this build tool is used in the target repo. + + Parameters + ---------- + repo_path : str + The path to the target repo. + + Returns + ------- + bool + True if this build tool is detected, else False. + """ + for file in self.build_configs: + if file_exists(repo_path, file): + return True + return False + + def prepare_config_files(self, wrapper_path: str, build_dir: str) -> bool: + """Prepare the necessary wrapper files for running the build. + + This method returns False on errors. Pip doesn't require any preparation, therefore this method always + returns True. 
+ + Parameters + ---------- + wrapper_path : str + The path where all necessary wrapper files are located. + build_dir : str + The path of the build dir. This is where all files are copied to. + + Returns + ------- + bool + True if succeed else False. + """ + return True + + def get_dep_analyzer(self, repo_path: str) -> DependencyAnalyzer: + """Create a DependencyAnalyzer for the build tool. + + Parameters + ---------- + repo_path: str + The path to the target repo. + + Returns + ------- + DependencyAnalyzer + The DependencyAnalyzer object. + """ + # TODO: Implement this method. + return NoneDependencyAnalyzer() diff --git a/src/macaron/slsa_analyzer/build_tool/poetry.py b/src/macaron/slsa_analyzer/build_tool/poetry.py new file mode 100644 index 000000000..c101a368d --- /dev/null +++ b/src/macaron/slsa_analyzer/build_tool/poetry.py @@ -0,0 +1,125 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +"""This module contains the Poetry class which inherits BaseBuildTool. + +This module is used to work with repositories that use Poetry for dependency management. 
+""" + +import glob +import logging +import os +import tomllib +from pathlib import Path + +from macaron.config.defaults import defaults +from macaron.dependency_analyzer import DependencyAnalyzer, NoneDependencyAnalyzer +from macaron.slsa_analyzer.build_tool.base_build_tool import BaseBuildTool, file_exists + +logger: logging.Logger = logging.getLogger(__name__) + + +class Poetry(BaseBuildTool): + """This class contains the information of the poetry build tool.""" + + def __init__(self) -> None: + """Initialize instance.""" + super().__init__(name="poetry") + + def load_defaults(self) -> None: + """Load the default values from defaults.ini.""" + if "builder.poetry" in defaults: + for item in defaults["builder.poetry"]: + if hasattr(self, item): + setattr(self, item, defaults.get_list("builder.poetry", item)) + + if "builder.pip.ci.deploy" in defaults: + for item in defaults["builder.pip.ci.deploy"]: + if item in self.ci_deploy_kws: + self.ci_deploy_kws[item] = defaults.get_list("builder.pip.ci.deploy", item) + + def is_detected(self, repo_path: str) -> bool: + """Return True if this build tool is used in the target repo. + + Parameters + ---------- + repo_path : str + The path to the target repo. + + Returns + ------- + bool + True if this build tool is detected, else False. + """ + package_lock_exists = "" + for file in self.package_lock: + if file_exists(repo_path, file): + package_lock_exists = file + break + + for conf in self.build_configs: + # Find the paths of all pyproject.toml files. + pattern = os.path.join(repo_path, "**", conf) + files_detected = glob.glob(pattern, recursive=True) + + if files_detected: + # If a package_lock file exists, and a config file is present, Poetry build tool is detected. + if package_lock_exists: + return True + # TODO: this implementation assumes one build type, so when multiple build types are supported, this + # needs to be updated. 
+ # Take the highest level file, if there are two at the same level, take the first in the list. + file_path = min(files_detected, key=lambda x: len(Path(x).parts)) + try: + # Parse the .toml file + with open(file_path, "rb") as toml_file: + try: + data = tomllib.load(toml_file) + # Check for the existence of a [tool.poetry] section. + if ("tool" in data) and ("poetry" in data["tool"]): + return True + except tomllib.TOMLDecodeError: + logger.error("Failed to read the %s file: invalid toml file.", conf) + return False + return False + except FileNotFoundError: + logger.error("Failed to read the %s file.", conf) + return False + + return False + + def prepare_config_files(self, wrapper_path: str, build_dir: str) -> bool: + """Prepare the necessary wrapper files for running the build. + + This method returns False on errors. Poetry doesn't require any preparation, therefore this method always + returns True. + + Parameters + ---------- + wrapper_path : str + The path where all necessary wrapper files are located. + build_dir : str + The path of the build dir. This is where all files are copied to. + + Returns + ------- + bool + True if succeeds else False. + """ + return True + + def get_dep_analyzer(self, repo_path: str) -> DependencyAnalyzer: + """Create a DependencyAnalyzer for the build tool. + + Parameters + ---------- + repo_path: str + The path to the target repo. + + Returns + ------- + DependencyAnalyzer + The DependencyAnalyzer object. + """ + # TODO: Implement this method. 
+ return NoneDependencyAnalyzer() diff --git a/src/macaron/slsa_analyzer/checks/build_as_code_check.py b/src/macaron/slsa_analyzer/checks/build_as_code_check.py index a0fb19dc4..1f49af4c7 100644 --- a/src/macaron/slsa_analyzer/checks/build_as_code_check.py +++ b/src/macaron/slsa_analyzer/checks/build_as_code_check.py @@ -9,6 +9,7 @@ from sqlalchemy.orm import Mapped, mapped_column from sqlalchemy.sql.sqltypes import String +from macaron.config.defaults import defaults from macaron.database.database_manager import ORMBase from macaron.database.table_definitions import CheckFactsTable from macaron.slsa_analyzer.analyze_context import AnalyzeContext @@ -17,6 +18,7 @@ from macaron.slsa_analyzer.checks.check_result import CheckResult, CheckResultType from macaron.slsa_analyzer.ci_service.base_ci_service import NoneCIService from macaron.slsa_analyzer.ci_service.circleci import CircleCI +from macaron.slsa_analyzer.ci_service.github_actions import GHWorkflowType from macaron.slsa_analyzer.ci_service.gitlab_ci import GitLabCI from macaron.slsa_analyzer.ci_service.jenkins import Jenkins from macaron.slsa_analyzer.ci_service.travis import Travis @@ -64,7 +66,10 @@ def __init__(self) -> None: def _has_deploy_command(self, commands: list[list[str]], build_tool: BaseBuildTool) -> str: """Check if the bash command is a build and deploy command.""" + # Account for Python projects having separate tools for packaging and publishing. + deploy_tool = build_tool.publisher if build_tool.publisher else build_tool.builder for com in commands: + # Check for empty or invalid commands. 
if not com or not com[0]: continue @@ -75,13 +80,28 @@ def _has_deploy_command(self, commands: list[list[str]], build_tool: BaseBuildTo if not cmd_program_name: logger.debug("Found invalid program name %s.", com[0]) continue - if any(build_cmd for build_cmd in build_tool.builder if build_cmd == cmd_program_name): + + check_build_commands = any(build_cmd for build_cmd in deploy_tool if build_cmd == cmd_program_name) + + # Support the use of interpreters like Python that load modules, i.e., 'python -m pip install'. + check_module_build_commands = any( + interpreter == cmd_program_name + and com[1] + and com[1] in build_tool.interpreter_flag + and com[2] + and com[2] in deploy_tool + for interpreter in build_tool.interpreter + ) + prog_name_index = 2 if check_module_build_commands else 0 + + if check_build_commands or check_module_build_commands: # Check the arguments in the bash command for the deploy goals. # If there are no deploy args for this build tool, accept as deploy command. if not build_tool.deploy_arg: logger.info("No deploy arguments required. Accept %s as deploy command.", str(com)) return str(com) - for word in com[1:]: + + for word in com[(prog_name_index + 1) :]: # TODO: allow plugin versions in arguments, e.g., maven-plugin:1.6.8:deploy. if word in build_tool.deploy_arg: logger.info("Found deploy command %s.", str(com)) @@ -114,6 +134,75 @@ def run_check(self, ctx: AnalyzeContext, check_result: CheckResult) -> CheckResu # Checking if a CI service is discovered for this repo. if isinstance(ci_service, NoneCIService): continue + + trusted_deploy_actions = defaults.get_list("builder.pip.ci.deploy", "github_actions", fallback=[]) + + # Check for use of a trusted Github Actions workflow to publish/deploy. 
+ # TODO: verify that deployment is legitimate and not a test + if trusted_deploy_actions: + for callee in ci_info["callgraph"].bfs(): + workflow_name = callee.name.split("@")[0] + + if not workflow_name or callee.node_type not in [ + GHWorkflowType.EXTERNAL, + GHWorkflowType.REUSABLE, + ]: + logger.debug("Workflow %s is not relevant. Skipping...", callee.name) + continue + if workflow_name in trusted_deploy_actions: + trigger_link = ci_service.api_client.get_file_link( + ctx.repo_full_name, + ctx.commit_sha, + ci_service.api_client.get_relative_path_of_workflow( + os.path.basename(callee.caller_path) + ), + ) + deploy_action_source_link = ci_service.api_client.get_file_link( + ctx.repo_full_name, ctx.commit_sha, callee.caller_path + ) + + html_url = ci_service.has_latest_run_passed( + ctx.repo_full_name, + ctx.branch_name, + ctx.commit_sha, + ctx.commit_date, + os.path.basename(callee.caller_path), + ) + + # TODO: include in the justification multiple cases of external action usage + justification: list[str | dict[str, str]] = [ + { + f"The target repository uses build tool {build_tool.name}" + " to deploy": deploy_action_source_link, + "The build is triggered by": trigger_link, + }, + f"Deploy action: {workflow_name}", + {"The status of the build can be seen at": html_url} + if html_url + else "However, could not find a passing workflow run.", + ] + check_result["justification"].extend(justification) + if ctx.dynamic_data["is_inferred_prov"] and ci_info["provenances"]: + predicate = ci_info["provenances"][0]["predicate"] + predicate["buildType"] = f"Custom {ci_service.name}" + predicate["builder"]["id"] = deploy_action_source_link + predicate["invocation"]["configSource"][ + "uri" + ] = f"{ctx.remote_path}@refs/heads/{ctx.branch_name}" + predicate["invocation"]["configSource"]["digest"]["sha1"] = ctx.commit_sha + predicate["invocation"]["configSource"]["entryPoint"] = trigger_link + predicate["metadata"]["buildInvocationId"] = html_url + 
check_result["result_tables"] = [ + BuildAsCodeTable( + build_tool_name=build_tool.name, + ci_service_name=ci_service.name, + build_trigger=trigger_link, + deploy_command=workflow_name, + build_status_url=html_url, + ) + ] + return CheckResultType.PASSED + for bash_cmd in ci_info["bash_commands"]: deploy_cmd = self._has_deploy_command(bash_cmd["commands"], build_tool) if deploy_cmd: @@ -136,7 +225,7 @@ def run_check(self, ctx: AnalyzeContext, check_result: CheckResult) -> CheckResu os.path.basename(bash_cmd["CI_path"]), ) - justification: list[str | dict[str, str]] = [ + justification_cmd: list[str | dict[str, str]] = [ { f"The target repository uses build tool {build_tool.name} to deploy": bash_source_link, "The build is triggered by": trigger_link, @@ -146,7 +235,7 @@ def run_check(self, ctx: AnalyzeContext, check_result: CheckResult) -> CheckResu if html_url else "However, could not find a passing workflow run.", ] - check_result["justification"].extend(justification) + check_result["justification"].extend(justification_cmd) if ctx.dynamic_data["is_inferred_prov"] and ci_info["provenances"]: predicate = ci_info["provenances"][0]["predicate"] predicate["buildType"] = f"Custom {ci_service.name}" diff --git a/src/macaron/slsa_analyzer/checks/build_service_check.py b/src/macaron/slsa_analyzer/checks/build_service_check.py index 5ed8e8c51..deb6475a7 100644 --- a/src/macaron/slsa_analyzer/checks/build_service_check.py +++ b/src/macaron/slsa_analyzer/checks/build_service_check.py @@ -65,13 +65,30 @@ def _has_build_command(self, commands: list[list[str]], build_tool: BaseBuildToo if not cmd_program_name: logger.debug("Found invalid program name %s.", com[0]) continue - if any(build_cmd for build_cmd in build_tool.builder if build_cmd == cmd_program_name): + + builder = build_tool.packager if build_tool.packager else build_tool.builder + + check_build_commands = any(build_cmd for build_cmd in builder if build_cmd == cmd_program_name) + + # Support the use of interpreters 
like Python that load modules, i.e., 'python -m pip install'. + check_module_build_commands = any( + interpreter == cmd_program_name + and com[1] + and com[1] in build_tool.interpreter_flag + and com[2] + and com[2] in builder + for interpreter in build_tool.interpreter + ) + + prog_name_index = 2 if check_module_build_commands else 0 + + if check_build_commands or check_module_build_commands: # Check the arguments in the bash command for the build goals. # If there are no build args for this build tool, accept as build command. if not build_tool.build_arg: logger.info("No build arguments required. Accept %s as build command.", str(com)) return str(com) - for word in com[1:]: + for word in com[(prog_name_index + 1) :]: # TODO: allow plugin versions in arguments, e.g., maven-plugin:1.6.8:package. if word in build_tool.build_arg: logger.info("Found build command %s.", str(com)) diff --git a/tests/conftest.py b/tests/conftest.py index d3c042ec9..4c6496fa7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -10,6 +10,13 @@ from macaron.config.defaults import create_defaults, defaults, load_defaults from macaron.slsa_analyzer.build_tool.gradle import Gradle from macaron.slsa_analyzer.build_tool.maven import Maven +from macaron.slsa_analyzer.build_tool.pip import Pip +from macaron.slsa_analyzer.build_tool.poetry import Poetry +from macaron.slsa_analyzer.ci_service.circleci import CircleCI +from macaron.slsa_analyzer.ci_service.github_actions import GitHubActions +from macaron.slsa_analyzer.ci_service.gitlab_ci import GitLabCI +from macaron.slsa_analyzer.ci_service.jenkins import Jenkins +from macaron.slsa_analyzer.ci_service.travis import Travis # We need to pass fixture names as arguments to maintain an order. 
# pylint: disable=redefined-outer-name @@ -99,3 +106,145 @@ def gradle_tool(setup_test) -> Gradle: # type: ignore # pylint: disable=unused- gradle = Gradle() gradle.load_defaults() return gradle + + +@pytest.fixture(autouse=True) +def poetry_tool(setup_test) -> Poetry: # type: ignore # pylint: disable=unused-argument + """Create a Poetry tool instance. + + Parameters + ---------- + setup_test + Depends on setup_test fixture. + + Returns + ------- + Poetry + The Poetry instance. + """ + poetry = Poetry() + poetry.load_defaults() + return poetry + + +@pytest.fixture(autouse=True) +def pip_tool(setup_test) -> Pip: # type: ignore # pylint: disable=unused-argument + """Create a Pip tool instance. + + Parameters + ---------- + setup_test + Depends on setup_test fixture. + + Returns + ------- + Pip + The Pip instance. + """ + pip = Pip() + pip.load_defaults() + return pip + + +class MockGitHubActions(GitHubActions): + """Mock the GitHubActions class.""" + + def has_latest_run_passed( + self, repo_full_name: str, branch_name: str, commit_sha: str, commit_date: str, workflow: str + ) -> str: + return "run_feedback" + + +@pytest.fixture() +def github_actions_service(setup_test) -> GitHubActions: # type: ignore # pylint: disable=unused-argument + """Create a GitHub Actions service instance. + + Parameters + ---------- + setup_test + Depends on setup_test fixture. + + Returns + ------- + GitHub Actions + The GitHub Actions instance. + """ + github_actions = MockGitHubActions() + github_actions.load_defaults() + return github_actions + + +@pytest.fixture() +def jenkins_service(setup_test): # type: ignore # pylint: disable=unused-argument + """Create a Jenkins service instance. + + Parameters + ---------- + setup_test + Depends on setup_test fixture. + + Returns + ------- + Jenkins + The Jenkins instance. 
+ """ + jenkins = Jenkins() + jenkins.load_defaults() + return jenkins + + +@pytest.fixture() +def travis_service(setup_test): # type: ignore # pylint: disable=unused-argument + """Create a Travis CI service instance. + + Parameters + ---------- + setup_test + Depends on setup_test fixture. + + Returns + ------- + Travis + The Travis CI instance. + """ + travis = Travis() + travis.load_defaults() + return travis + + +@pytest.fixture() +def circle_ci_service(setup_test): # type: ignore # pylint: disable=unused-argument + """Create a CircleCI service instance. + + Parameters + ---------- + setup_test + Depends on setup_test fixture. + + Returns + ------- + CircleCI + The CircleCI instance. + """ + circle_ci = CircleCI() + circle_ci.load_defaults() + return circle_ci + + +@pytest.fixture() +def gitlab_ci_service(setup_test): # type: ignore # pylint: disable=unused-argument + """Create a GitlabCI service instance. + + Parameters + ---------- + setup_test + Depends on setup_test fixture. + + Returns + ------- + GitlabCI + The GitlabCI instance. 
+ """ + gitlab_ci = GitLabCI() + gitlab_ci.load_defaults() + return gitlab_ci diff --git a/tests/e2e/expected_results/urllib3/urllib3.json b/tests/e2e/expected_results/urllib3/urllib3.json index d1f990c51..e21085ea4 100644 --- a/tests/e2e/expected_results/urllib3/urllib3.json +++ b/tests/e2e/expected_results/urllib3/urllib3.json @@ -1,6 +1,6 @@ { "metadata": { - "timestamps": "2022-11-09 20:35:52" + "timestamps": "2023-03-17 13:53:32" }, "target": { "info": { @@ -20,28 +20,28 @@ "predicateType": "https://slsa.dev/provenance/v0.2", "subject": [ { - "name": "urllib3-1.26.12-py2.py3-none-any.whl", + "name": "urllib3-1.26.15-py2.py3-none-any.whl", "digest": { - "sha256": "b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997" + "sha256": "aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42" } }, { - "name": "urllib3-1.26.12.tar.gz", + "name": "urllib3-1.26.15.tar.gz", "digest": { - "sha256": "3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e" + "sha256": "8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305" } } ], "predicate": { "builder": { - "id": "https://github.com/slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@refs/tags/v1.2.0" + "id": "https://github.com/slsa-framework/slsa-github-generator/.github/workflows/generator_generic_slsa3.yml@refs/tags/v1.2.1" }, - "buildType": "https://github.com/slsa-framework/slsa-github-generator@v1", + "buildType": "https://github.com/slsa-framework/slsa-github-generator/generic@v1", "invocation": { "configSource": { - "uri": "git+https://github.com/urllib3/urllib3@refs/tags/1.26.12", + "uri": "git+https://github.com/urllib3/urllib3@refs/tags/1.26.15", "digest": { - "sha1": "a5b29ac1025f9bb30f2c9b756f3b171389c2c039" + "sha1": "95ca35211d23d8baf7646e1f60aa31e3650178a8" }, "entryPoint": ".github/workflows/publish.yml" }, @@ -52,31 +52,31 @@ "github_base_ref": "", "github_event_name": "push", "github_event_payload": { - 
"after": "c128a2928b839dddeaa6000b21d4f2583e69a164", + "after": "e8b707c05e8c9f93d9681ae9d042fbec92c08e8c", "base_ref": null, "before": "0000000000000000000000000000000000000000", "commits": [], - "compare": "https://github.com/urllib3/urllib3/compare/1.26.12", + "compare": "https://github.com/urllib3/urllib3/compare/1.26.15", "created": true, "deleted": false, "forced": false, "head_commit": { "author": { - "email": "seth.larson@elastic.co", - "name": "Seth Michael Larson", - "username": "sethmlarson" + "email": "68963309+nickwilliams-zaxiom@users.noreply.github.com", + "name": "Nick Williams", + "username": "nickwilliams-zaxiom" }, "committer": { - "email": "sethmichaellarson@gmail.com", - "name": "Seth Michael Larson", - "username": "sethmlarson" + "email": "noreply@github.com", + "name": "GitHub", + "username": "web-flow" }, "distinct": true, - "id": "a5b29ac1025f9bb30f2c9b756f3b171389c2c039", - "message": "Add outputs.hashes to build action", - "timestamp": "2022-08-22T08:14:45-05:00", - "tree_id": "e62873ad556d245d3f06dc46019044979253068b", - "url": "https://github.com/urllib3/urllib3/commit/a5b29ac1025f9bb30f2c9b756f3b171389c2c039" + "id": "95ca35211d23d8baf7646e1f60aa31e3650178a8", + "message": "Release 1.26.15\n\nCo-authored-by: Seth Michael Larson ", + "timestamp": "2023-03-10T17:50:17-06:00", + "tree_id": "2ba76cf12ce448956051758b665287ea40c1a8bf", + "url": "https://github.com/urllib3/urllib3/commit/95ca35211d23d8baf7646e1f60aa31e3650178a8" }, "organization": { "avatar_url": "https://avatars.githubusercontent.com/u/26825299?v=4", @@ -96,7 +96,7 @@ "email": "sethmichaellarson@gmail.com", "name": "sethmlarson" }, - "ref": "refs/tags/1.26.12", + "ref": "refs/tags/1.26.15", "repository": { "allow_forking": true, "archive_url": "https://api.github.com/repos/urllib3/urllib3/{archive_format}{/ref}", @@ -119,14 +119,15 @@ "downloads_url": "https://api.github.com/repos/urllib3/urllib3/downloads", "events_url": 
"https://api.github.com/repos/urllib3/urllib3/events", "fork": false, - "forks": 989, - "forks_count": 989, + "forks": 1033, + "forks_count": 1033, "forks_url": "https://api.github.com/repos/urllib3/urllib3/forks", "full_name": "urllib3/urllib3", "git_commits_url": "https://api.github.com/repos/urllib3/urllib3/git/commits{/sha}", "git_refs_url": "https://api.github.com/repos/urllib3/urllib3/git/refs{/sha}", "git_tags_url": "https://api.github.com/repos/urllib3/urllib3/git/tags{/sha}", "git_url": "git://github.com/urllib3/urllib3.git", + "has_discussions": false, "has_downloads": true, "has_issues": true, "has_pages": false, @@ -158,8 +159,8 @@ "name": "urllib3", "node_id": "MDEwOlJlcG9zaXRvcnkyNDEwNjc2", "notifications_url": "https://api.github.com/repos/urllib3/urllib3/notifications{?since,all,participating}", - "open_issues": 110, - "open_issues_count": 110, + "open_issues": 120, + "open_issues_count": 120, "organization": "urllib3", "owner": { "avatar_url": "https://avatars.githubusercontent.com/u/26825299?v=4", @@ -185,12 +186,12 @@ }, "private": false, "pulls_url": "https://api.github.com/repos/urllib3/urllib3/pulls{/number}", - "pushed_at": 1661174126, + "pushed_at": 1678492341, "releases_url": "https://api.github.com/repos/urllib3/urllib3/releases{/id}", - "size": 6500, + "size": 7069, "ssh_url": "git@github.com:urllib3/urllib3.git", - "stargazers": 3131, - "stargazers_count": 3131, + "stargazers": 3302, + "stargazers_count": 3302, "stargazers_url": "https://api.github.com/repos/urllib3/urllib3/stargazers", "statuses_url": "https://api.github.com/repos/urllib3/urllib3/statuses/{sha}", "subscribers_url": "https://api.github.com/repos/urllib3/urllib3/subscribers", @@ -205,11 +206,11 @@ "urllib3" ], "trees_url": "https://api.github.com/repos/urllib3/urllib3/git/trees{/sha}", - "updated_at": "2022-08-22T06:37:09Z", + "updated_at": "2023-03-10T11:51:02Z", "url": "https://github.com/urllib3/urllib3", "visibility": "public", - "watchers": 3131, - 
"watchers_count": 3131, + "watchers": 3302, + "watchers_count": 3302, "web_commit_signoff_required": false }, "sender": { @@ -234,19 +235,19 @@ } }, "github_head_ref": "", - "github_ref": "refs/tags/1.26.12", + "github_ref": "refs/tags/1.26.15", "github_ref_type": "tag", "github_repository_id": "2410676", "github_repository_owner": "urllib3", "github_repository_owner_id": "26825299", "github_run_attempt": "1", - "github_run_id": "2904159641", - "github_run_number": "5", - "github_sha1": "a5b29ac1025f9bb30f2c9b756f3b171389c2c039" + "github_run_id": "4389408691", + "github_run_number": "12", + "github_sha1": "95ca35211d23d8baf7646e1f60aa31e3650178a8" } }, "metadata": { - "buildInvocationID": "2904159641-1", + "buildInvocationID": "4389408691-1", "completeness": { "parameters": true, "environment": false, @@ -256,9 +257,9 @@ }, "materials": [ { - "uri": "git+https://github.com/urllib3/urllib3@refs/tags/1.26.12", + "uri": "git+https://github.com/urllib3/urllib3@refs/tags/1.26.15", "digest": { - "sha1": "a5b29ac1025f9bb30f2c9b756f3b171389c2c039" + "sha1": "95ca35211d23d8baf7646e1f60aa31e3650178a8" } } ] @@ -270,12 +271,50 @@ "checks": { "summary": { "DISABLED": 0, - "FAILED": 5, - "PASSED": 3, + "FAILED": 2, + "PASSED": 6, "SKIPPED": 0, "UNKNOWN": 0 }, "results": [ + { + "check_id": "mcn_build_as_code_1", + "check_description": "The build definition and configuration executed by the build service is verifiably derived from text file definitions stored in a version control system.", + "slsa_requirements": [ + "Build as code - SLSA Level 3" + ], + "justification": [ + { + "The target repository uses build tool pip to deploy": "https://github.com/urllib3/urllib3/blob/87a0ecee6e691fe5ff93cd000c0158deebef763b/.github/workflows/publish.yml", + "The build is triggered by": "https://github.com/urllib3/urllib3/blob/87a0ecee6e691fe5ff93cd000c0158deebef763b/.github/workflows/publish.yml" + }, + "Deploy action: pypa/gh-action-pypi-publish", + "However, could not 
find a passing workflow run." + ], + "result_type": "PASSED" + }, + { + "check_id": "mcn_build_script_1", + "check_description": "Check if the target repo has a valid build script.", + "slsa_requirements": [ + "Scripted Build - SLSA Level 1" + ], + "justification": [ + "Check mcn_build_script_1 is set to PASSED because mcn_build_service_1 PASSED." + ], + "result_type": "PASSED" + }, + { + "check_id": "mcn_build_service_1", + "check_description": "Check if the target repo has a valid build service.", + "slsa_requirements": [ + "Build service - SLSA Level 2" + ], + "justification": [ + "Check mcn_build_service_1 is set to PASSED because mcn_build_as_code_1 PASSED." + ], + "result_type": "PASSED" + }, { "check_id": "mcn_provenance_available_1", "check_description": "Check whether the target has intoto provenance.", @@ -287,7 +326,7 @@ ], "justification": [ "Found provenance in release assets:", - "urllib3.intoto.jsonl" + "multiple.intoto.jsonl" ], "result_type": "PASSED" }, @@ -301,9 +340,8 @@ "Provenance content - Identifies source code - SLSA Level 2" ], "justification": [ - "Successfully verified level 3 provenance for the following artifacts", - "urllib3-1.26.12-py2.py3-none-any.whl.", - "urllib3-1.26.12.tar.gz." + "Successfully verified level 3: ", + "verify passed: urllib3-1.26.15-py2.py3-none-any.whl,verify passed: urllib3-1.26.15.tar.gz" ], "result_type": "PASSED" }, @@ -320,39 +358,6 @@ ], "result_type": "PASSED" }, - { - "check_id": "mcn_build_as_code_1", - "check_description": "The build definition and configuration executed by the build service is verifiably derived from text file definitions stored in a version control system.", - "slsa_requirements": [ - "Build as code - SLSA Level 3" - ], - "justification": [ - "The target repository does not have a build tool." 
- ], - "result_type": "FAILED" - }, - { - "check_id": "mcn_build_script_1", - "check_description": "Check if the target repo has a valid build script.", - "slsa_requirements": [ - "Scripted Build - SLSA Level 1" - ], - "justification": [ - "The target repository does not have a build tool." - ], - "result_type": "FAILED" - }, - { - "check_id": "mcn_build_service_1", - "check_description": "Check if the target repo has a valid build service.", - "slsa_requirements": [ - "Build service - SLSA Level 2" - ], - "justification": [ - "The target repository does not have a build service." - ], - "result_type": "FAILED" - }, { "check_id": "mcn_policy_check_1", "check_description": "Check whether the SLSA provenance for the produced artifact conforms to the policy.", @@ -360,7 +365,7 @@ "Policy - SLSA Level 3" ], "justification": [ - "Could not verify policy against the provenance." + "No policy defined for repository." ], "result_type": "FAILED" }, @@ -386,15 +391,15 @@ "unique_dep_repos": 0, "checks_summary": [ { - "check_id": "mcn_policy_check_1", + "check_id": "mcn_trusted_builder_level_three_1", "num_deps_pass": 0 }, { - "check_id": "mcn_provenance_available_1", + "check_id": "mcn_build_script_1", "num_deps_pass": 0 }, { - "check_id": "mcn_build_as_code_1", + "check_id": "mcn_build_service_1", "num_deps_pass": 0 }, { @@ -402,19 +407,19 @@ "num_deps_pass": 0 }, { - "check_id": "mcn_trusted_builder_level_three_1", + "check_id": "mcn_provenance_available_1", "num_deps_pass": 0 }, { - "check_id": "mcn_build_script_1", + "check_id": "mcn_provenance_level_three_1", "num_deps_pass": 0 }, { - "check_id": "mcn_provenance_level_three_1", + "check_id": "mcn_build_as_code_1", "num_deps_pass": 0 }, { - "check_id": "mcn_build_service_1", + "check_id": "mcn_policy_check_1", "num_deps_pass": 0 } ], diff --git a/tests/slsa_analyzer/build_tool/__snapshots__/test_poetry.ambr b/tests/slsa_analyzer/build_tool/__snapshots__/test_poetry.ambr new file mode 100644 index 000000000..24a7fc494 --- 
/dev/null +++ b/tests/slsa_analyzer/build_tool/__snapshots__/test_poetry.ambr @@ -0,0 +1,15 @@ +# serializer version: 1 +# name: test_get_build_dirs[mock_repo0] + list([ + PosixPath('.'), + ]) +# --- +# name: test_get_build_dirs[mock_repo1] + list([ + ]) +# --- +# name: test_get_build_dirs[mock_repo2] + list([ + PosixPath('.'), + ]) +# --- diff --git a/tests/slsa_analyzer/build_tool/mock_repos/poetry_repos/has_poetry_lock/poetry.lock b/tests/slsa_analyzer/build_tool/mock_repos/poetry_repos/has_poetry_lock/poetry.lock new file mode 100644 index 000000000..e69de29bb diff --git a/tests/slsa_analyzer/build_tool/mock_repos/poetry_repos/has_poetry_lock/pyproject.toml b/tests/slsa_analyzer/build_tool/mock_repos/poetry_repos/has_poetry_lock/pyproject.toml new file mode 100644 index 000000000..19aeac023 --- /dev/null +++ b/tests/slsa_analyzer/build_tool/mock_repos/poetry_repos/has_poetry_lock/pyproject.toml @@ -0,0 +1,2 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. diff --git a/tests/slsa_analyzer/build_tool/mock_repos/poetry_repos/no_poetry/pyproject.toml b/tests/slsa_analyzer/build_tool/mock_repos/poetry_repos/no_poetry/pyproject.toml new file mode 100644 index 000000000..19aeac023 --- /dev/null +++ b/tests/slsa_analyzer/build_tool/mock_repos/poetry_repos/no_poetry/pyproject.toml @@ -0,0 +1,2 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. 
diff --git a/tests/slsa_analyzer/build_tool/mock_repos/poetry_repos/no_poetry_lock/pyproject.toml b/tests/slsa_analyzer/build_tool/mock_repos/poetry_repos/no_poetry_lock/pyproject.toml new file mode 100644 index 000000000..07ab95b92 --- /dev/null +++ b/tests/slsa_analyzer/build_tool/mock_repos/poetry_repos/no_poetry_lock/pyproject.toml @@ -0,0 +1,6 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +[tool.poetry] +name = "repo" +version = "0.0.0" diff --git a/tests/slsa_analyzer/build_tool/test_poetry.py b/tests/slsa_analyzer/build_tool/test_poetry.py new file mode 100644 index 000000000..16ddd31f5 --- /dev/null +++ b/tests/slsa_analyzer/build_tool/test_poetry.py @@ -0,0 +1,39 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. 
+ +"""This module tests the Poetry build functions.""" + +from pathlib import Path + +import pytest + +from macaron.slsa_analyzer.build_tool.poetry import Poetry +from tests.slsa_analyzer.mock_git_utils import prepare_repo_for_testing + + +@pytest.mark.parametrize( + "mock_repo", + [ + Path(__file__).parent.joinpath("mock_repos", "poetry_repos", "has_poetry_lock"), + Path(__file__).parent.joinpath("mock_repos", "poetry_repos", "no_poetry"), + Path(__file__).parent.joinpath("mock_repos", "poetry_repos", "no_poetry_lock"), + ], +) +def test_get_build_dirs(snapshot: list, poetry_tool: Poetry, mock_repo: Path) -> None: + """Test discovering build directories.""" + assert list(poetry_tool.get_build_dirs(str(mock_repo))) == snapshot + + +@pytest.mark.parametrize( + ("mock_repo", "expected_value"), + [ + (Path(__file__).parent.joinpath("mock_repos", "poetry_repos", "has_poetry_lock"), True), + (Path(__file__).parent.joinpath("mock_repos", "poetry_repos", "no_poetry"), False), + (Path(__file__).parent.joinpath("mock_repos", "poetry_repos", "no_poetry_lock"), True), + ], +) +def test_poetry_build_tool(poetry_tool: Poetry, macaron_path: str, mock_repo: str, expected_value: bool) -> None: + """Test the Poetry build tool.""" + base_dir = Path(__file__).parent + repo = prepare_repo_for_testing(mock_repo, macaron_path, base_dir) + assert poetry_tool.is_detected(repo.git_obj.path) == expected_value diff --git a/tests/slsa_analyzer/checks/resources/github/workflow_files/pypi_publish.yaml b/tests/slsa_analyzer/checks/resources/github/workflow_files/pypi_publish.yaml new file mode 100644 index 000000000..320adf281 --- /dev/null +++ b/tests/slsa_analyzer/checks/resources/github/workflow_files/pypi_publish.yaml @@ -0,0 +1,24 @@ +# Copyright (c) 2023 - 2023, Oracle and/or its affiliates. All rights reserved. +# Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. + +name: Publish to PyPI + +on: + # For manual tests. 
+ workflow_dispatch: + push: + tags: + - '*' # triggers only if push new tag version, like `0.8.4`. + +permissions: read-all + +jobs: + publish: + permissions: + actions: read # For the detection of GitHub Actions environment. + id-token: write # For signing. + contents: write # For asset uploads. + uses: pypa/gh-action-pypi-publish@37f50c210e3d2f9450da2cd423303d6a14a6e29f + with: + user: __token__ + password: ${{ secrets.PYPI_TOKEN }} diff --git a/tests/slsa_analyzer/checks/test_build_as_code_check.py b/tests/slsa_analyzer/checks/test_build_as_code_check.py index 5c72d6346..af26ed1e4 100644 --- a/tests/slsa_analyzer/checks/test_build_as_code_check.py +++ b/tests/slsa_analyzer/checks/test_build_as_code_check.py @@ -1,149 +1,228 @@ -# Copyright (c) 2022 - 2022, Oracle and/or its affiliates. All rights reserved. +# Copyright (c) 2022 - 2023, Oracle and/or its affiliates. All rights reserved. # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. 
"""This module contains the tests for the Build As Code Check.""" import os +from pathlib import Path from unittest.mock import MagicMock +import macaron from macaron.code_analyzer.call_graph import BaseNode, CallGraph +from macaron.parsers.actionparser import parse as parse_action from macaron.parsers.bashparser import BashCommands from macaron.slsa_analyzer.analyze_context import AnalyzeContext from macaron.slsa_analyzer.build_tool.gradle import Gradle from macaron.slsa_analyzer.build_tool.maven import Maven +from macaron.slsa_analyzer.build_tool.pip import Pip +from macaron.slsa_analyzer.build_tool.poetry import Poetry from macaron.slsa_analyzer.checks.build_as_code_check import BuildAsCodeCheck from macaron.slsa_analyzer.checks.check_result import CheckResult, CheckResultType from macaron.slsa_analyzer.ci_service.circleci import CircleCI -from macaron.slsa_analyzer.ci_service.github_actions import GitHubActions +from macaron.slsa_analyzer.ci_service.github_actions import GHWorkflowType, GitHubActions, GitHubNode from macaron.slsa_analyzer.ci_service.gitlab_ci import GitLabCI from macaron.slsa_analyzer.ci_service.jenkins import Jenkins from macaron.slsa_analyzer.ci_service.travis import Travis from macaron.slsa_analyzer.specs.ci_spec import CIInfo -from ...macaron_testcase import MacaronTestCase - - -class MockGitHubActions(GitHubActions): - """Mock the GitHubActions class.""" - - def has_latest_run_passed( - self, repo_full_name: str, branch_name: str, commit_sha: str, commit_date: str, workflow: str - ) -> str: - return "run_feedback" - - -class TestBuildAsCodeCheck(MacaronTestCase): - """Test the Build as Code Check.""" - - def test_build_as_code_check(self) -> None: - """Test the Build As Code Check.""" - check = BuildAsCodeCheck() - check_result = CheckResult(justification=[]) # type: ignore - maven = Maven() - maven.load_defaults() - gradle = Gradle() - gradle.load_defaults() - github_actions = MockGitHubActions() - github_actions.load_defaults() - jenkins 
= Jenkins() - jenkins.load_defaults() - travis = Travis() - travis.load_defaults() - circle_ci = CircleCI() - circle_ci.load_defaults() - gitlab_ci = GitLabCI() - gitlab_ci.load_defaults() - - bash_commands = BashCommands( - caller_path="source_file", CI_path="ci_file", CI_type="github_actions", commands=[[]] - ) - ci_info = CIInfo( - service=github_actions, - bash_commands=[bash_commands], - callgraph=CallGraph(BaseNode(), ""), - provenance_assets=[], - latest_release={}, - provenances=[], - ) - - # The target repo uses Maven build tool but does not deploy artifacts. - use_build_tool = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) - use_build_tool.dynamic_data["build_spec"]["tool"] = maven - assert check.run_check(use_build_tool, check_result) == CheckResultType.FAILED - - # The target repo uses Gradle build tool but does not deploy artifacts. - use_build_tool = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) - use_build_tool.dynamic_data["build_spec"]["tool"] = gradle - assert check.run_check(use_build_tool, check_result) == CheckResultType.FAILED - - # The target repo does not use a build tool. - no_build_tool = AnalyzeContext("no_build_tool", os.path.abspath("./"), MagicMock()) - assert check.run_check(no_build_tool, check_result) == CheckResultType.FAILED - - # Use mvn deploy to deploy the artifact. - maven_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) - maven_deploy.dynamic_data["build_spec"]["tool"] = maven - bash_commands["commands"] = [["mvn", "deploy"]] - maven_deploy.dynamic_data["ci_services"] = [ci_info] - assert check.run_check(maven_deploy, check_result) == CheckResultType.PASSED - - # Use the mvn in the local directory to deploy the artifact. - bash_commands["commands"] = [["./mvn", "deploy"]] - maven_deploy.dynamic_data["ci_services"] = [ci_info] - assert check.run_check(maven_deploy, check_result) == CheckResultType.PASSED - - # Use an invalid build command that has mvn. 
- bash_commands["commands"] = [["mvnblah", "deploy"]] - maven_deploy.dynamic_data["ci_services"] = [ci_info] - assert check.run_check(maven_deploy, check_result) == CheckResultType.FAILED - - # Use mvn but do not deploy artifacts. - no_maven_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) - no_maven_deploy.dynamic_data["build_spec"]["tool"] = maven - bash_commands["commands"] = [["mvn", "verify"]] - no_maven_deploy.dynamic_data["ci_services"] = [ci_info] - assert check.run_check(no_maven_deploy, check_result) == CheckResultType.FAILED - - # Use an invalid goal that has deploy keyword. - bash_commands["commands"] = [["mvnb", "deployblah"]] - no_maven_deploy.dynamic_data["ci_services"] = [ci_info] - assert check.run_check(no_maven_deploy, check_result) == CheckResultType.FAILED - - # Use gradle to deploy the artifact. - gradle_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) - gradle_deploy.dynamic_data["build_spec"]["tool"] = gradle - bash_commands["commands"] = [["./gradlew", "publishToSonatype"]] - gradle_deploy.dynamic_data["ci_services"] = [ci_info] - assert check.run_check(gradle_deploy, check_result) == CheckResultType.PASSED - - # Test Jenkins. - maven_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) - maven_deploy.dynamic_data["build_spec"]["tool"] = maven - ci_info["service"] = jenkins - bash_commands["commands"] = [] - maven_deploy.dynamic_data["ci_services"] = [ci_info] - assert check.run_check(maven_deploy, check_result) == CheckResultType.FAILED - - # Test Travis. - maven_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) - maven_deploy.dynamic_data["build_spec"]["tool"] = maven - ci_info["service"] = travis - bash_commands["commands"] = [] - maven_deploy.dynamic_data["ci_services"] = [ci_info] - assert check.run_check(maven_deploy, check_result) == CheckResultType.FAILED - - # Test Circle CI. 
- maven_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) - maven_deploy.dynamic_data["build_spec"]["tool"] = maven - ci_info["service"] = circle_ci - bash_commands["commands"] = [] - maven_deploy.dynamic_data["ci_services"] = [ci_info] - assert check.run_check(maven_deploy, check_result) == CheckResultType.FAILED - - # Test GitLab CI. - maven_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) - maven_deploy.dynamic_data["build_spec"]["tool"] = maven - ci_info["service"] = gitlab_ci - bash_commands["commands"] = [] - maven_deploy.dynamic_data["ci_services"] = [ci_info] - assert check.run_check(maven_deploy, check_result) == CheckResultType.FAILED + +def test_build_as_code_check( + maven_tool: Maven, + gradle_tool: Gradle, + poetry_tool: Poetry, + pip_tool: Pip, + github_actions_service: GitHubActions, + jenkins_service: Jenkins, + travis_service: Travis, + circle_ci_service: CircleCI, + gitlab_ci_service: GitLabCI, +) -> None: + """Test the Build As Code Check.""" + check = BuildAsCodeCheck() + check_result = CheckResult(justification=[]) # type: ignore + bash_commands = BashCommands(caller_path="source_file", CI_path="ci_file", CI_type="github_actions", commands=[[]]) + ci_info = CIInfo( + service=github_actions_service, + bash_commands=[bash_commands], + callgraph=CallGraph(BaseNode(), ""), + provenance_assets=[], + latest_release={}, + provenances=[], + ) + + # The target repo uses Maven build tool but does not deploy artifacts. + use_build_tool = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + use_build_tool.dynamic_data["build_spec"]["tool"] = maven_tool + assert check.run_check(use_build_tool, check_result) == CheckResultType.FAILED + + # The target repo uses Gradle build tool but does not deploy artifacts. 
+ use_build_tool = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + use_build_tool.dynamic_data["build_spec"]["tool"] = gradle_tool + assert check.run_check(use_build_tool, check_result) == CheckResultType.FAILED + + # The target repo uses Poetry build tool but does not deploy artifacts. + use_build_tool = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + use_build_tool.dynamic_data["build_spec"]["tool"] = poetry_tool + assert check.run_check(use_build_tool, check_result) == CheckResultType.FAILED + + # The target repo uses Pip build tool but does not deploy artifacts. + use_build_tool = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + use_build_tool.dynamic_data["build_spec"]["tool"] = pip_tool + assert check.run_check(use_build_tool, check_result) == CheckResultType.FAILED + + # The target repo does not use a build tool. + no_build_tool = AnalyzeContext("no_build_tool", os.path.abspath("./"), MagicMock()) + assert check.run_check(no_build_tool, check_result) == CheckResultType.FAILED + + # Use mvn deploy to deploy the artifact. + maven_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + maven_deploy.dynamic_data["build_spec"]["tool"] = maven_tool + bash_commands["commands"] = [["mvn", "deploy"]] + maven_deploy.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(maven_deploy, check_result) == CheckResultType.PASSED + + # Use the mvn in the local directory to deploy the artifact. + bash_commands["commands"] = [["./mvn", "deploy"]] + maven_deploy.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(maven_deploy, check_result) == CheckResultType.PASSED + + # Use an invalid build command that has mvn. + bash_commands["commands"] = [["mvnblah", "deploy"]] + maven_deploy.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(maven_deploy, check_result) == CheckResultType.FAILED + + # Use mvn but do not deploy artifacts. 
+ no_maven_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + no_maven_deploy.dynamic_data["build_spec"]["tool"] = maven_tool + bash_commands["commands"] = [["mvn", "verify"]] + no_maven_deploy.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(no_maven_deploy, check_result) == CheckResultType.FAILED + + # Use an invalid goal that has deploy keyword. + bash_commands["commands"] = [["mvnb", "deployblah"]] + no_maven_deploy.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(no_maven_deploy, check_result) == CheckResultType.FAILED + + # Use gradle to deploy the artifact. + gradle_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + gradle_deploy.dynamic_data["build_spec"]["tool"] = gradle_tool + bash_commands["commands"] = [["./gradlew", "publishToSonatype"]] + gradle_deploy.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(gradle_deploy, check_result) == CheckResultType.PASSED + + # Use poetry publish to publish the artifact. + poetry_publish = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + poetry_publish.dynamic_data["build_spec"]["tool"] = poetry_tool + bash_commands["commands"] = [["poetry", "publish"]] + poetry_publish.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(poetry_publish, check_result) == CheckResultType.PASSED + + # Use Poetry but do not deploy artifacts. + no_poetry_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + no_poetry_deploy.dynamic_data["build_spec"]["tool"] = poetry_tool + bash_commands["commands"] = [["poetry", "upload"]] + no_poetry_deploy.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(no_poetry_deploy, check_result) == CheckResultType.FAILED + + # Use twine upload to deploy the artifact. 
+ twine_upload = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + twine_upload.dynamic_data["build_spec"]["tool"] = pip_tool + bash_commands["commands"] = [["twine", "upload", "dist/*"]] + twine_upload.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(twine_upload, check_result) == CheckResultType.PASSED + + # Use flit publish to deploy the artifact. + flit_publish = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + flit_publish.dynamic_data["build_spec"]["tool"] = pip_tool + bash_commands["commands"] = [["flit", "publish"]] + flit_publish.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(flit_publish, check_result) == CheckResultType.PASSED + + # Test Jenkins. + maven_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + maven_deploy.dynamic_data["build_spec"]["tool"] = maven_tool + ci_info["service"] = jenkins_service + bash_commands["commands"] = [] + maven_deploy.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(maven_deploy, check_result) == CheckResultType.FAILED + + # Test Travis. + maven_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + maven_deploy.dynamic_data["build_spec"]["tool"] = maven_tool + ci_info["service"] = travis_service + bash_commands["commands"] = [] + maven_deploy.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(maven_deploy, check_result) == CheckResultType.FAILED + + # Test Circle CI. + maven_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + maven_deploy.dynamic_data["build_spec"]["tool"] = maven_tool + ci_info["service"] = circle_ci_service + bash_commands["commands"] = [] + maven_deploy.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(maven_deploy, check_result) == CheckResultType.FAILED + + # Test GitLab CI. 
+ maven_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + maven_deploy.dynamic_data["build_spec"]["tool"] = maven_tool + ci_info["service"] = gitlab_ci_service + bash_commands["commands"] = [] + maven_deploy.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(maven_deploy, check_result) == CheckResultType.FAILED + + +def test_gha_workflow_deployment( + pip_tool: Pip, + github_actions_service: GitHubActions, +) -> None: + """Test the use of verified GitHub Actions to deploy.""" + check = BuildAsCodeCheck() + check_result = CheckResult(justification=[]) # type: ignore + ci_info = CIInfo( + service=github_actions_service, + bash_commands=[], + callgraph=CallGraph(BaseNode(), ""), + provenance_assets=[], + latest_release={}, + provenances=[], + ) + + workflows_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "resources", "github", "workflow_files") + + # This Github Actions workflow uses gh-action-pypi-publish to publish the artifact. + gha_deploy = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + gha_deploy.dynamic_data["build_spec"]["tool"] = pip_tool + gha_deploy.dynamic_data["ci_services"] = [ci_info] + + root = GitHubNode(name="root", node_type=GHWorkflowType.NONE, source_path="", parsed_obj={}, caller_path="") + gh_cg = CallGraph(root, "") + workflow_path = os.path.join(workflows_dir, "pypi_publish.yaml") + parsed_obj = parse_action(workflow_path, macaron_path=str(Path(macaron.MACARON_PATH))) + callee = GitHubNode( + name=os.path.basename(workflow_path), + node_type=GHWorkflowType.INTERNAL, + source_path=workflow_path, + parsed_obj=parsed_obj, + caller_path="", + ) + root.add_callee(callee) + github_actions_service.build_call_graph_from_node(callee) + ci_info["callgraph"] = gh_cg + assert check.run_check(gha_deploy, check_result) == CheckResultType.PASSED + + # This Github Actions workflow is not using a trusted action to publish the artifact. 
+ root = GitHubNode(name="root", node_type=GHWorkflowType.NONE, source_path="", parsed_obj={}, caller_path="") + gh_cg = CallGraph(root, "") + workflow_path = os.path.join(workflows_dir, "pypi_publish_blah.yaml") + parsed_obj = parse_action(workflow_path, macaron_path=str(Path(macaron.MACARON_PATH))) + callee = GitHubNode( + name=os.path.basename(workflow_path), + node_type=GHWorkflowType.INTERNAL, + source_path=workflow_path, + parsed_obj=parsed_obj, + caller_path="", + ) + root.add_callee(callee) + github_actions_service.build_call_graph_from_node(callee) + ci_info["callgraph"] = gh_cg + assert check.run_check(gha_deploy, check_result) == CheckResultType.FAILED diff --git a/tests/slsa_analyzer/checks/test_build_service_check.py b/tests/slsa_analyzer/checks/test_build_service_check.py index d4d4cfbd2..3998225c1 100644 --- a/tests/slsa_analyzer/checks/test_build_service_check.py +++ b/tests/slsa_analyzer/checks/test_build_service_check.py @@ -1,4 +1,4 @@ -# Copyright (c) 2022 - 2022, Oracle and/or its affiliates. All rights reserved. +# Copyright (c) 2022 - 2023, Oracle and/or its affiliates. All rights reserved. # Licensed under the Universal Permissive License v 1.0 as shown at https://oss.oracle.com/licenses/upl/. 
"""This module contains the tests for the Build Service Check.""" @@ -11,6 +11,8 @@ from macaron.slsa_analyzer.analyze_context import AnalyzeContext from macaron.slsa_analyzer.build_tool.gradle import Gradle from macaron.slsa_analyzer.build_tool.maven import Maven +from macaron.slsa_analyzer.build_tool.pip import Pip +from macaron.slsa_analyzer.build_tool.poetry import Poetry from macaron.slsa_analyzer.checks.build_service_check import BuildServiceCheck from macaron.slsa_analyzer.checks.check_result import CheckResult, CheckResultType from macaron.slsa_analyzer.ci_service.circleci import CircleCI @@ -43,6 +45,10 @@ def test_build_service_check(self) -> None: maven.load_defaults() gradle = Gradle() gradle.load_defaults() + poetry = Poetry() + poetry.load_defaults() + pip = Pip() + pip.load_defaults() github_actions = MockGitHubActions() github_actions.load_defaults() jenkins = Jenkins() @@ -76,6 +82,16 @@ def test_build_service_check(self) -> None: use_build_tool.dynamic_data["build_spec"]["tool"] = gradle assert check.run_check(use_build_tool, check_result) == CheckResultType.FAILED + # The target repo uses Poetry build tool but does not use a service. + use_build_tool = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + use_build_tool.dynamic_data["build_spec"]["tool"] = poetry + assert check.run_check(use_build_tool, check_result) == CheckResultType.FAILED + + # The target repo uses Pip build tool but does not use a service. + use_build_tool = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + use_build_tool.dynamic_data["build_spec"]["tool"] = pip + assert check.run_check(use_build_tool, check_result) == CheckResultType.FAILED + # The target repo does not use a build tool. 
no_build_tool = AnalyzeContext("no_build_tool", os.path.abspath("./"), MagicMock()) assert check.run_check(no_build_tool, check_result) == CheckResultType.FAILED @@ -116,6 +132,48 @@ def test_build_service_check(self) -> None: gradle_build_ci.dynamic_data["ci_services"] = [ci_info] assert check.run_check(gradle_build_ci, check_result) == CheckResultType.PASSED + # Use poetry in CI to build the artifact. + poetry_build_ci = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + poetry_build_ci.dynamic_data["build_spec"]["tool"] = poetry + bash_commands["commands"] = [["poetry", "build"]] + poetry_build_ci.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(poetry_build_ci, check_result) == CheckResultType.PASSED + + # Use pip in CI to build the artifact. + pip_build_ci = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + pip_build_ci.dynamic_data["build_spec"]["tool"] = pip + bash_commands["commands"] = [["pip", "install"]] + pip_build_ci.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(pip_build_ci, check_result) == CheckResultType.PASSED + + # Use flit in CI to build the artifact. + flit_build_ci = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + flit_build_ci.dynamic_data["build_spec"]["tool"] = pip + bash_commands["commands"] = [["flit", "build"]] + flit_build_ci.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(flit_build_ci, check_result) == CheckResultType.PASSED + + # Use pip as a module in CI to build the artifact. 
+ pip_interpreter_build_ci = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + pip_interpreter_build_ci.dynamic_data["build_spec"]["tool"] = pip + bash_commands["commands"] = [["python", "-m", "pip", "install"]] + pip_interpreter_build_ci.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(pip_interpreter_build_ci, check_result) == CheckResultType.PASSED + + # Use pip as a module incorrectly in CI to build the artifact. + no_pip_interpreter_build_ci = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + no_pip_interpreter_build_ci.dynamic_data["build_spec"]["tool"] = pip + bash_commands["commands"] = [["python", "pip", "install"]] + no_pip_interpreter_build_ci.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(no_pip_interpreter_build_ci, check_result) == CheckResultType.FAILED + + # Use pip as a module in CI with invalid goal to build the artifact. + no_pip_interpreter_build_ci = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) + no_pip_interpreter_build_ci.dynamic_data["build_spec"]["tool"] = pip + bash_commands["commands"] = [["python", "-m", "pip", "installl"]] + no_pip_interpreter_build_ci.dynamic_data["ci_services"] = [ci_info] + assert check.run_check(no_pip_interpreter_build_ci, check_result) == CheckResultType.FAILED + # Test Jenkins. maven_build_ci = AnalyzeContext("use_build_tool", os.path.abspath("./"), MagicMock()) maven_build_ci.dynamic_data["build_spec"]["tool"] = maven