diff --git a/control/control.go b/control/control.go index b392c72ee7ce..15aba411dedc 100644 --- a/control/control.go +++ b/control/control.go @@ -35,6 +35,7 @@ import ( "github.com/moby/buildkit/solver/llbsolver" "github.com/moby/buildkit/solver/llbsolver/cdidevices" "github.com/moby/buildkit/solver/llbsolver/proc" + provenancetypes "github.com/moby/buildkit/solver/llbsolver/provenance/types" "github.com/moby/buildkit/solver/pb" "github.com/moby/buildkit/util/bklog" "github.com/moby/buildkit/util/db" @@ -508,7 +509,19 @@ func (c *Controller) Solve(ctx context.Context, req *controlapi.SolveRequest) (* } if attrs, ok := attests["provenance"]; ok { - procs = append(procs, proc.ProvenanceProcessor(attrs)) + var slsaVersion provenancetypes.ProvenanceSLSA + params := make(map[string]string) + for k, v := range attrs { + if k == "version" { + slsaVersion = provenancetypes.ProvenanceSLSA(v) + if err := slsaVersion.Validate(); err != nil { + return nil, err + } + } else { + params[k] = v + } + } + procs = append(procs, proc.ProvenanceProcessor(slsaVersion, params)) } resp, err := c.solver.Solve(ctx, req.Ref, req.Session, frontend.SolveRequest{ diff --git a/docs/attestations/slsa-definitions.md b/docs/attestations/slsa-definitions.md index 08b692df3d3b..3356048ff9d8 100644 --- a/docs/attestations/slsa-definitions.md +++ b/docs/attestations/slsa-definitions.md @@ -2,22 +2,436 @@ title: SLSA definitions --- -BuildKit supports the [creation of SLSA Provenance](./slsa-provenance.md) for builds that -it runs. +BuildKit supports the [creation of SLSA Provenance](./slsa-provenance.md) for +builds that it runs. The provenance format generated by BuildKit is defined by the -[SLSA Provenance format](https://slsa.dev/provenance/v0.2). +SLSA Provenance format (supports both [v0.2](https://slsa.dev/spec/v0.2/provenance) +and [v1](https://slsa.dev/spec/v1.1/provenance)). 
This page describes how BuildKit populate each field, and whether the field gets included when you generate attestations `mode=min` and `mode=max`. -## `builder.id` +## SLSA v1 -Corresponds to [SLSA `builder.id`](https://slsa.dev/provenance/v0.2#builder.id). +### `buildDefinition.buildType` + +* Ref: https://slsa.dev/spec/v1.1/provenance#buildType +* Included with `mode=min` and `mode=max`. + +The `buildDefinition.buildType` field is set to `https://github.com/moby/buildkit/blob/master/docs/attestations/slsa-definitions.md` +and can be used to determine the structure of the provenance content. + +```json + "buildDefinition": { + "buildType": "https://github.com/moby/buildkit/blob/master/docs/attestations/slsa-definitions.md", + ... + } +``` + +### `buildDefinition.externalParameters.configSource` + +* Ref: https://slsa.dev/spec/v1.1/provenance#externalParameters +* Included with `mode=min` and `mode=max`. + +Describes the config that initialized the build. + +```json + "buildDefinition": { + "externalParameters": { + "configSource": { + "uri": "https://github.com/moby/buildkit.git#refs/tags/v0.11.0", + "digest": { + "sha1": "4b220de5058abfd01ff619c9d2ff6b09a049bea0" + }, + "path": "Dockerfile" + }, + ... + }, + } +``` + +For builds initialized from a remote context, like a Git or HTTP URL, this +object defines the context URL and its immutable digest in the `uri` and +`digest` fields. For builds using a local frontend, such as a Dockerfile, the +`path` field defines the path for the frontend file that initialized the build +(`filename` frontend option). + +### `buildDefinition.externalParameters.request` + +* Ref: https://slsa.dev/spec/v1.1/provenance#externalParameters +* Partially included with `mode=min`. + +Describes build inputs passed to the build. 
+ +```json + "buildDefinition": { + "externalParameters": { + "request": { + "frontend": "gateway.v0", + "args": { + "build-arg:BUILDKIT_CONTEXT_KEEP_GIT_DIR": "1", + "label:FOO": "bar", + "source": "docker/dockerfile-upstream:master", + "target": "release" + }, + "secrets": [ + { + "id": "GIT_AUTH_HEADER", + "optional": true + }, + ... + ], + "ssh": [], + "locals": [] + }, + ... + }, + } +``` + +The following fields are included with both `mode=min` and `mode=max`: + +- `locals` lists any local sources used in the build, including the build + context and frontend file. +- `frontend` defines the type of BuildKit frontend used for the build. Currently, + this can be `dockerfile.v0` or `gateway.v0`. +- `args` defines the build arguments passed to the BuildKit frontend. + + The keys inside the `args` object reflect the options as BuildKit receives + them. For example, `build-arg` and `label` prefixes are used for build + arguments and labels, and `target` key defines the target stage that was + built. The `source` key defines the source image for the Gateway frontend, if + used. + +The following fields are only included with `mode=max`: + +- `secrets` defines secrets used during the build. Note that actual secret + values are not included. +- `ssh` defines the ssh forwards used during the build. + +### `buildDefinition.internalParameters.buildConfig` + +* Ref: https://slsa.dev/spec/v1.1/provenance#internalParameters +* Only included with `mode=max`. + +Defines the build steps performed during the build. + +BuildKit internally uses LLB definition to execute the build steps. The LLB +definition of the build steps is defined in the +`buildDefinition.internalParameters.buildConfig.llbDefinition` field. + +Each LLB step is the JSON definition of the +[LLB ProtoBuf API](https://github.com/moby/buildkit/blob/v0.10.0/solver/pb/ops.proto). +The dependencies for a vertex in the LLB graph can be found in the `inputs` +field for every step. 
+ +```json + "buildDefinition": { + "internalParameters": { + "buildConfig": { + "llbDefinition": [ + { + "id": "step0", + "op": { + "Op": { + "exec": { + "meta": { + "args": [ + "/bin/sh", + "-c", + "go build ." + ], + "env": [ + "PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin", + "GOPATH=/go", + "GOFLAGS=-mod=vendor", + ], + "cwd": "/src", + }, + "mounts": [...] + } + }, + "platform": {...}, + }, + "inputs": [ + "step8:0", + "step2:0", + ] + }, + ... + ] + }, + } + } +``` + +### `buildDefinition.internalParameters.builderPlatform` + +* Ref: https://slsa.dev/spec/v1.1/provenance#internalParameters +* Included with `mode=min` and `mode=max`. + +```json + "buildDefinition": { + "internalParameters": { + "builderPlatform": "linux/amd64" + ... + }, + } +``` + +BuildKit sets the `builderPlatform` of the build machine. Note that this is not +necessarily the platform of the build result that can be determined from the +`in-toto` subject field. + +### `buildDefinition.resolvedDependencies` + +* Ref: https://slsa.dev/spec/v1.1/provenance#resolvedDependencies +* Included with `mode=min` and `mode=max`. + +Defines all the external artifacts that were part of the build. The value +depends on the type of artifact: + +- The URL of Git repositories containing source code for the image +- HTTP URLs if you are building from a remote tarball, or that was included + using an `ADD` command in Dockerfile +- Any Docker images used during the build + +The URLs to the Docker images will be in +[Package URL](https://github.com/package-url/purl-spec) format. + +All the build materials will include the immutable checksum of the artifact. +When building from a mutable tag, you can use the digest information to +determine if the artifact has been updated compared to when the build ran. 
+ +```json + "buildDefinition": { + "resolvedDependencies": [ + { + "uri": "pkg:docker/alpine@3.17?platform=linux%2Famd64", + "digest": { + "sha256": "8914eb54f968791faf6a8638949e480fef81e697984fba772b3976835194c6d4" + } + }, + { + "uri": "https://github.com/moby/buildkit.git#refs/tags/v0.11.0", + "digest": { + "sha1": "4b220de5058abfd01ff619c9d2ff6b09a049bea0" + } + }, + ... + ], + ... + } +``` + +### `runDetails.builder.id` + +* Ref: https://slsa.dev/spec/v1.1/provenance#builder.id +* Included with `mode=min` and `mode=max`. + +The field is set to the URL of the build, if available. + +```json + "runDetails": { + "builder": { + "id": "https://github.com/docker/buildx/actions/runs/3709599520" + ... + }, + ... + } +``` + +> [!NOTE] +> This value can be set using the `builder-id` attestation parameter. + +### `runDetails.metadata.invocationID` + +* Ref: https://slsa.dev/spec/v1.1/provenance#invocationId +* Included with `mode=min` and `mode=max`. + +Unique identifier for the build invocation. When building a multi-platform image +with a single build request, this value will be shared by all the platform +versions of the image. + +```json + "runDetails": { + "metadata": { + "invocationID": "rpv7a389uzil5lqmrgwhijwjz", + ... + }, + ... + } +``` + +### `runDetails.metadata.startedOn` + +* Ref: https://slsa.dev/spec/v1.1/provenance#startedOn +* Included with `mode=min` and `mode=max`. + +Timestamp when the build started. + +```json + "runDetails": { + "metadata": { + "startedOn": "2021-11-17T15:00:00Z", + ... + }, + ... + } +``` + +### `runDetails.metadata.finishedOn` + +* Ref: https://slsa.dev/spec/v1.1/provenance#finishedOn +* Included with `mode=min` and `mode=max`. + +Timestamp when the build finished. + +```json + "runDetails": { + "metadata": { + "finishedOn": "2021-11-17T15:01:00Z", + ... + }, + } +``` + +### `runDetails.metadata.buildkit_metadata` + +* Ref: https://slsa.dev/spec/v1.1/provenance#extension-fields +* Partially included with `mode=min`. 
+ +This extension field defines BuildKit-specific additional metadata that is not +part of the SLSA provenance spec. + +```json + "runDetails": { + "metadata": { + "buildkit_metadata": { + "source": {...}, + "layers": {...}, + "vcs": {...}, + }, + ... + }, + } +``` + +#### `source` + +Only included with `mode=max`. + +Defines a source mapping of LLB build steps, defined in the +`buildDefinition.internalParameters.buildConfig.llbDefinition` field, to their +original source code (for example, Dockerfile commands). The `source.locations` +field contains the ranges of all the Dockerfile commands ran in an LLB step. +`source.infos` array contains the source code itself. This mapping is present +if the BuildKit frontend provided it when creating the LLB definition. + +#### `layers` + +Only included with `mode=max`. + +Defines the layer mapping of LLB build step mounts defined in +`buildDefinition.internalParameters.buildConfig.llbDefinition` to the OCI +descriptors of equivalent layers. This mapping is present if the layer data was +available, usually when attestation is for an image or if the build step pulled +in image data as part of the build. + +#### `vcs` Included with `mode=min` and `mode=max`. -The `builder.id` field is set to the URL of the build, if available. +Defines optional metadata for the version control system used for the build. If +a build uses a remote context from Git repository, BuildKit extracts the details +of the version control system automatically and displays it in the +`buildDefinition.externalParameters.configSource` field. But if the build uses +a source from a local directory, the VCS information is lost even if the +directory contained a Git repository. In this case, the build client can send +additional `vcs:source` and `vcs:revision` build options and BuildKit will add +them to the provenance attestations as extra metadata. 
Note that, contrary to +the `buildDefinition.externalParameters.configSource` field, BuildKit doesn't +verify the `vcs` values, and as such they can't be trusted and should only be +used as a metadata hint. + +### `runDetails.metadata.buildkit_hermetic` + +* Ref: https://slsa.dev/spec/v1.1/provenance#extension-fields +* Included with `mode=min` and `mode=max`. + +This extension field is set to true if the build was hermetic and did not access +the network. In Dockerfiles, a build is hermetic if it does not use `RUN` +commands or disables network with `--network=none` flag. + +```json + "runDetails": { + "metadata": { + "buildkit_hermetic": true, + ... + }, + } +``` + +### `runDetails.metadata.buildkit_completeness` + +* Ref: https://slsa.dev/spec/v1.1/provenance#extension-fields +* Included with `mode=min` and `mode=max`. + +This extension field defines if the provenance information is complete. It is +similar to `metadata.completeness` field in SLSA v0.2. + +`buildkit_completeness.request` is true if all the build arguments are included +in the `buildDefinition.externalParameters.request` field. When building with +`min` mode, the build arguments are not included in the provenance information +and request is not complete. Request is also not complete on direct LLB builds +that did not use a frontend. + +`buildkit_completeness.resolvedDependencies` is true if +`buildDefinition.resolvedDependencies` field includes all the dependencies of +the build. When building from un-tracked source in a local directory, the +dependencies are not complete, while when building from a remote Git repository +all dependencies can be tracked by BuildKit and +`buildkit_completeness.resolvedDependencies` is true. + +```json + "runDetails": { + "metadata": { + "buildkit_completeness": { + "request": true, + "resolvedDependencies": true + }, + ... 
+ }, + } +``` + +### `runDetails.metadata.buildkit_reproducible` + +* Ref: https://slsa.dev/spec/v1.1/provenance#extension-fields +* Included with `mode=min` and `mode=max`. + +This extension field defines if the build result is supposed to be byte-by-byte +reproducible. It is similar to `metadata.reproducible` field in SLSA v0.2. This +value can be set by the user with the `reproducible=true` attestation parameter. + +```json + "runDetails": { + "metadata": { + "buildkit_reproducible": false, + ... + }, + } +``` + +## SLSA v0.2 + +### `builder.id` + +* Ref: https://slsa.dev/spec/v0.2/provenance#builder.id +* Included with `mode=min` and `mode=max`. + +The field is set to the URL of the build, if available. ```json "builder": { @@ -25,15 +439,15 @@ The `builder.id` field is set to the URL of the build, if available. }, ``` -This value can be set using the `builder-id` attestation parameter. +> [!NOTE] +> This value can be set using the `builder-id` attestation parameter. ## `buildType` -Corresponds to [SLSA `buildType`](https://slsa.dev/provenance/v0.2#buildType). - -Included with `mode=min` and `mode=max`. +* Ref: https://slsa.dev/spec/v0.2/provenance#buildType +* Included with `mode=min` and `mode=max`. -The `buildType` field is set to `https://mobyproject.org/buildkit@v1` can be +The `buildType` field is set to `https://mobyproject.org/buildkit@v1` and can be used to determine the structure of the provenance content. ```json @@ -42,9 +456,8 @@ used to determine the structure of the provenance content. ## `invocation.configSource` -Corresponds to [SLSA `invocation.configSource`](https://slsa.dev/provenance/v0.2#invocation.configSource). - -Included with `mode=min` and `mode=max`. +* Ref: https://slsa.dev/spec/v0.2/provenance#invocation.configSource +* Included with `mode=min` and `mode=max`. Describes the config that initialized the build. @@ -62,15 +475,15 @@ Describes the config that initialized the build. 
``` For builds initialized from a remote context, like a Git or HTTP URL, this -object defines the context URL and its immutable digest in the `uri` and `digest` fields. -For builds using a local frontend, such as a Dockerfile, the `entryPoint` field defines the path -for the frontend file that initialized the build (`filename` frontend option). +object defines the context URL and its immutable digest in the `uri` and +`digest` fields. For builds using a local frontend, such as a Dockerfile, the +`entryPoint` field defines the path for the frontend file that initialized the +build (`filename` frontend option). ## `invocation.parameters` -Corresponds to [SLSA `invocation.parameters`](https://slsa.dev/provenance/v0.2#invocation.parameters). - -Partially included with `mode=min`. +* Ref: https://slsa.dev/spec/v0.2/provenance#invocation.parameters +* Partially included with `mode=min`. Describes build inputs passed to the build. @@ -120,9 +533,8 @@ The following fields are only included with `mode=max`: ## `invocation.environment` -Corresponds to [SLSA `invocation.environment`](https://slsa.dev/provenance/v0.2#invocation.environment). - -Included with `mode=min` and `mode=max`. +* Ref: https://slsa.dev/spec/v0.2/provenance#invocation.environment +* Included with `mode=min` and `mode=max`. ```json "invocation": { @@ -139,9 +551,8 @@ can be determined from the `in-toto` subject field. ## `materials` -Corresponds to [SLSA `materials`](https://slsa.dev/provenance/v0.2#materials). - -Included with `mode=min` and `mode=max`. +* Ref: https://slsa.dev/spec/v0.2/provenance#materials +* Included with `mode=min` and `mode=max`. Defines all the external artifacts that were part of the build. The value depends on the type of artifact: @@ -178,9 +589,8 @@ determine if the artifact has been updated compared to when the build ran. ## `buildConfig` -Corresponds to [SLSA `buildConfig`](https://slsa.dev/provenance/v0.2#buildConfig). - -Only included with `mode=max`. 
+* Ref: https://slsa.dev/spec/v0.2/provenance#buildConfig +* Only included with `mode=max`. Defines the build steps performed during the build. @@ -230,9 +640,8 @@ field for every step. ## `metadata.buildInvocationId` -Corresponds to [SLSA `metadata.buildInvocationId`](https://slsa.dev/provenance/v0.2#metadata.buildIncocationId). - -Included with `mode=min` and `mode=max`. +* Ref: https://slsa.dev/spec/v0.2/provenance#buildInvocationId +* Included with `mode=min` and `mode=max`. Unique identifier for the build invocation. When building a multi-platform image with a single build request, this value will be the shared by all the platform @@ -247,9 +656,8 @@ versions of the image. ## `metadata.buildStartedOn` -Corresponds to [SLSA `metadata.buildStartedOn`](https://slsa.dev/provenance/v0.2#metadata.buildStartedOn). - -Included with `mode=min` and `mode=max`. +* Ref: https://slsa.dev/spec/v0.2/provenance#buildStartedOn +* Included with `mode=min` and `mode=max`. Timestamp when the build started. @@ -262,9 +670,8 @@ Timestamp when the build started. ## `metadata.buildFinishedOn` -Corresponds to [SLSA `metadata.buildFinishedOn`](https://slsa.dev/provenance/v0.2#metadata.buildFinishedOn). - -Included with `mode=min` and `mode=max`. +* Ref: https://slsa.dev/spec/v0.2/provenance#buildFinishedOn +* Included with `mode=min` and `mode=max`. Timestamp when the build finished. @@ -277,17 +684,16 @@ Timestamp when the build finished. ## `metadata.completeness` -Corresponds to [SLSA `metadata.completeness`](https://slsa.dev/provenance/v0.2#metadata.completeness). - -Included with `mode=min` and `mode=max`. +* Ref: https://slsa.dev/spec/v0.2/provenance#metadata.completeness +* Included with `mode=min` and `mode=max`. Defines if the provenance information is complete. `completeness.parameters` is true if all the build arguments are included in the -`invocation.parameters` field. 
When building with `min` mode, the build -arguments are not included in the provenance information and parameters are not -complete. Parameters are also not complete on direct LLB builds that did not use -a frontend. +`parameters` field. When building with `min` mode, the build arguments are not +included in the provenance information and parameters are not complete. +Parameters are also not complete on direct LLB builds that did not use a +frontend. `completeness.environment` is always true for BuildKit builds. @@ -310,7 +716,8 @@ is true. ## `metadata.reproducible` -Corresponds to [SLSA `metadata.reproducible`](https://slsa.dev/provenance/v0.2#metadata.reproducible). +* Ref: https://slsa.dev/spec/v0.2/provenance#metadata.reproducible +* Included with `mode=min` and `mode=max`. Defines if the build result is supposed to be byte-by-byte reproducible. This value can be set by the user with the `reproducible=true` attestation parameter. @@ -389,227 +796,3 @@ repository. In this case, the build client can send additional `vcs:source` and attestations as extra metadata. Note that, contrary to the `invocation.configSource` field, BuildKit doesn't verify the `vcs` values, and as such they can't be trusted and should only be used as a metadata hint. - -## Output - -To inspect the provenance that was generated and attached to a container image, -you can use the `docker buildx imagetools` command to inspect the image in a -registry. Inspecting the attestation displays the format described in the -[attestation storage specification](./attestation-storage.md). 
- -For example, inspecting a simple Docker image based on `alpine:latest` results -in a provenance attestation similar to the following, for a `mode=min` build: - -```json -{ - "_type": "https://in-toto.io/Statement/v0.1", - "predicateType": "https://slsa.dev/provenance/v0.2", - "subject": [ - { - "name": "pkg:docker//@?platform=", - "digest": { - "sha256": "e8275b2b76280af67e26f068e5d585eb905f8dfd2f1918b3229db98133cb4862" - } - } - ], - "predicate": { - "builder": { - "id": "" - }, - "buildType": "https://mobyproject.org/buildkit@v1", - "materials": [ - { - "uri": "pkg:docker/alpine@latest?platform=linux%2Famd64", - "digest": { - "sha256": "8914eb54f968791faf6a8638949e480fef81e697984fba772b3976835194c6d4" - } - } - ], - "invocation": { - "configSource": { - "entryPoint": "Dockerfile" - }, - "parameters": { - "frontend": "dockerfile.v0", - "args": {}, - "locals": [ - { - "name": "context" - }, - { - "name": "dockerfile" - } - ] - }, - "environment": { - "platform": "linux/amd64" - } - }, - "metadata": { - "buildInvocationID": "yirbp1aosi1vqjmi3z6bc75nb", - "buildStartedOn": "2022-12-08T11:48:59.466513707Z", - "buildFinishedOn": "2022-12-08T11:49:01.256820297Z", - "reproducible": false, - "completeness": { - "parameters": true, - "environment": true, - "materials": false - }, - "https://mobyproject.org/buildkit@v1#metadata": {} - } - } -} -``` - -For a similar build, but with `mode=max`: - -```json -{ - "_type": "https://in-toto.io/Statement/v0.1", - "predicateType": "https://slsa.dev/provenance/v0.2", - "subject": [ - { - "name": "pkg:docker//@?platform=", - "digest": { - "sha256": "e8275b2b76280af67e26f068e5d585eb905f8dfd2f1918b3229db98133cb4862" - } - } - ], - "predicate": { - "builder": { - "id": "" - }, - "buildType": "https://mobyproject.org/buildkit@v1", - "materials": [ - { - "uri": "pkg:docker/alpine@latest?platform=linux%2Famd64", - "digest": { - "sha256": "8914eb54f968791faf6a8638949e480fef81e697984fba772b3976835194c6d4" - } - } - ], - "invocation": { - 
"configSource": { - "entryPoint": "Dockerfile" - }, - "parameters": { - "frontend": "dockerfile.v0", - "args": {}, - "locals": [ - { - "name": "context" - }, - { - "name": "dockerfile" - } - ] - }, - "environment": { - "platform": "linux/amd64" - } - }, - "buildConfig": { - "llbDefinition": [ - { - "id": "step0", - "op": { - "Op": { - "source": { - "identifier": "docker-image://docker.io/library/alpine:latest@sha256:8914eb54f968791faf6a8638949e480fef81e697984fba772b3976835194c6d4" - } - }, - "platform": { - "Architecture": "amd64", - "OS": "linux" - }, - "constraints": {} - } - }, - { - "id": "step1", - "op": { - "Op": null - }, - "inputs": ["step0:0"] - } - ] - }, - "metadata": { - "buildInvocationID": "46ue2x93k3xj5l463dektwldw", - "buildStartedOn": "2022-12-08T11:50:54.953375437Z", - "buildFinishedOn": "2022-12-08T11:50:55.447841328Z", - "reproducible": false, - "completeness": { - "parameters": true, - "environment": true, - "materials": false - }, - "https://mobyproject.org/buildkit@v1#metadata": { - "source": { - "locations": { - "step0": { - "locations": [ - { - "ranges": [ - { - "start": { - "line": 1 - }, - "end": { - "line": 1 - } - } - ] - } - ] - } - }, - "infos": [ - { - "filename": "Dockerfile", - "data": "RlJPTSBhbHBpbmU6bGF0ZXN0Cg==", - "llbDefinition": [ - { - "id": "step0", - "op": { - "Op": { - "source": { - "identifier": "local://dockerfile", - "attrs": { - "local.differ": "none", - "local.followpaths": "[\"Dockerfile\",\"Dockerfile.dockerignore\",\"dockerfile\"]", - "local.session": "q2jnwdkas0i0iu4knchd92jaz", - "local.sharedkeyhint": "dockerfile" - } - } - }, - "constraints": {} - } - }, - { - "id": "step1", - "op": { - "Op": null - }, - "inputs": ["step0:0"] - } - ] - } - ] - }, - "layers": { - "step0:0": [ - [ - { - "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip", - "digest": "sha256:c158987b05517b6f2c5913f3acef1f2182a32345a304fe357e3ace5fadcad715", - "size": 3370706 - } - ] - ] - } - } - } - } -} -``` diff --git 
a/docs/attestations/slsa-provenance.md b/docs/attestations/slsa-provenance.md index bd77d32d4c4c..e74a76892028 100644 --- a/docs/attestations/slsa-provenance.md +++ b/docs/attestations/slsa-provenance.md @@ -15,7 +15,7 @@ Provenance attestations created by BuildKit include details such as: - Descriptions of all build steps, with their source and layer mappings. Provenance generated by BuildKit is wrapped inside [in-toto attestations](https://github.com/in-toto/attestation) -in the [SLSA Provenance format](https://slsa.dev/provenance/v0.2). +in the SLSA Provenance format (supports both [v0.2](https://slsa.dev/spec/v0.2/provenance) and [v1](https://slsa.dev/spec/v1.1/provenance)). For more information about how the attestation fields get generated, see [SLSA definitions](./slsa-definitions.md). @@ -51,13 +51,14 @@ and exported with your build result, in the root directory. ## Parameters -| Parameter | Type | Default | Description | -| -------------- | -------------- | ---------------- | ----------------------------------------------------------------------------------------------------------- | -| `mode` | `min`,`max` | `max` | Configures the amount of provenance to be generated. See [mode](#mode) | -| `builder-id` | String | | Explicitly set SLSA [`builder.id`](https://slsa.dev/provenance/v0.2#builder.id) field | -| `filename` | String | `provenance.json` | Set filename for provenance attestation when exported with `local` or `tar` exporter | -| `reproducible` | `true`,`false` | `false` | Explicitly set SLSA [`metadata.reproducible`](https://slsa.dev/provenance/v0.2#metadata.reproducible) field | -| `inline-only` | `true`,`false` | `false` | Only embed provenance into exporters that support inline content. 
See [inline-only](#inline-only) | +| Parameter      | Type           | Default           | Description                                                                                         | +|----------------|----------------|-------------------|---------------------------------------------------------------------------------------------------| +| `mode`         | `min`,`max`    | `max`             | Configures the amount of provenance to be generated. See [mode](#mode)                             | +| `builder-id`   | String         |                   | Explicitly set SLSA Builder ID field. See [builder-id](#builder-id)                                | +| `filename`     | String         | `provenance.json` | Set filename for provenance attestation when exported with `local` or `tar` exporter               | +| `reproducible` | `true`,`false` | `false`           | Explicitly marked as reproducible. See [reproducible](#reproducible)                               | +| `inline-only`  | `true`,`false` | `false`           | Only embed provenance into exporters that support inline content. See [inline-only](#inline-only)  | +| `version`      | String         | `v0.2`            | SLSA provenance version to use (`v0.2` or `v1`)                                                    | ### `mode` @@ -89,6 +90,24 @@ about secrets - these builds should be refactored to prefer passing hidden values through secrets wherever possible to prevent unnecessary information leakage. +### `builder-id` + +Depends on the SLSA `version` used: + +| SLSA version | Field | +|--------------|-----------------------------------------------------------------------------| +| `v1`         | [`runDetails.builder.id`](https://slsa.dev/spec/v1.1/provenance#builder.id) | +| `v0.2`       | [`builder.id`](https://slsa.dev/spec/v0.2/provenance#builder.id)            | + +### `reproducible` + +Depends on the SLSA `version` used: + +| SLSA version | Field | +|--------------|----------------------------------------------------------------------------------------| +| `v1`         | `runDetails.metadata.buildkit_reproducible`                                            | +| `v0.2`       | [`metadata.reproducible`](https://slsa.dev/spec/v0.2/provenance#metadata.reproducible) | + ### `inline-only` By default, provenance is by included in all exporters that support
Since other exporters produce attestations into separate files, in their filesystems, you may not want to include the provenance in these cases. +## Output + +To inspect the provenance that was generated and attached to a container image, +you can use the `docker buildx imagetools` command to inspect the image in a +registry. Inspecting the attestation displays the format described in the +[attestation storage specification](./attestation-storage.md). + +For example, inspecting a simple Docker image based on `alpine:latest`: + +```dockerfile +FROM alpine:latest +``` + +Results in a provenance attestation similar to the following, for a `mode=min` +build: + +```json +{ + "_type": "https://in-toto.io/Statement/v0.1", + "predicateType": "https://slsa.dev/provenance/v0.2", + "subject": [ + { + "name": "pkg:docker//@?platform=", + "digest": { + "sha256": "e8275b2b76280af67e26f068e5d585eb905f8dfd2f1918b3229db98133cb4862" + } + } + ], + "predicate": { + "builder": { + "id": "" + }, + "buildType": "https://mobyproject.org/buildkit@v1", + "materials": [ + { + "uri": "pkg:docker/alpine@latest?platform=linux%2Famd64", + "digest": { + "sha256": "8914eb54f968791faf6a8638949e480fef81e697984fba772b3976835194c6d4" + } + } + ], + "invocation": { + "configSource": { + "entryPoint": "Dockerfile" + }, + "parameters": { + "frontend": "dockerfile.v0", + "args": {}, + "locals": [ + { + "name": "context" + }, + { + "name": "dockerfile" + } + ] + }, + "environment": { + "platform": "linux/amd64" + } + }, + "metadata": { + "buildInvocationID": "yirbp1aosi1vqjmi3z6bc75nb", + "buildStartedOn": "2022-12-08T11:48:59.466513707Z", + "buildFinishedOn": "2022-12-08T11:49:01.256820297Z", + "reproducible": false, + "completeness": { + "parameters": true, + "environment": true, + "materials": false + }, + "https://mobyproject.org/buildkit@v1#metadata": {} + } + } +} +``` + +For a similar build, but with `mode=max`: + +```json +{ + "_type": "https://in-toto.io/Statement/v0.1", + "predicateType": 
"https://slsa.dev/provenance/v0.2", + "subject": [ + { + "name": "pkg:docker//@?platform=", + "digest": { + "sha256": "e8275b2b76280af67e26f068e5d585eb905f8dfd2f1918b3229db98133cb4862" + } + } + ], + "predicate": { + "builder": { + "id": "" + }, + "buildType": "https://mobyproject.org/buildkit@v1", + "materials": [ + { + "uri": "pkg:docker/alpine@latest?platform=linux%2Famd64", + "digest": { + "sha256": "8914eb54f968791faf6a8638949e480fef81e697984fba772b3976835194c6d4" + } + } + ], + "invocation": { + "configSource": { + "entryPoint": "Dockerfile" + }, + "parameters": { + "frontend": "dockerfile.v0", + "args": {}, + "locals": [ + { + "name": "context" + }, + { + "name": "dockerfile" + } + ] + }, + "environment": { + "platform": "linux/amd64" + } + }, + "buildConfig": { + "llbDefinition": [ + { + "id": "step0", + "op": { + "Op": { + "source": { + "identifier": "docker-image://docker.io/library/alpine:latest@sha256:8914eb54f968791faf6a8638949e480fef81e697984fba772b3976835194c6d4" + } + }, + "platform": { + "Architecture": "amd64", + "OS": "linux" + }, + "constraints": {} + } + }, + { + "id": "step1", + "op": { + "Op": null + }, + "inputs": ["step0:0"] + } + ] + }, + "metadata": { + "buildInvocationID": "46ue2x93k3xj5l463dektwldw", + "buildStartedOn": "2022-12-08T11:50:54.953375437Z", + "buildFinishedOn": "2022-12-08T11:50:55.447841328Z", + "reproducible": false, + "completeness": { + "parameters": true, + "environment": true, + "materials": false + }, + "https://mobyproject.org/buildkit@v1#metadata": { + "source": { + "locations": { + "step0": { + "locations": [ + { + "ranges": [ + { + "start": { + "line": 1 + }, + "end": { + "line": 1 + } + } + ] + } + ] + } + }, + "infos": [ + { + "filename": "Dockerfile", + "data": "RlJPTSBhbHBpbmU6bGF0ZXN0Cg==", + "llbDefinition": [ + { + "id": "step0", + "op": { + "Op": { + "source": { + "identifier": "local://dockerfile", + "attrs": { + "local.differ": "none", + "local.followpaths": 
"[\"Dockerfile\",\"Dockerfile.dockerignore\",\"dockerfile\"]", + "local.session": "q2jnwdkas0i0iu4knchd92jaz", + "local.sharedkeyhint": "dockerfile" + } + } + }, + "constraints": {} + } + }, + { + "id": "step1", + "op": { + "Op": null + }, + "inputs": ["step0:0"] + } + ] + } + ] + }, + "layers": { + "step0:0": [ + [ + { + "mediaType": "application/vnd.oci.image.layer.v1.tar+gzip", + "digest": "sha256:c158987b05517b6f2c5913f3acef1f2182a32345a304fe357e3ace5fadcad715", + "size": 3370706 + } + ] + ] + } + } + } + } +} +``` diff --git a/frontend/attestations/parse.go b/frontend/attestations/parse.go index 2b2ed2499270..6a2b6e181149 100644 --- a/frontend/attestations/parse.go +++ b/frontend/attestations/parse.go @@ -3,6 +3,7 @@ package attestations import ( "strings" + provenancetypes "github.com/moby/buildkit/solver/llbsolver/provenance/types" "github.com/pkg/errors" "github.com/tonistiigi/go-csvvalue" ) @@ -14,6 +15,7 @@ const ( const ( defaultSBOMGenerator = "docker/buildkit-syft-scanner:stable-1" + defaultSLSAVersion = string(provenancetypes.ProvenanceSLSA02) ) func Filter(v map[string]string) map[string]string { @@ -57,8 +59,11 @@ func Parse(values map[string]string) (map[string]map[string]string, error) { for k, v := range attests { attrs := make(map[string]string) out[k] = attrs - if k == KeyTypeSbom { + switch k { + case KeyTypeSbom: attrs["generator"] = defaultSBOMGenerator + case KeyTypeProvenance: + attrs["version"] = defaultSLSAVersion } if v == "" { continue diff --git a/frontend/attestations/parse_test.go b/frontend/attestations/parse_test.go index 5872013640f8..9b48bd4096b7 100644 --- a/frontend/attestations/parse_test.go +++ b/frontend/attestations/parse_test.go @@ -24,7 +24,8 @@ func TestParse(t *testing.T) { "generator": "docker.io/foo/bar", }, "provenance": { - "mode": "max", + "mode": "max", + "version": "v0.2", // intentionally not const }, }, }, diff --git a/frontend/dockerfile/dockerfile_provenance_test.go 
b/frontend/dockerfile/dockerfile_provenance_test.go index a1b5b80abb78..46ee5e7b17be 100644 --- a/frontend/dockerfile/dockerfile_provenance_test.go +++ b/frontend/dockerfile/dockerfile_provenance_test.go @@ -76,32 +76,360 @@ RUN echo ok> /foo fstest.CreateFile("Dockerfile", dockerfile, 0600), ) - for _, mode := range []string{"", "min", "max"} { - t.Run(mode, func(t *testing.T) { - var target string - if target == "" { - target = registry + "/buildkit/testwithprovenance:none" - } else { - target = registry + "/buildkit/testwithprovenance:" + mode + for _, slsaVersion := range []string{"", "v1", "v0.2"} { + for _, mode := range []string{"", "min", "max"} { + var tname []string + if slsaVersion != "" { + tname = append(tname, slsaVersion) } - - provReq := "" if mode != "" { - provReq = "mode=" + mode + tname = append(tname, mode) + } + t.Run(strings.Join(tname, "-"), func(t *testing.T) { + var target string + if target == "" { + target = registry + "/buildkit/testwithprovenance:none" + } else { + target = registry + "/buildkit/testwithprovenance:" + mode + } + + var provArgs []string + if slsaVersion != "" { + provArgs = append(provArgs, "version="+slsaVersion) + } + if mode != "" { + provArgs = append(provArgs, "mode="+mode) + } + _, err = f.Solve(sb.Context(), c, client.SolveOpt{ + LocalMounts: map[string]fsutil.FS{ + dockerui.DefaultLocalNameDockerfile: dir, + dockerui.DefaultLocalNameContext: dir, + }, + FrontendAttrs: map[string]string{ + "attest:provenance": strings.Join(provArgs, ","), + "build-arg:FOO": "bar", + "label:lbl": "abc", + "vcs:source": "https://user:pass@example.invalid/repo.git", + "vcs:revision": "123456", + "filename": "Dockerfile", + dockerui.DefaultLocalNameContext + ":foo": "https://foo:bar@example.invalid/foo.html", + }, + Exports: []client.ExportEntry{ + { + Type: client.ExporterImage, + Attrs: map[string]string{ + "name": target, + "push": "true", + }, + }, + }, + }, nil) + require.NoError(t, err) + + desc, provider, err := 
contentutil.ProviderFromRef(target) + require.NoError(t, err) + imgs, err := testutil.ReadImages(sb.Context(), provider, desc) + require.NoError(t, err) + require.Equal(t, 2, len(imgs.Images)) + + img := imgs.Find(platforms.Format(platforms.Normalize(platforms.DefaultSpec()))) + require.NotNil(t, img) + outFile := integration.UnixOrWindows("foo", "Files/foo") + expectedFileData := integration.UnixOrWindows([]byte("ok\n"), []byte("ok\r\n")) + require.Equal(t, expectedFileData, img.Layers[1][outFile].Data) + + att := imgs.Find("unknown/unknown") + require.NotNil(t, att) + require.Equal(t, string(img.Desc.Digest), att.Desc.Annotations["vnd.docker.reference.digest"]) + require.Equal(t, "attestation-manifest", att.Desc.Annotations["vnd.docker.reference.type"]) + var attest intoto.Statement + require.NoError(t, json.Unmarshal(att.LayersRaw[0], &attest)) + require.Equal(t, "https://in-toto.io/Statement/v0.1", attest.Type) + + if slsaVersion == "v1" { + require.Equal(t, "https://slsa.dev/provenance/v1", attest.PredicateType) // intentionally not const + } else { + require.Equal(t, "https://slsa.dev/provenance/v0.2", attest.PredicateType) // intentionally not const + } + + _, isClient := f.(*clientFrontend) + _, isGateway := f.(*gatewayFrontend) + + if slsaVersion == "v1" { + type stmtT struct { + Predicate provenancetypes.ProvenancePredicateSLSA1 `json:"predicate"` + } + var stmt stmtT + require.NoError(t, json.Unmarshal(att.LayersRaw[0], &stmt)) + pred := stmt.Predicate + + require.Equal(t, "https://github.com/moby/buildkit/blob/master/docs/attestations/slsa-definitions.md", pred.BuildDefinition.BuildType) + require.Equal(t, "", pred.RunDetails.Builder.ID) + + require.Equal(t, "", pred.BuildDefinition.ExternalParameters.ConfigSource.URI) + + args := pred.BuildDefinition.ExternalParameters.Request.Args + if isClient { + require.Equal(t, "", pred.BuildDefinition.ExternalParameters.Request.Frontend) + require.Equal(t, 0, len(args), "%v", args) + require.False(t, 
pred.RunDetails.Metadata.Completeness.Request) + require.Equal(t, "", pred.BuildDefinition.ExternalParameters.ConfigSource.Path) + } else if isGateway { + require.Equal(t, "gateway.v0", pred.BuildDefinition.ExternalParameters.Request.Frontend) + + if mode == "max" || mode == "" { + require.Equal(t, 4, len(args), "%v", args) + require.True(t, pred.RunDetails.Metadata.Completeness.Request) + + require.Equal(t, "bar", args["build-arg:FOO"]) + require.Equal(t, "abc", args["label:lbl"]) + require.Contains(t, args["source"], "buildkit_test/") + } else { + require.False(t, pred.RunDetails.Metadata.Completeness.Request) + require.Equal(t, 2, len(args), "%v", args) + require.Contains(t, args["source"], "buildkit_test/") + } + require.Equal(t, "https://xxxxx:xxxxx@example.invalid/foo.html", args["context:foo"]) + } else { + require.Equal(t, "dockerfile.v0", pred.BuildDefinition.ExternalParameters.Request.Frontend) + + if mode == "max" || mode == "" { + require.Equal(t, 3, len(args)) + require.True(t, pred.RunDetails.Metadata.Completeness.Request) + + require.Equal(t, "bar", args["build-arg:FOO"]) + require.Equal(t, "abc", args["label:lbl"]) + } else { + require.False(t, pred.RunDetails.Metadata.Completeness.Request) + require.Equal(t, 1, len(args), "%v", args) + } + require.Equal(t, "https://xxxxx:xxxxx@example.invalid/foo.html", args["context:foo"]) + } + + expectedBaseImage := integration.UnixOrWindows("busybox", "nanoserver") + escapedPlatform := url.PathEscape(platforms.Format(platforms.Normalize(platforms.DefaultSpec()))) + expectedBase := fmt.Sprintf("pkg:docker/%s@latest?platform=%s", expectedBaseImage, escapedPlatform) + if isGateway { + require.Equal(t, 2, len(pred.BuildDefinition.ResolvedDependencies), "%+v", pred.BuildDefinition.ResolvedDependencies) + require.Contains(t, pred.BuildDefinition.ResolvedDependencies[0].URI, "docker/buildkit_test") + require.Equal(t, expectedBase, pred.BuildDefinition.ResolvedDependencies[1].URI) + require.NotEmpty(t, 
pred.BuildDefinition.ResolvedDependencies[1].Digest["sha256"]) + } else { + require.Equal(t, 1, len(pred.BuildDefinition.ResolvedDependencies), "%+v", pred.BuildDefinition.ResolvedDependencies) + require.Equal(t, expectedBase, pred.BuildDefinition.ResolvedDependencies[0].URI) + require.NotEmpty(t, pred.BuildDefinition.ResolvedDependencies[0].Digest["sha256"]) + } + + if !isClient { + require.Equal(t, "Dockerfile", pred.BuildDefinition.ExternalParameters.ConfigSource.Path) + require.Equal(t, "https://xxxxx:xxxxx@example.invalid/repo.git", pred.RunDetails.Metadata.BuildKitMetadata.VCS["source"]) + require.Equal(t, "123456", pred.RunDetails.Metadata.BuildKitMetadata.VCS["revision"]) + } + + require.NotEmpty(t, pred.RunDetails.Metadata.InvocationID) + + require.Equal(t, 2, len(pred.BuildDefinition.ExternalParameters.Request.Locals), "%+v", pred.BuildDefinition.ExternalParameters.Request.Locals) + require.Equal(t, "context", pred.BuildDefinition.ExternalParameters.Request.Locals[0].Name) + require.Equal(t, "dockerfile", pred.BuildDefinition.ExternalParameters.Request.Locals[1].Name) + + require.NotNil(t, pred.RunDetails.Metadata.FinishedOn) + require.Less(t, time.Since(*pred.RunDetails.Metadata.FinishedOn), 5*time.Minute) + require.NotNil(t, pred.RunDetails.Metadata.StartedOn) + require.Less(t, time.Since(*pred.RunDetails.Metadata.StartedOn), 5*time.Minute) + require.True(t, pred.RunDetails.Metadata.StartedOn.Before(*pred.RunDetails.Metadata.FinishedOn)) + + require.Equal(t, platforms.Format(platforms.Normalize(platforms.DefaultSpec())), pred.BuildDefinition.InternalParameters.BuilderPlatform) + + require.False(t, pred.RunDetails.Metadata.Completeness.ResolvedDependencies) + require.False(t, pred.RunDetails.Metadata.Reproducible) + require.False(t, pred.RunDetails.Metadata.Hermetic) + + if mode == "max" || mode == "" { + require.Equal(t, 2, len(pred.RunDetails.Metadata.BuildKitMetadata.Layers)) + require.NotNil(t, pred.RunDetails.Metadata.BuildKitMetadata.Source) + 
require.Equal(t, "Dockerfile", pred.RunDetails.Metadata.BuildKitMetadata.Source.Infos[0].Filename) + require.Equal(t, dockerfile, pred.RunDetails.Metadata.BuildKitMetadata.Source.Infos[0].Data) + require.NotNil(t, pred.BuildDefinition.InternalParameters.BuildConfig) + require.Equal(t, 3, len(pred.BuildDefinition.InternalParameters.BuildConfig.Definition)) + } else { + require.Equal(t, 0, len(pred.RunDetails.Metadata.BuildKitMetadata.Layers)) + require.Nil(t, pred.RunDetails.Metadata.BuildKitMetadata.Source) + require.Nil(t, pred.BuildDefinition.InternalParameters.BuildConfig) + } + } else { + type stmtT struct { + Predicate provenancetypes.ProvenancePredicateSLSA02 `json:"predicate"` + } + var stmt stmtT + require.NoError(t, json.Unmarshal(att.LayersRaw[0], &stmt)) + pred := stmt.Predicate + + require.Equal(t, "https://mobyproject.org/buildkit@v1", pred.BuildType) + require.Equal(t, "", pred.Builder.ID) + + require.Equal(t, "", pred.Invocation.ConfigSource.URI) + + args := pred.Invocation.Parameters.Args + if isClient { + require.Equal(t, "", pred.Invocation.Parameters.Frontend) + require.Equal(t, 0, len(args), "%v", args) + require.False(t, pred.Metadata.Completeness.Parameters) + require.Equal(t, "", pred.Invocation.ConfigSource.EntryPoint) + } else if isGateway { + require.Equal(t, "gateway.v0", pred.Invocation.Parameters.Frontend) + + if mode == "max" || mode == "" { + require.Equal(t, 4, len(args), "%v", args) + require.True(t, pred.Metadata.Completeness.Parameters) + + require.Equal(t, "bar", args["build-arg:FOO"]) + require.Equal(t, "abc", args["label:lbl"]) + require.Contains(t, args["source"], "buildkit_test/") + } else { + require.False(t, pred.Metadata.Completeness.Parameters) + require.Equal(t, 2, len(args), "%v", args) + require.Contains(t, args["source"], "buildkit_test/") + } + require.Equal(t, "https://xxxxx:xxxxx@example.invalid/foo.html", args["context:foo"]) + } else { + require.Equal(t, "dockerfile.v0", pred.Invocation.Parameters.Frontend) + + 
if mode == "max" || mode == "" { + require.Equal(t, 3, len(args)) + require.True(t, pred.Metadata.Completeness.Parameters) + + require.Equal(t, "bar", args["build-arg:FOO"]) + require.Equal(t, "abc", args["label:lbl"]) + } else { + require.False(t, pred.Metadata.Completeness.Parameters) + require.Equal(t, 1, len(args), "%v", args) + } + require.Equal(t, "https://xxxxx:xxxxx@example.invalid/foo.html", args["context:foo"]) + } + + expectedBaseImage := integration.UnixOrWindows("busybox", "nanoserver") + escapedPlatform := url.PathEscape(platforms.Format(platforms.Normalize(platforms.DefaultSpec()))) + expectedBase := fmt.Sprintf("pkg:docker/%s@latest?platform=%s", expectedBaseImage, escapedPlatform) + if isGateway { + require.Equal(t, 2, len(pred.Materials), "%+v", pred.Materials) + require.Contains(t, pred.Materials[0].URI, "docker/buildkit_test") + require.Equal(t, expectedBase, pred.Materials[1].URI) + require.NotEmpty(t, pred.Materials[1].Digest["sha256"]) + } else { + require.Equal(t, 1, len(pred.Materials), "%+v", pred.Materials) + require.Equal(t, expectedBase, pred.Materials[0].URI) + require.NotEmpty(t, pred.Materials[0].Digest["sha256"]) + } + + if !isClient { + require.Equal(t, "Dockerfile", pred.Invocation.ConfigSource.EntryPoint) + require.Equal(t, "https://xxxxx:xxxxx@example.invalid/repo.git", pred.Metadata.BuildKitMetadata.VCS["source"]) + require.Equal(t, "123456", pred.Metadata.BuildKitMetadata.VCS["revision"]) + } + + require.NotEmpty(t, pred.Metadata.BuildInvocationID) + + require.Equal(t, 2, len(pred.Invocation.Parameters.Locals), "%+v", pred.Invocation.Parameters.Locals) + require.Equal(t, "context", pred.Invocation.Parameters.Locals[0].Name) + require.Equal(t, "dockerfile", pred.Invocation.Parameters.Locals[1].Name) + + require.NotNil(t, pred.Metadata.BuildFinishedOn) + require.Less(t, time.Since(*pred.Metadata.BuildFinishedOn), 5*time.Minute) + require.NotNil(t, pred.Metadata.BuildStartedOn) + require.Less(t, 
time.Since(*pred.Metadata.BuildStartedOn), 5*time.Minute) + require.True(t, pred.Metadata.BuildStartedOn.Before(*pred.Metadata.BuildFinishedOn)) + + require.True(t, pred.Metadata.Completeness.Environment) + require.Equal(t, platforms.Format(platforms.Normalize(platforms.DefaultSpec())), pred.Invocation.Environment.Platform) + + require.False(t, pred.Metadata.Completeness.Materials) + require.False(t, pred.Metadata.Reproducible) + require.False(t, pred.Metadata.Hermetic) + + if mode == "max" || mode == "" { + require.Equal(t, 2, len(pred.Metadata.BuildKitMetadata.Layers)) + require.NotNil(t, pred.Metadata.BuildKitMetadata.Source) + require.Equal(t, "Dockerfile", pred.Metadata.BuildKitMetadata.Source.Infos[0].Filename) + require.Equal(t, dockerfile, pred.Metadata.BuildKitMetadata.Source.Infos[0].Data) + require.NotNil(t, pred.BuildConfig) + + require.Equal(t, 3, len(pred.BuildConfig.Definition)) + } else { + require.Equal(t, 0, len(pred.Metadata.BuildKitMetadata.Layers)) + require.Nil(t, pred.Metadata.BuildKitMetadata.Source) + require.Nil(t, pred.BuildConfig) + } + } + }) + } + } +} + +func testGitProvenanceAttestation(t *testing.T, sb integration.Sandbox) { + integration.SkipOnPlatform(t, "windows") + workers.CheckFeatureCompat(t, sb, workers.FeatureDirectPush, workers.FeatureProvenance) + ctx := sb.Context() + + c, err := client.New(ctx, sb.Address()) + require.NoError(t, err) + defer c.Close() + + registry, err := sb.NewRegistry() + if errors.Is(err, integration.ErrRequirements) { + t.Skip(err.Error()) + } + require.NoError(t, err) + + f := getFrontend(t, sb) + + for _, slsaVersion := range []string{"", "v1", "v0.2"} { + t.Run(slsaVersion, func(t *testing.T) { + var provArgs []string + if slsaVersion != "" { + provArgs = append(provArgs, "version="+slsaVersion) } + + dockerfile := []byte(` +FROM busybox:latest +RUN --network=none echo "git" > /foo +COPY myapp.Dockerfile / +`) + dir := integration.Tmpdir( + t, + fstest.CreateFile("myapp.Dockerfile", dockerfile, 
0600), + ) + + err = runShell(dir.Name, + "git init", + "git config --local user.email test", + "git config --local user.name test", + "git add myapp.Dockerfile", + "git commit -m initial", + "git branch v1", + "git update-server-info", + ) + require.NoError(t, err) + + cmd := exec.Command("git", "rev-parse", "v1") + cmd.Dir = dir.Name + expectedGitSHA, err := cmd.Output() + require.NoError(t, err) + + server := httptest.NewServer(http.FileServer(http.Dir(filepath.Clean(dir.Name)))) + defer server.Close() + + target := registry + "/buildkit/testwithprovenance:git" + + // inject dummy credentials to test that they are masked + expectedURL := strings.Replace(server.URL, "http://", "http://xxxxx:xxxxx@", 1) + require.NotEqual(t, expectedURL, server.URL) + server.URL = strings.Replace(server.URL, "http://", "http://user:pass@", 1) + _, err = f.Solve(sb.Context(), c, client.SolveOpt{ - LocalMounts: map[string]fsutil.FS{ - dockerui.DefaultLocalNameDockerfile: dir, - dockerui.DefaultLocalNameContext: dir, - }, FrontendAttrs: map[string]string{ - "attest:provenance": provReq, - "build-arg:FOO": "bar", - "label:lbl": "abc", - "vcs:source": "https://user:pass@example.invalid/repo.git", - "vcs:revision": "123456", - "filename": "Dockerfile", - dockerui.DefaultLocalNameContext + ":foo": "https://foo:bar@example.invalid/foo.html", + "context": server.URL + "/.git#v1", + "attest:provenance": strings.Join(provArgs, ","), + "filename": "myapp.Dockerfile", }, Exports: []client.ExportEntry{ { @@ -123,9 +451,7 @@ RUN echo ok> /foo img := imgs.Find(platforms.Format(platforms.Normalize(platforms.DefaultSpec()))) require.NotNil(t, img) - outFile := integration.UnixOrWindows("foo", "Files/foo") - expectedFileData := integration.UnixOrWindows([]byte("ok\n"), []byte("ok\r\n")) - require.Equal(t, expectedFileData, img.Layers[1][outFile].Data) + require.Equal(t, []byte("git\n"), img.Layers[1]["foo"].Data) att := imgs.Find("unknown/unknown") require.NotNil(t, att) @@ -134,263 +460,124 @@ RUN 
echo ok> /foo var attest intoto.Statement require.NoError(t, json.Unmarshal(att.LayersRaw[0], &attest)) require.Equal(t, "https://in-toto.io/Statement/v0.1", attest.Type) - require.Equal(t, "https://slsa.dev/provenance/v0.2", attest.PredicateType) // intentionally not const - - type stmtT struct { - Predicate provenancetypes.ProvenancePredicate `json:"predicate"` - } - var stmt stmtT - require.NoError(t, json.Unmarshal(att.LayersRaw[0], &stmt)) - pred := stmt.Predicate - - require.Equal(t, "https://mobyproject.org/buildkit@v1", pred.BuildType) - require.Equal(t, "", pred.Builder.ID) - - require.Equal(t, "", pred.Invocation.ConfigSource.URI) _, isClient := f.(*clientFrontend) _, isGateway := f.(*gatewayFrontend) - args := pred.Invocation.Parameters.Args - if isClient { - require.Equal(t, "", pred.Invocation.Parameters.Frontend) - require.Equal(t, 0, len(args), "%v", args) - require.False(t, pred.Metadata.Completeness.Parameters) - require.Equal(t, "", pred.Invocation.ConfigSource.EntryPoint) - } else if isGateway { - require.Equal(t, "gateway.v0", pred.Invocation.Parameters.Frontend) - - if mode == "max" || mode == "" { - require.Equal(t, 4, len(args), "%v", args) - require.True(t, pred.Metadata.Completeness.Parameters) + if slsaVersion == "v1" { + require.Equal(t, "https://slsa.dev/provenance/v1", attest.PredicateType) // intentionally not const - require.Equal(t, "bar", args["build-arg:FOO"]) - require.Equal(t, "abc", args["label:lbl"]) - require.Contains(t, args["source"], "buildkit_test/") - } else { - require.False(t, pred.Metadata.Completeness.Parameters) - require.Equal(t, 2, len(args), "%v", args) - require.Contains(t, args["source"], "buildkit_test/") + type stmtT struct { + Predicate provenancetypes.ProvenancePredicateSLSA1 `json:"predicate"` } - require.Equal(t, "https://xxxxx:xxxxx@example.invalid/foo.html", args["context:foo"]) - } else { - require.Equal(t, "dockerfile.v0", pred.Invocation.Parameters.Frontend) - - if mode == "max" || mode == "" { - 
require.Equal(t, 3, len(args)) - require.True(t, pred.Metadata.Completeness.Parameters) - - require.Equal(t, "bar", args["build-arg:FOO"]) - require.Equal(t, "abc", args["label:lbl"]) + var stmt stmtT + require.NoError(t, json.Unmarshal(att.LayersRaw[0], &stmt)) + pred := stmt.Predicate + + if isClient { + require.Empty(t, pred.BuildDefinition.ExternalParameters.Request.Frontend) + require.Equal(t, "", pred.BuildDefinition.ExternalParameters.ConfigSource.URI) + require.Equal(t, "", pred.BuildDefinition.ExternalParameters.ConfigSource.Path) } else { - require.False(t, pred.Metadata.Completeness.Parameters) - require.Equal(t, 1, len(args), "%v", args) + require.NotEmpty(t, pred.BuildDefinition.ExternalParameters.Request.Frontend) + require.Equal(t, expectedURL+"/.git#v1", pred.BuildDefinition.ExternalParameters.ConfigSource.URI) + require.Equal(t, "myapp.Dockerfile", pred.BuildDefinition.ExternalParameters.ConfigSource.Path) } - require.Equal(t, "https://xxxxx:xxxxx@example.invalid/foo.html", args["context:foo"]) - } - - expectedBaseImage := integration.UnixOrWindows("busybox", "nanoserver") - escapedPlatform := url.PathEscape(platforms.Format(platforms.Normalize(platforms.DefaultSpec()))) - expectedBase := fmt.Sprintf("pkg:docker/%s@latest?platform=%s", expectedBaseImage, escapedPlatform) - if isGateway { - require.Equal(t, 2, len(pred.Materials), "%+v", pred.Materials) - require.Contains(t, pred.Materials[0].URI, "docker/buildkit_test") - require.Equal(t, expectedBase, pred.Materials[1].URI) - require.NotEmpty(t, pred.Materials[1].Digest["sha256"]) - } else { - require.Equal(t, 1, len(pred.Materials), "%+v", pred.Materials) - require.Equal(t, expectedBase, pred.Materials[0].URI) - require.NotEmpty(t, pred.Materials[0].Digest["sha256"]) - } - - if !isClient { - require.Equal(t, "Dockerfile", pred.Invocation.ConfigSource.EntryPoint) - require.Equal(t, "https://xxxxx:xxxxx@example.invalid/repo.git", pred.Metadata.BuildKitMetadata.VCS["source"]) - require.Equal(t, 
"123456", pred.Metadata.BuildKitMetadata.VCS["revision"]) - } - - require.NotEmpty(t, pred.Metadata.BuildInvocationID) - - require.Equal(t, 2, len(pred.Invocation.Parameters.Locals), "%+v", pred.Invocation.Parameters.Locals) - require.Equal(t, "context", pred.Invocation.Parameters.Locals[0].Name) - require.Equal(t, "dockerfile", pred.Invocation.Parameters.Locals[1].Name) - - require.NotNil(t, pred.Metadata.BuildFinishedOn) - require.Less(t, time.Since(*pred.Metadata.BuildFinishedOn), 5*time.Minute) - require.NotNil(t, pred.Metadata.BuildStartedOn) - require.Less(t, time.Since(*pred.Metadata.BuildStartedOn), 5*time.Minute) - require.True(t, pred.Metadata.BuildStartedOn.Before(*pred.Metadata.BuildFinishedOn)) - - require.True(t, pred.Metadata.Completeness.Environment) - require.Equal(t, platforms.Format(platforms.Normalize(platforms.DefaultSpec())), pred.Invocation.Environment.Platform) - - require.False(t, pred.Metadata.Completeness.Materials) - require.False(t, pred.Metadata.Reproducible) - require.False(t, pred.Metadata.Hermetic) - - if mode == "max" || mode == "" { - require.Equal(t, 2, len(pred.Metadata.BuildKitMetadata.Layers)) - require.NotNil(t, pred.Metadata.BuildKitMetadata.Source) - require.Equal(t, "Dockerfile", pred.Metadata.BuildKitMetadata.Source.Infos[0].Filename) - require.Equal(t, dockerfile, pred.Metadata.BuildKitMetadata.Source.Infos[0].Data) - require.NotNil(t, pred.BuildConfig) - - require.Equal(t, 3, len(pred.BuildConfig.Definition)) - } else { - require.Equal(t, 0, len(pred.Metadata.BuildKitMetadata.Layers)) - require.Nil(t, pred.Metadata.BuildKitMetadata.Source) - require.Nil(t, pred.BuildConfig) - } - }) - } -} -func testGitProvenanceAttestation(t *testing.T, sb integration.Sandbox) { - integration.SkipOnPlatform(t, "windows") - workers.CheckFeatureCompat(t, sb, workers.FeatureDirectPush, workers.FeatureProvenance) - ctx := sb.Context() + expBase := "pkg:docker/busybox@latest?platform=" + 
url.PathEscape(platforms.Format(platforms.Normalize(platforms.DefaultSpec()))) + if isGateway { + require.Equal(t, 3, len(pred.BuildDefinition.ResolvedDependencies), "%+v", pred.BuildDefinition.ResolvedDependencies) - c, err := client.New(ctx, sb.Address()) - require.NoError(t, err) - defer c.Close() + require.Contains(t, pred.BuildDefinition.ResolvedDependencies[0].URI, "pkg:docker/buildkit_test/") + require.NotEmpty(t, pred.BuildDefinition.ResolvedDependencies[0].Digest) - registry, err := sb.NewRegistry() - if errors.Is(err, integration.ErrRequirements) { - t.Skip(err.Error()) - } - require.NoError(t, err) - - f := getFrontend(t, sb) + require.Equal(t, expBase, pred.BuildDefinition.ResolvedDependencies[1].URI) + require.NotEmpty(t, pred.BuildDefinition.ResolvedDependencies[1].Digest["sha256"]) - dockerfile := []byte(` -FROM busybox:latest -RUN --network=none echo "git" > /foo -COPY myapp.Dockerfile / -`) - dir := integration.Tmpdir( - t, - fstest.CreateFile("myapp.Dockerfile", dockerfile, 0600), - ) - - err = runShell(dir.Name, - "git init", - "git config --local user.email test", - "git config --local user.name test", - "git add myapp.Dockerfile", - "git commit -m initial", - "git branch v1", - "git update-server-info", - ) - require.NoError(t, err) - - cmd := exec.Command("git", "rev-parse", "v1") - cmd.Dir = dir.Name - expectedGitSHA, err := cmd.Output() - require.NoError(t, err) - - server := httptest.NewServer(http.FileServer(http.Dir(filepath.Clean(dir.Name)))) - defer server.Close() - - target := registry + "/buildkit/testwithprovenance:git" + require.Equal(t, expectedURL+"/.git#v1", pred.BuildDefinition.ResolvedDependencies[2].URI) + require.Equal(t, strings.TrimSpace(string(expectedGitSHA)), pred.BuildDefinition.ResolvedDependencies[2].Digest["sha1"]) + } else { + require.Equal(t, 2, len(pred.BuildDefinition.ResolvedDependencies), "%+v", pred.BuildDefinition.ResolvedDependencies) - // inject dummy credentials to test that they are masked - expectedURL 
:= strings.Replace(server.URL, "http://", "http://xxxxx:xxxxx@", 1) - require.NotEqual(t, expectedURL, server.URL) - server.URL = strings.Replace(server.URL, "http://", "http://user:pass@", 1) + require.Equal(t, expBase, pred.BuildDefinition.ResolvedDependencies[0].URI) + require.NotEmpty(t, pred.BuildDefinition.ResolvedDependencies[0].Digest["sha256"]) - _, err = f.Solve(sb.Context(), c, client.SolveOpt{ - FrontendAttrs: map[string]string{ - "context": server.URL + "/.git#v1", - "attest:provenance": "", - "filename": "myapp.Dockerfile", - }, - Exports: []client.ExportEntry{ - { - Type: client.ExporterImage, - Attrs: map[string]string{ - "name": target, - "push": "true", - }, - }, - }, - }, nil) - require.NoError(t, err) + require.Equal(t, expectedURL+"/.git#v1", pred.BuildDefinition.ResolvedDependencies[1].URI) + require.Equal(t, strings.TrimSpace(string(expectedGitSHA)), pred.BuildDefinition.ResolvedDependencies[1].Digest["sha1"]) + } - desc, provider, err := contentutil.ProviderFromRef(target) - require.NoError(t, err) - imgs, err := testutil.ReadImages(sb.Context(), provider, desc) - require.NoError(t, err) - require.Equal(t, 2, len(imgs.Images)) + require.Equal(t, 0, len(pred.BuildDefinition.ExternalParameters.Request.Locals)) - img := imgs.Find(platforms.Format(platforms.Normalize(platforms.DefaultSpec()))) - require.NotNil(t, img) - require.Equal(t, []byte("git\n"), img.Layers[1]["foo"].Data) + require.True(t, pred.RunDetails.Metadata.Completeness.ResolvedDependencies) + require.True(t, pred.RunDetails.Metadata.Hermetic) - att := imgs.Find("unknown/unknown") - require.NotNil(t, att) - require.Equal(t, string(img.Desc.Digest), att.Desc.Annotations["vnd.docker.reference.digest"]) - require.Equal(t, "attestation-manifest", att.Desc.Annotations["vnd.docker.reference.type"]) - var attest intoto.Statement - require.NoError(t, json.Unmarshal(att.LayersRaw[0], &attest)) - require.Equal(t, "https://in-toto.io/Statement/v0.1", attest.Type) - require.Equal(t, 
"https://slsa.dev/provenance/v0.2", attest.PredicateType) // intentionally not const + if isClient { + require.False(t, pred.RunDetails.Metadata.Completeness.Request) + } else { + require.True(t, pred.RunDetails.Metadata.Completeness.Request) + } + require.False(t, pred.RunDetails.Metadata.Reproducible) - type stmtT struct { - Predicate provenancetypes.ProvenancePredicate `json:"predicate"` - } - var stmt stmtT - require.NoError(t, json.Unmarshal(att.LayersRaw[0], &stmt)) - pred := stmt.Predicate + require.Equal(t, 0, len(pred.RunDetails.Metadata.BuildKitMetadata.VCS), "%+v", pred.RunDetails.Metadata.BuildKitMetadata.VCS) + } else { + require.Equal(t, "https://slsa.dev/provenance/v0.2", attest.PredicateType) // intentionally not const - _, isClient := f.(*clientFrontend) - _, isGateway := f.(*gatewayFrontend) + type stmtT struct { + Predicate provenancetypes.ProvenancePredicateSLSA02 `json:"predicate"` + } + var stmt stmtT + require.NoError(t, json.Unmarshal(att.LayersRaw[0], &stmt)) + pred := stmt.Predicate + + if isClient { + require.Empty(t, pred.Invocation.Parameters.Frontend) + require.Equal(t, "", pred.Invocation.ConfigSource.URI) + require.Equal(t, "", pred.Invocation.ConfigSource.EntryPoint) + } else { + require.NotEmpty(t, pred.Invocation.Parameters.Frontend) + require.Equal(t, expectedURL+"/.git#v1", pred.Invocation.ConfigSource.URI) + require.Equal(t, "myapp.Dockerfile", pred.Invocation.ConfigSource.EntryPoint) + } - if isClient { - require.Empty(t, pred.Invocation.Parameters.Frontend) - require.Equal(t, "", pred.Invocation.ConfigSource.URI) - require.Equal(t, "", pred.Invocation.ConfigSource.EntryPoint) - } else { - require.NotEmpty(t, pred.Invocation.Parameters.Frontend) - require.Equal(t, expectedURL+"/.git#v1", pred.Invocation.ConfigSource.URI) - require.Equal(t, "myapp.Dockerfile", pred.Invocation.ConfigSource.EntryPoint) - } + expBase := "pkg:docker/busybox@latest?platform=" + 
url.PathEscape(platforms.Format(platforms.Normalize(platforms.DefaultSpec()))) + if isGateway { + require.Equal(t, 3, len(pred.Materials), "%+v", pred.Materials) - expBase := "pkg:docker/busybox@latest?platform=" + url.PathEscape(platforms.Format(platforms.Normalize(platforms.DefaultSpec()))) - if isGateway { - require.Equal(t, 3, len(pred.Materials), "%+v", pred.Materials) + require.Contains(t, pred.Materials[0].URI, "pkg:docker/buildkit_test/") + require.NotEmpty(t, pred.Materials[0].Digest) - require.Contains(t, pred.Materials[0].URI, "pkg:docker/buildkit_test/") - require.NotEmpty(t, pred.Materials[0].Digest) + require.Equal(t, expBase, pred.Materials[1].URI) + require.NotEmpty(t, pred.Materials[1].Digest["sha256"]) - require.Equal(t, expBase, pred.Materials[1].URI) - require.NotEmpty(t, pred.Materials[1].Digest["sha256"]) + require.Equal(t, expectedURL+"/.git#v1", pred.Materials[2].URI) + require.Equal(t, strings.TrimSpace(string(expectedGitSHA)), pred.Materials[2].Digest["sha1"]) + } else { + require.Equal(t, 2, len(pred.Materials), "%+v", pred.Materials) - require.Equal(t, expectedURL+"/.git#v1", pred.Materials[2].URI) - require.Equal(t, strings.TrimSpace(string(expectedGitSHA)), pred.Materials[2].Digest["sha1"]) - } else { - require.Equal(t, 2, len(pred.Materials), "%+v", pred.Materials) + require.Equal(t, expBase, pred.Materials[0].URI) + require.NotEmpty(t, pred.Materials[0].Digest["sha256"]) - require.Equal(t, expBase, pred.Materials[0].URI) - require.NotEmpty(t, pred.Materials[0].Digest["sha256"]) + require.Equal(t, expectedURL+"/.git#v1", pred.Materials[1].URI) + require.Equal(t, strings.TrimSpace(string(expectedGitSHA)), pred.Materials[1].Digest["sha1"]) + } - require.Equal(t, expectedURL+"/.git#v1", pred.Materials[1].URI) - require.Equal(t, strings.TrimSpace(string(expectedGitSHA)), pred.Materials[1].Digest["sha1"]) - } + require.Equal(t, 0, len(pred.Invocation.Parameters.Locals)) - require.Equal(t, 0, len(pred.Invocation.Parameters.Locals)) + 
require.True(t, pred.Metadata.Completeness.Materials) + require.True(t, pred.Metadata.Completeness.Environment) + require.True(t, pred.Metadata.Hermetic) - require.True(t, pred.Metadata.Completeness.Materials) - require.True(t, pred.Metadata.Completeness.Environment) - require.True(t, pred.Metadata.Hermetic) + if isClient { + require.False(t, pred.Metadata.Completeness.Parameters) + } else { + require.True(t, pred.Metadata.Completeness.Parameters) + } + require.False(t, pred.Metadata.Reproducible) - if isClient { - require.False(t, pred.Metadata.Completeness.Parameters) - } else { - require.True(t, pred.Metadata.Completeness.Parameters) + require.Equal(t, 0, len(pred.Metadata.BuildKitMetadata.VCS), "%+v", pred.Metadata.BuildKitMetadata.VCS) + } + }) } - require.False(t, pred.Metadata.Reproducible) - - require.Equal(t, 0, len(pred.Metadata.BuildKitMetadata.VCS), "%+v", pred.Metadata.BuildKitMetadata.VCS) } func testMultiPlatformProvenance(t *testing.T, sb integration.Sandbox) { @@ -472,7 +659,7 @@ RUN echo "ok-$TARGETARCH" > /foo require.Equal(t, "https://slsa.dev/provenance/v0.2", attest.PredicateType) // intentionally not const type stmtT struct { - Predicate provenancetypes.ProvenancePredicate `json:"predicate"` + Predicate provenancetypes.ProvenancePredicateSLSA02 `json:"predicate"` } var stmt stmtT require.NoError(t, json.Unmarshal(att.LayersRaw[0], &stmt)) @@ -654,7 +841,7 @@ func testClientFrontendProvenance(t *testing.T, sb integration.Sandbox) { require.Equal(t, "https://slsa.dev/provenance/v0.2", attest.PredicateType) // intentionally not const type stmtT struct { - Predicate provenancetypes.ProvenancePredicate `json:"predicate"` + Predicate provenancetypes.ProvenancePredicateSLSA02 `json:"predicate"` } var stmt stmtT require.NoError(t, json.Unmarshal(att.LayersRaw[0], &stmt)) @@ -800,7 +987,7 @@ func testClientLLBProvenance(t *testing.T, sb integration.Sandbox) { require.Equal(t, "https://slsa.dev/provenance/v0.2", attest.PredicateType) // intentionally 
not const type stmtT struct { - Predicate provenancetypes.ProvenancePredicate `json:"predicate"` + Predicate provenancetypes.ProvenancePredicateSLSA02 `json:"predicate"` } var stmt stmtT require.NoError(t, json.Unmarshal(att.LayersRaw[0], &stmt)) @@ -880,7 +1067,7 @@ RUN --mount=type=secret,id=mysecret --mount=type=secret,id=othersecret --mount=t att := imgs.FindAttestation(expPlatform) type stmtT struct { - Predicate provenancetypes.ProvenancePredicate `json:"predicate"` + Predicate provenancetypes.ProvenancePredicateSLSA02 `json:"predicate"` } var stmt stmtT require.NoError(t, json.Unmarshal(att.LayersRaw[0], &stmt)) @@ -1007,7 +1194,7 @@ EOF att := imgs.FindAttestation(expPlatform) type stmtT struct { - Predicate provenancetypes.ProvenancePredicate `json:"predicate"` + Predicate provenancetypes.ProvenancePredicateSLSA02 `json:"predicate"` } var stmt stmtT require.NoError(t, json.Unmarshal(att.LayersRaw[0], &stmt)) @@ -1260,7 +1447,7 @@ ADD bar bar`) att := imgs.FindAttestation(expPlatform) type stmtT struct { - Predicate provenancetypes.ProvenancePredicate `json:"predicate"` + Predicate provenancetypes.ProvenancePredicateSLSA02 `json:"predicate"` } var stmt stmtT require.NoError(t, json.Unmarshal(att.LayersRaw[0], &stmt)) @@ -1454,7 +1641,7 @@ COPY bar bar2 require.NotEqual(t, 0, len(provDt)) - var pred provenancetypes.ProvenancePredicate + var pred provenancetypes.ProvenancePredicateSLSA02 require.NoError(t, json.Unmarshal(provDt, &pred)) sources := pred.Metadata.BuildKitMetadata.Source.Infos @@ -1535,7 +1722,7 @@ RUN date +%s > /b.txt require.NotNil(t, att) var stmt struct { - Predicate provenancetypes.ProvenancePredicate `json:"predicate"` + Predicate provenancetypes.ProvenancePredicateSLSA02 `json:"predicate"` } require.NoError(t, json.Unmarshal(att.LayersRaw[0], &stmt)) pred := stmt.Predicate diff --git a/frontend/dockerfile/docs/reference.md b/frontend/dockerfile/docs/reference.md index c5105e524876..5b32b7f01e13 100644 --- 
a/frontend/dockerfile/docs/reference.md +++ b/frontend/dockerfile/docs/reference.md @@ -2608,15 +2608,16 @@ RUN echo "I'm building for $TARGETPLATFORM" ### BuildKit built-in build args -| Arg | Type | Description | -| ------------------------------- | ------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| `BUILDKIT_CACHE_MOUNT_NS` | String | Set optional cache ID namespace. | -| `BUILDKIT_CONTEXT_KEEP_GIT_DIR` | Bool | Trigger Git context to keep the `.git` directory. | -| `BUILDKIT_INLINE_CACHE`[^2] | Bool | Inline cache metadata to image config or not. | -| `BUILDKIT_MULTI_PLATFORM` | Bool | Opt into deterministic output regardless of multi-platform output or not. | -| `BUILDKIT_SANDBOX_HOSTNAME` | String | Set the hostname (default `buildkitsandbox`) | -| `BUILDKIT_SYNTAX` | String | Set frontend image | -| `SOURCE_DATE_EPOCH` | Int | Set the Unix timestamp for created image and layers. More info from [reproducible builds](https://reproducible-builds.org/docs/source-date-epoch/). Supported since Dockerfile 1.5, BuildKit 0.11 | +| Arg | Type | Description | +|----------------------------------|--------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| `BUILDKIT_CACHE_MOUNT_NS` | String | Set optional cache ID namespace. | +| `BUILDKIT_CONTEXT_KEEP_GIT_DIR` | Bool | Trigger Git context to keep the `.git` directory. | +| `BUILDKIT_HISTORY_PROVENANCE_V1` | Bool | Enable [SLSA Provenance v1](https://slsa.dev/spec/v1.1/provenance) for build history record. | +| `BUILDKIT_INLINE_CACHE`[^2] | Bool | Inline cache metadata to image config or not. | +| `BUILDKIT_MULTI_PLATFORM` | Bool | Opt into deterministic output regardless of multi-platform output or not. 
| +| `BUILDKIT_SANDBOX_HOSTNAME` | String | Set the hostname (default `buildkitsandbox`) | +| `BUILDKIT_SYNTAX` | String | Set frontend image | +| `SOURCE_DATE_EPOCH` | Int | Set the Unix timestamp for created image and layers. More info from [reproducible builds](https://reproducible-builds.org/docs/source-date-epoch/). Supported since Dockerfile 1.5, BuildKit 0.11 | #### Example: keep `.git` dir diff --git a/go.mod b/go.mod index 6b4944c41f62..b8a7567b1e2f 100644 --- a/go.mod +++ b/go.mod @@ -45,7 +45,7 @@ require ( github.com/hashicorp/go-immutable-radix/v2 v2.1.0 github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/golang-lru/v2 v2.0.7 - github.com/in-toto/in-toto-golang v0.5.0 + github.com/in-toto/in-toto-golang v0.9.0 github.com/klauspost/compress v1.18.0 github.com/mitchellh/hashstructure/v2 v2.0.2 github.com/moby/docker-image-spec v1.3.1 @@ -172,7 +172,7 @@ require ( github.com/prometheus/common v0.62.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sasha-s/go-deadlock v0.3.5 // indirect - github.com/secure-systems-lab/go-securesystemslib v0.4.0 // indirect + github.com/secure-systems-lab/go-securesystemslib v0.6.0 // indirect github.com/shibumi/go-pathspec v1.3.0 // indirect github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635 // indirect github.com/vbatts/tar-split v0.12.1 // indirect diff --git a/go.sum b/go.sum index ecfe6fca2628..c918a5f73649 100644 --- a/go.sum +++ b/go.sum @@ -245,8 +245,8 @@ github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/b github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/ianlancetaylor/demangle v0.0.0-20210905161508-09a460cdf81d/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w= -github.com/in-toto/in-toto-golang v0.5.0 h1:hb8bgwr0M2hGdDsLjkJ3ZqJ8JFLL/tgYdAxF/XEFBbY= 
-github.com/in-toto/in-toto-golang v0.5.0/go.mod h1:/Rq0IZHLV7Ku5gielPT4wPHJfH1GdHMCq8+WPxw8/BE= +github.com/in-toto/in-toto-golang v0.9.0 h1:tHny7ac4KgtsfrG6ybU8gVOZux2H8jN05AXJ9EBM1XU= +github.com/in-toto/in-toto-golang v0.9.0/go.mod h1:xsBVrVsHNsB61++S6Dy2vWosKhuA3lUTQd+eF9HdeMo= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.7/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= @@ -366,8 +366,8 @@ github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/sasha-s/go-deadlock v0.3.5 h1:tNCOEEDG6tBqrNDOX35j/7hL5FcFViG6awUGROb2NsU= github.com/sasha-s/go-deadlock v0.3.5/go.mod h1:bugP6EGbdGYObIlx7pUZtWqlvo8k9H6vCBBsiChJQ5U= -github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE= -github.com/secure-systems-lab/go-securesystemslib v0.4.0/go.mod h1:FGBZgq2tXWICsxWQW1msNf49F0Pf2Op5Htayx335Qbs= +github.com/secure-systems-lab/go-securesystemslib v0.6.0 h1:T65atpAVCJQK14UA57LMdZGpHi4QYSH/9FZyNGqMYIA= +github.com/secure-systems-lab/go-securesystemslib v0.6.0/go.mod h1:8Mtpo9JKks/qhPG4HGZ2LGMvrPbzuxwfz/f/zLfEWkk= github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b h1:h+3JX2VoWTFuyQEo87pStk/a99dzIO1mM9KxIyLPGTU= github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b/go.mod h1:/yeG0My1xr/u+HZrFQ1tOQQQQrOawfyMUH13ai5brBc= github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI= diff --git a/solver/llbsolver/proc/provenance.go b/solver/llbsolver/proc/provenance.go index 618906866cf2..f32ab4fd96b3 100644 --- a/solver/llbsolver/proc/provenance.go +++ b/solver/llbsolver/proc/provenance.go @@ -5,18 +5,18 @@ import ( "encoding/json" "strconv" - slsa02 
"github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" "github.com/moby/buildkit/executor/resources" "github.com/moby/buildkit/exporter/containerimage/exptypes" gatewaypb "github.com/moby/buildkit/frontend/gateway/pb" "github.com/moby/buildkit/solver" "github.com/moby/buildkit/solver/llbsolver" + provenancetypes "github.com/moby/buildkit/solver/llbsolver/provenance/types" "github.com/moby/buildkit/solver/result" "github.com/moby/buildkit/util/tracing" "github.com/pkg/errors" ) -func ProvenanceProcessor(attrs map[string]string) llbsolver.Processor { +func ProvenanceProcessor(slsaVersion provenancetypes.ProvenanceSLSA, attrs map[string]string) llbsolver.Processor { return func(ctx context.Context, res *llbsolver.Result, s *llbsolver.Solver, j *solver.Job, usage *resources.SysSampler) (*llbsolver.Result, error) { span, ctx := tracing.StartSpan(ctx, "create provenance attestation") defer span.End() @@ -46,7 +46,7 @@ func ProvenanceProcessor(attrs map[string]string) llbsolver.Processor { return nil, errors.Errorf("could not find ref %s", p.ID) } - pc, err := llbsolver.NewProvenanceCreator(ctx, cp, ref, attrs, j, usage) + pc, err := llbsolver.NewProvenanceCreator(ctx, slsaVersion, cp, ref, attrs, j, usage) if err != nil { return nil, err } @@ -63,7 +63,7 @@ func ProvenanceProcessor(attrs map[string]string) llbsolver.Processor { result.AttestationInlineOnlyKey: []byte(strconv.FormatBool(inlineOnly)), }, InToto: result.InTotoAttestation{ - PredicateType: slsa02.PredicateSLSAProvenance, + PredicateType: pc.PredicateType(), }, Path: filename, ContentFunc: func(ctx context.Context) ([]byte, error) { diff --git a/solver/llbsolver/provenance.go b/solver/llbsolver/provenance.go index e3423a764f54..627b3a0e95c8 100644 --- a/solver/llbsolver/provenance.go +++ b/solver/llbsolver/provenance.go @@ -9,6 +9,8 @@ import ( "time" "github.com/containerd/platforms" + slsa02 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" + slsa1 
"github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" "github.com/moby/buildkit/cache" "github.com/moby/buildkit/cache/config" "github.com/moby/buildkit/client/llb/sourceresolver" @@ -330,13 +332,14 @@ func captureProvenance(ctx context.Context, res solver.CachedResultWithProvenanc } type ProvenanceCreator struct { - pr *provenancetypes.ProvenancePredicate - j *solver.Job - sampler *resources.SysSampler - addLayers func(context.Context) error + pr *provenancetypes.ProvenancePredicateSLSA02 + slsaVersion provenancetypes.ProvenanceSLSA + j *solver.Job + sampler *resources.SysSampler + addLayers func(context.Context) error } -func NewProvenanceCreator(ctx context.Context, cp *provenance.Capture, res solver.ResultProxy, attrs map[string]string, j *solver.Job, usage *resources.SysSampler) (*ProvenanceCreator, error) { +func NewProvenanceCreator(ctx context.Context, slsaVersion provenancetypes.ProvenanceSLSA, cp *provenance.Capture, res solver.ResultProxy, attrs map[string]string, j *solver.Job, usage *resources.SysSampler) (*ProvenanceCreator, error) { var reproducible bool if v, ok := attrs["reproducible"]; ok { b, err := strconv.ParseBool(v) @@ -437,9 +440,8 @@ func NewProvenanceCreator(ctx context.Context, cp *provenance.Capture, res solve if len(m) != 0 { if pr.Metadata == nil { - pr.Metadata = &provenancetypes.ProvenanceMetadata{} + pr.Metadata = &provenancetypes.ProvenanceMetadataSLSA02{} } - pr.Metadata.BuildKitMetadata.Layers = m } @@ -450,9 +452,10 @@ func NewProvenanceCreator(ctx context.Context, cp *provenance.Capture, res solve } pc := &ProvenanceCreator{ - pr: pr, - j: j, - addLayers: addLayers, + pr: pr, + slsaVersion: slsaVersion, + j: j, + addLayers: addLayers, } if withUsage { pc.sampler = usage @@ -460,7 +463,14 @@ func NewProvenanceCreator(ctx context.Context, cp *provenance.Capture, res solve return pc, nil } -func (p *ProvenanceCreator) Predicate(ctx context.Context) (*provenancetypes.ProvenancePredicate, error) { +func (p 
*ProvenanceCreator) PredicateType() string { + if p.slsaVersion == provenancetypes.ProvenanceSLSA1 { + return slsa1.PredicateSLSAProvenance + } + return slsa02.PredicateSLSAProvenance +} + +func (p *ProvenanceCreator) Predicate(ctx context.Context) (any, error) { end := p.j.RegisterCompleteTime() p.pr.Metadata.BuildFinishedOn = &end @@ -478,6 +488,10 @@ func (p *ProvenanceCreator) Predicate(ctx context.Context) (*provenancetypes.Pro p.pr.Metadata.BuildKitMetadata.SysUsage = sysSamples } + if p.slsaVersion == provenancetypes.ProvenanceSLSA1 { + return provenancetypes.ConvertSLSA02ToSLSA1(p.pr), nil + } + return p.pr, nil } @@ -553,7 +567,7 @@ func resolveRemotes(ctx context.Context, res solver.Result) ([]*solver.Remote, e return remotes, nil } -func AddBuildConfig(ctx context.Context, p *provenancetypes.ProvenancePredicate, c *provenance.Capture, rp solver.ResultProxy, withUsage bool) (map[digest.Digest]int, error) { +func AddBuildConfig(ctx context.Context, p *provenancetypes.ProvenancePredicateSLSA02, c *provenance.Capture, rp solver.ResultProxy, withUsage bool) (map[digest.Digest]int, error) { def := rp.Definition() steps, indexes, err := toBuildSteps(def, c, withUsage) if err != nil { @@ -595,7 +609,7 @@ func AddBuildConfig(ctx context.Context, p *provenancetypes.ProvenancePredicate, } if p.Metadata == nil { - p.Metadata = &provenancetypes.ProvenanceMetadata{} + p.Metadata = &provenancetypes.ProvenanceMetadataSLSA02{} } p.Metadata.BuildKitMetadata.Source = &provenancetypes.Source{ Infos: sis, diff --git a/solver/llbsolver/provenance/predicate.go b/solver/llbsolver/provenance/predicate.go index 365ad2c7c2c9..9dbdec6fd761 100644 --- a/solver/llbsolver/provenance/predicate.go +++ b/solver/llbsolver/provenance/predicate.go @@ -83,12 +83,12 @@ func findMaterial(srcs provenancetypes.Sources, uri string) (*slsa.ProvenanceMat return nil, false } -func NewPredicate(c *Capture) (*provenancetypes.ProvenancePredicate, error) { +func NewPredicate(c *Capture) 
(*provenancetypes.ProvenancePredicateSLSA02, error) { materials, err := slsaMaterials(c.Sources) if err != nil { return nil, err } - inv := provenancetypes.ProvenanceInvocation{} + inv := provenancetypes.ProvenanceInvocationSLSA02{} contextKey := "context" if v, ok := c.Args["contextkey"]; ok && v != "" { @@ -154,13 +154,13 @@ func NewPredicate(c *Capture) (*provenancetypes.ProvenancePredicate, error) { } } - pr := &provenancetypes.ProvenancePredicate{ + pr := &provenancetypes.ProvenancePredicateSLSA02{ Invocation: inv, ProvenancePredicate: slsa02.ProvenancePredicate{ BuildType: provenancetypes.BuildKitBuildType, Materials: materials, }, - Metadata: &provenancetypes.ProvenanceMetadata{ + Metadata: &provenancetypes.ProvenanceMetadataSLSA02{ ProvenanceMetadata: slsa02.ProvenanceMetadata{ Completeness: slsa02.ProvenanceComplete{ Parameters: c.Frontend != "", diff --git a/solver/llbsolver/provenance/types/types.go b/solver/llbsolver/provenance/types/types.go index debc74587f8d..869ad0b5b126 100644 --- a/solver/llbsolver/provenance/types/types.go +++ b/solver/llbsolver/provenance/types/types.go @@ -1,11 +1,16 @@ package types import ( + "slices" + + slsa "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" slsa02 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" + slsa1 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" resourcestypes "github.com/moby/buildkit/executor/resources/types" "github.com/moby/buildkit/solver/pb" digest "github.com/opencontainers/go-digest" ocispecs "github.com/opencontainers/image-spec/specs-go/v1" + "github.com/pkg/errors" ) const ( @@ -75,19 +80,88 @@ type Sources struct { Local []LocalSource } -type ProvenancePredicate struct { +const ( + ProvenanceSLSA1 = ProvenanceSLSA("v1") + ProvenanceSLSA02 = ProvenanceSLSA("v0.2") +) + +type ProvenanceSLSA string + +var provenanceSLSAs = []ProvenanceSLSA{ + ProvenanceSLSA1, + ProvenanceSLSA02, +} + +func (ps *ProvenanceSLSA) Validate() error { + if *ps == "" { 
+ return errors.New("provenance SLSA version cannot be empty") + } + if slices.Contains(provenanceSLSAs, *ps) { + return nil + } + return errors.New("invalid provenance SLSA version: " + string(*ps)) +} + +type ProvenancePredicateSLSA02 struct { slsa02.ProvenancePredicate - Invocation ProvenanceInvocation `json:"invocation,omitempty"` - BuildConfig *BuildConfig `json:"buildConfig,omitempty"` - Metadata *ProvenanceMetadata `json:"metadata,omitempty"` + Invocation ProvenanceInvocationSLSA02 `json:"invocation,omitempty"` + BuildConfig *BuildConfig `json:"buildConfig,omitempty"` + Metadata *ProvenanceMetadataSLSA02 `json:"metadata,omitempty"` } -type ProvenanceInvocation struct { +type ProvenanceInvocationSLSA02 struct { ConfigSource slsa02.ConfigSource `json:"configSource,omitempty"` Parameters Parameters `json:"parameters,omitempty"` Environment Environment `json:"environment,omitempty"` } +type ProvenanceMetadataSLSA02 struct { + slsa02.ProvenanceMetadata + BuildKitMetadata BuildKitMetadata `json:"https://mobyproject.org/buildkit@v1#metadata,omitempty"` + Hermetic bool `json:"https://mobyproject.org/buildkit@v1#hermetic,omitempty"` +} + +type ProvenancePredicateSLSA1 struct { + slsa1.ProvenancePredicate + BuildDefinition ProvenanceBuildDefinitionSLSA1 `json:"buildDefinition,omitempty"` + RunDetails ProvenanceRunDetailsSLSA1 `json:"runDetails,omitempty"` +} + +type ProvenanceBuildDefinitionSLSA1 struct { + slsa1.ProvenanceBuildDefinition + ExternalParameters ProvenanceExternalParametersSLSA1 `json:"externalParameters,omitempty"` + InternalParameters ProvenanceInternalParametersSLSA1 `json:"internalParameters,omitempty"` +} + +type ProvenanceRunDetailsSLSA1 struct { + slsa1.ProvenanceRunDetails + Metadata *ProvenanceMetadataSLSA1 `json:"metadata,omitempty"` +} + +type ProvenanceExternalParametersSLSA1 struct { + ConfigSource ProvenanceConfigSourceSLSA1 `json:"configSource,omitempty"` + Request Parameters `json:"request,omitempty"` +} + +type 
ProvenanceConfigSourceSLSA1 struct { + URI string `json:"uri,omitempty"` + Digest slsa.DigestSet `json:"digest,omitempty"` + Path string `json:"path,omitempty"` +} + +type ProvenanceInternalParametersSLSA1 struct { + BuildConfig *BuildConfig `json:"buildConfig,omitempty"` + BuilderPlatform string `json:"builderPlatform"` +} + +type ProvenanceMetadataSLSA1 struct { + slsa1.BuildMetadata + BuildKitMetadata BuildKitMetadata `json:"buildkit_metadata,omitempty"` + Hermetic bool `json:"buildkit_hermetic,omitempty"` + Completeness BuildKitComplete `json:"buildkit_completeness,omitempty"` + Reproducible bool `json:"buildkit_reproducible,omitempty"` +} + type Parameters struct { Frontend string `json:"frontend,omitempty"` Args map[string]string `json:"args,omitempty"` @@ -102,15 +176,86 @@ type Environment struct { Platform string `json:"platform"` } -type ProvenanceMetadata struct { - slsa02.ProvenanceMetadata - BuildKitMetadata BuildKitMetadata `json:"https://mobyproject.org/buildkit@v1#metadata,omitempty"` - Hermetic bool `json:"https://mobyproject.org/buildkit@v1#hermetic,omitempty"` -} - type BuildKitMetadata struct { VCS map[string]string `json:"vcs,omitempty"` Source *Source `json:"source,omitempty"` Layers map[string][][]ocispecs.Descriptor `json:"layers,omitempty"` SysUsage []*resourcestypes.SysSample `json:"sysUsage,omitempty"` } + +type BuildKitComplete struct { + Request bool `json:"request"` + ResolvedDependencies bool `json:"resolvedDependencies"` +} + +// ConvertSLSA02ToSLSA1 converts a SLSA 0.2 provenance predicate to a SLSA 1.0 +// provenance predicate. +// FIXME: It should be the other way around when v1 is the default. 
+func ConvertSLSA02ToSLSA1(p02 *ProvenancePredicateSLSA02) *ProvenancePredicateSLSA1 { + if p02 == nil { + return nil + } + + var resolvedDeps []slsa1.ResourceDescriptor + for _, m := range p02.Materials { + resolvedDeps = append(resolvedDeps, slsa1.ResourceDescriptor{ + URI: m.URI, + Digest: m.Digest, + }) + } + + buildDef := ProvenanceBuildDefinitionSLSA1{ + ProvenanceBuildDefinition: slsa1.ProvenanceBuildDefinition{ + BuildType: "https://github.com/moby/buildkit/blob/master/docs/attestations/slsa-definitions.md", + ResolvedDependencies: resolvedDeps, + }, + ExternalParameters: ProvenanceExternalParametersSLSA1{ + ConfigSource: ProvenanceConfigSourceSLSA1{ + URI: p02.Invocation.ConfigSource.URI, + Digest: p02.Invocation.ConfigSource.Digest, + Path: p02.Invocation.ConfigSource.EntryPoint, + }, + Request: p02.Invocation.Parameters, + }, + InternalParameters: ProvenanceInternalParametersSLSA1{ + BuildConfig: p02.BuildConfig, + BuilderPlatform: p02.Invocation.Environment.Platform, + }, + } + + var meta *ProvenanceMetadataSLSA1 + if p02.Metadata != nil { + meta = &ProvenanceMetadataSLSA1{ + BuildMetadata: slsa1.BuildMetadata{ + InvocationID: p02.Metadata.BuildInvocationID, + StartedOn: p02.Metadata.BuildStartedOn, + FinishedOn: p02.Metadata.BuildFinishedOn, + }, + BuildKitMetadata: p02.Metadata.BuildKitMetadata, + Hermetic: p02.Metadata.Hermetic, + Completeness: BuildKitComplete{ + Request: p02.Metadata.Completeness.Parameters, + ResolvedDependencies: p02.Metadata.Completeness.Materials, + }, + Reproducible: p02.Metadata.Reproducible, + } + } + + runDetails := ProvenanceRunDetailsSLSA1{ + ProvenanceRunDetails: slsa1.ProvenanceRunDetails{ + Builder: slsa1.Builder{ + ID: p02.Builder.ID, + // TODO: handle builder components versions + // Version: map[string]string{ + // "buildkit": version.Version, + // }, + }, + }, + Metadata: meta, + } + + return &ProvenancePredicateSLSA1{ + BuildDefinition: buildDef, + RunDetails: runDetails, + } +} diff --git 
a/solver/llbsolver/solver.go b/solver/llbsolver/solver.go index f544785f3139..11b100780c84 100644 --- a/solver/llbsolver/solver.go +++ b/solver/llbsolver/solver.go @@ -6,12 +6,12 @@ import ( "fmt" "maps" "os" + "strconv" "strings" "sync" "time" intoto "github.com/in-toto/in-toto-golang/in_toto" - slsa02 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" controlapi "github.com/moby/buildkit/api/services/control" "github.com/moby/buildkit/cache" cacheconfig "github.com/moby/buildkit/cache/config" @@ -32,6 +32,7 @@ import ( sessionexporter "github.com/moby/buildkit/session/exporter" "github.com/moby/buildkit/solver" "github.com/moby/buildkit/solver/llbsolver/provenance" + provenancetypes "github.com/moby/buildkit/solver/llbsolver/provenance/types" "github.com/moby/buildkit/solver/result" spb "github.com/moby/buildkit/sourcepolicy/pb" "github.com/moby/buildkit/util/bklog" @@ -229,15 +230,22 @@ func (s *Solver) recordBuildHistory(ctx context.Context, id string, req frontend } } + slsaVersion := provenancetypes.ProvenanceSLSA02 + if v, ok := req.FrontendOpt["build-arg:BUILDKIT_HISTORY_PROVENANCE_V1"]; ok { + if b, err := strconv.ParseBool(v); err == nil && b { + slsaVersion = provenancetypes.ProvenanceSLSA1 + } + } + makeProvenance := func(name string, res solver.ResultProxy, cap *provenance.Capture) (*controlapi.Descriptor, func(), error) { span, ctx := tracing.StartSpan(ctx, fmt.Sprintf("create %s history provenance", name)) defer span.End() - prc, err := NewProvenanceCreator(ctx2, cap, res, attrs, j, usage) + pc, err := NewProvenanceCreator(ctx2, slsaVersion, cap, res, attrs, j, usage) if err != nil { return nil, nil, err } - pr, err := prc.Predicate(ctx) + pr, err := pc.Predicate(ctx) if err != nil { return nil, nil, err } @@ -267,7 +275,7 @@ func (s *Solver) recordBuildHistory(ctx context.Context, id string, req frontend Size: desc.Size, MediaType: desc.MediaType, Annotations: map[string]string{ - "in-toto.io/predicate-type": 
slsa02.PredicateSLSAProvenance, + "in-toto.io/predicate-type": pc.PredicateType(), }, }, release, nil } diff --git a/vendor/github.com/in-toto/in-toto-golang/in_toto/attestations.go b/vendor/github.com/in-toto/in-toto-golang/in_toto/attestations.go new file mode 100644 index 000000000000..73aafe7e1c44 --- /dev/null +++ b/vendor/github.com/in-toto/in-toto-golang/in_toto/attestations.go @@ -0,0 +1,99 @@ +package in_toto + +import ( + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" + slsa01 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.1" + slsa02 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" + slsa1 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1" +) + +const ( + // StatementInTotoV01 is the statement type for the generalized link format + // containing statements. This is constant for all predicate types. + StatementInTotoV01 = "https://in-toto.io/Statement/v0.1" + // PredicateSPDX represents a SBOM using the SPDX standard. + // The SPDX mandates 'spdxVersion' field, so predicate type can omit + // version. + PredicateSPDX = "https://spdx.dev/Document" + // PredicateCycloneDX represents a CycloneDX SBOM + PredicateCycloneDX = "https://cyclonedx.org/bom" + // PredicateLinkV1 represents an in-toto 0.9 link. + PredicateLinkV1 = "https://in-toto.io/Link/v1" +) + +// Subject describes the set of software artifacts the statement applies to. +type Subject struct { + Name string `json:"name"` + Digest common.DigestSet `json:"digest"` +} + +// StatementHeader defines the common fields for all statements +type StatementHeader struct { + Type string `json:"_type"` + PredicateType string `json:"predicateType"` + Subject []Subject `json:"subject"` +} + +/* +Statement binds the attestation to a particular subject and identifies the +of the predicate. This struct represents a generic statement. +*/ +type Statement struct { + StatementHeader + // Predicate contains type speficic metadata. 
+ Predicate interface{} `json:"predicate"` +} + +// ProvenanceStatementSLSA01 is the definition for an entire provenance statement with SLSA 0.1 predicate. +type ProvenanceStatementSLSA01 struct { + StatementHeader + Predicate slsa01.ProvenancePredicate `json:"predicate"` +} + +// ProvenanceStatementSLSA02 is the definition for an entire provenance statement with SLSA 0.2 predicate. +type ProvenanceStatementSLSA02 struct { + StatementHeader + Predicate slsa02.ProvenancePredicate `json:"predicate"` +} + +// ProvenanceStatementSLSA1 is the definition for an entire provenance statement with SLSA 1.0 predicate. +type ProvenanceStatementSLSA1 struct { + StatementHeader + Predicate slsa1.ProvenancePredicate `json:"predicate"` +} + +// ProvenanceStatement is the definition for an entire provenance statement with SLSA 0.2 predicate. +// Deprecated: Only version-specific provenance structs will be maintained (ProvenanceStatementSLSA01, ProvenanceStatementSLSA02). +type ProvenanceStatement struct { + StatementHeader + Predicate slsa02.ProvenancePredicate `json:"predicate"` +} + +// LinkStatement is the definition for an entire link statement. +type LinkStatement struct { + StatementHeader + Predicate Link `json:"predicate"` +} + +/* +SPDXStatement is the definition for an entire SPDX statement. +This is currently not implemented. Some tooling exists here: +https://github.com/spdx/tools-golang, but this software is still in +early state. +This struct is the same as the generic Statement struct but is added for +completeness +*/ +type SPDXStatement struct { + StatementHeader + Predicate interface{} `json:"predicate"` +} + +/* +CycloneDXStatement defines a cyclonedx sbom in the predicate. It is not +currently serialized just as its SPDX counterpart. It is an empty +interface, like the generic Statement. 
+*/ +type CycloneDXStatement struct { + StatementHeader + Predicate interface{} `json:"predicate"` +} diff --git a/vendor/github.com/in-toto/in-toto-golang/in_toto/envelope.go b/vendor/github.com/in-toto/in-toto-golang/in_toto/envelope.go new file mode 100644 index 000000000000..2c8afff1f75a --- /dev/null +++ b/vendor/github.com/in-toto/in-toto-golang/in_toto/envelope.go @@ -0,0 +1,166 @@ +package in_toto + +import ( + "context" + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "os" + + "github.com/secure-systems-lab/go-securesystemslib/cjson" + "github.com/secure-systems-lab/go-securesystemslib/dsse" + "github.com/secure-systems-lab/go-securesystemslib/signerverifier" +) + +// PayloadType is the payload type used for links and layouts. +const PayloadType = "application/vnd.in-toto+json" + +// ErrInvalidPayloadType indicates that the envelope used an unknown payload type +var ErrInvalidPayloadType = errors.New("unknown payload type") + +type Envelope struct { + envelope *dsse.Envelope + payload any +} + +func loadEnvelope(env *dsse.Envelope) (*Envelope, error) { + e := &Envelope{envelope: env} + + contentBytes, err := env.DecodeB64Payload() + if err != nil { + return nil, err + } + + payload, err := loadPayload(contentBytes) + if err != nil { + return nil, err + } + e.payload = payload + + return e, nil +} + +func (e *Envelope) SetPayload(payload any) error { + encodedBytes, err := cjson.EncodeCanonical(payload) + if err != nil { + return err + } + + e.payload = payload + e.envelope = &dsse.Envelope{ + Payload: base64.StdEncoding.EncodeToString(encodedBytes), + PayloadType: PayloadType, + } + + return nil +} + +func (e *Envelope) GetPayload() any { + return e.payload +} + +func (e *Envelope) VerifySignature(key Key) error { + verifier, err := getSignerVerifierFromKey(key) + if err != nil { + return err + } + + ev, err := dsse.NewEnvelopeVerifier(verifier) + if err != nil { + return err + } + + _, err = ev.Verify(context.Background(), e.envelope) + return 
err +} + +func (e *Envelope) Sign(key Key) error { + signer, err := getSignerVerifierFromKey(key) + if err != nil { + return err + } + + es, err := dsse.NewEnvelopeSigner(signer) + if err != nil { + return err + } + + payload, err := e.envelope.DecodeB64Payload() + if err != nil { + return err + } + + env, err := es.SignPayload(context.Background(), e.envelope.PayloadType, payload) + if err != nil { + return err + } + + e.envelope = env + return nil +} + +func (e *Envelope) Sigs() []Signature { + sigs := []Signature{} + for _, s := range e.envelope.Signatures { + sigs = append(sigs, Signature{ + KeyID: s.KeyID, + Sig: s.Sig, + }) + } + return sigs +} + +func (e *Envelope) GetSignatureForKeyID(keyID string) (Signature, error) { + for _, s := range e.Sigs() { + if s.KeyID == keyID { + return s, nil + } + } + + return Signature{}, fmt.Errorf("no signature found for key '%s'", keyID) +} + +func (e *Envelope) Dump(path string) error { + jsonBytes, err := json.MarshalIndent(e.envelope, "", " ") + if err != nil { + return err + } + + // Write JSON bytes to the passed path with permissions (-rw-r--r--) + err = os.WriteFile(path, jsonBytes, 0644) + if err != nil { + return err + } + + return nil +} + +func getSignerVerifierFromKey(key Key) (dsse.SignerVerifier, error) { + sslibKey := getSSLibKeyFromKey(key) + + switch sslibKey.KeyType { + case signerverifier.RSAKeyType: + return signerverifier.NewRSAPSSSignerVerifierFromSSLibKey(&sslibKey) + case signerverifier.ED25519KeyType: + return signerverifier.NewED25519SignerVerifierFromSSLibKey(&sslibKey) + case signerverifier.ECDSAKeyType: + return signerverifier.NewECDSASignerVerifierFromSSLibKey(&sslibKey) + } + + return nil, ErrUnsupportedKeyType +} + +func getSSLibKeyFromKey(key Key) signerverifier.SSLibKey { + return signerverifier.SSLibKey{ + KeyType: key.KeyType, + KeyIDHashAlgorithms: key.KeyIDHashAlgorithms, + KeyID: key.KeyID, + Scheme: key.Scheme, + KeyVal: signerverifier.KeyVal{ + Public: key.KeyVal.Public, + Private: 
key.KeyVal.Private, + Certificate: key.KeyVal.Certificate, + }, + } +} diff --git a/vendor/github.com/in-toto/in-toto-golang/in_toto/keylib.go b/vendor/github.com/in-toto/in-toto-golang/in_toto/keylib.go index 7de482821ad4..52429ca44bee 100644 --- a/vendor/github.com/in-toto/in-toto-golang/in_toto/keylib.go +++ b/vendor/github.com/in-toto/in-toto-golang/in_toto/keylib.go @@ -13,7 +13,6 @@ import ( "errors" "fmt" "io" - "io/ioutil" "os" "strings" @@ -325,7 +324,7 @@ func (k *Key) LoadKeyReader(r io.Reader, scheme string, KeyIDHashAlgorithms []st return ErrNoPEMBlock } // Read key bytes - pemBytes, err := ioutil.ReadAll(r) + pemBytes, err := io.ReadAll(r) if err != nil { return err } @@ -344,7 +343,7 @@ func (k *Key) LoadKeyReaderDefaults(r io.Reader) error { return ErrNoPEMBlock } // Read key bytes - pemBytes, err := ioutil.ReadAll(r) + pemBytes, err := io.ReadAll(r) if err != nil { return err } @@ -366,7 +365,7 @@ func (k *Key) LoadKeyReaderDefaults(r io.Reader) error { func getDefaultKeyScheme(key interface{}) (scheme string, keyIDHashAlgorithms []string, err error) { keyIDHashAlgorithms = []string{"sha256", "sha512"} - switch key.(type) { + switch k := key.(type) { case *rsa.PublicKey, *rsa.PrivateKey: scheme = rsassapsssha256Scheme case ed25519.PrivateKey, ed25519.PublicKey: @@ -374,7 +373,7 @@ func getDefaultKeyScheme(key interface{}) (scheme string, keyIDHashAlgorithms [] case *ecdsa.PrivateKey, *ecdsa.PublicKey: scheme = ecdsaSha2nistp256 case *x509.Certificate: - return getDefaultKeyScheme(key.(*x509.Certificate).PublicKey) + return getDefaultKeyScheme(k.PublicKey) default: err = ErrUnsupportedKeyType } @@ -382,11 +381,10 @@ func getDefaultKeyScheme(key interface{}) (scheme string, keyIDHashAlgorithms [] return scheme, keyIDHashAlgorithms, err } -func (k *Key) loadKey(key interface{}, pemData *pem.Block, scheme string, keyIDHashAlgorithms []string) error { - - switch key.(type) { +func (k *Key) loadKey(keyObj interface{}, pemData *pem.Block, scheme string, 
keyIDHashAlgorithms []string) error { + switch key := keyObj.(type) { case *rsa.PublicKey: - pubKeyBytes, err := x509.MarshalPKIXPublicKey(key.(*rsa.PublicKey)) + pubKeyBytes, err := x509.MarshalPKIXPublicKey(key) if err != nil { return err } @@ -396,7 +394,7 @@ func (k *Key) loadKey(key interface{}, pemData *pem.Block, scheme string, keyIDH case *rsa.PrivateKey: // Note: RSA Public Keys will get stored as X.509 SubjectPublicKeyInfo (RFC5280) // This behavior is consistent to the securesystemslib - pubKeyBytes, err := x509.MarshalPKIXPublicKey(key.(*rsa.PrivateKey).Public()) + pubKeyBytes, err := x509.MarshalPKIXPublicKey(key.Public()) if err != nil { return err } @@ -404,16 +402,16 @@ func (k *Key) loadKey(key interface{}, pemData *pem.Block, scheme string, keyIDH return err } case ed25519.PublicKey: - if err := k.setKeyComponents(key.(ed25519.PublicKey), []byte{}, ed25519KeyType, scheme, keyIDHashAlgorithms); err != nil { + if err := k.setKeyComponents(key, []byte{}, ed25519KeyType, scheme, keyIDHashAlgorithms); err != nil { return err } case ed25519.PrivateKey: - pubKeyBytes := key.(ed25519.PrivateKey).Public() - if err := k.setKeyComponents(pubKeyBytes.(ed25519.PublicKey), key.(ed25519.PrivateKey), ed25519KeyType, scheme, keyIDHashAlgorithms); err != nil { + pubKeyBytes := key.Public() + if err := k.setKeyComponents(pubKeyBytes.(ed25519.PublicKey), key, ed25519KeyType, scheme, keyIDHashAlgorithms); err != nil { return err } case *ecdsa.PrivateKey: - pubKeyBytes, err := x509.MarshalPKIXPublicKey(key.(*ecdsa.PrivateKey).Public()) + pubKeyBytes, err := x509.MarshalPKIXPublicKey(key.Public()) if err != nil { return err } @@ -421,7 +419,7 @@ func (k *Key) loadKey(key interface{}, pemData *pem.Block, scheme string, keyIDH return err } case *ecdsa.PublicKey: - pubKeyBytes, err := x509.MarshalPKIXPublicKey(key.(*ecdsa.PublicKey)) + pubKeyBytes, err := x509.MarshalPKIXPublicKey(key) if err != nil { return err } @@ -429,7 +427,7 @@ func (k *Key) loadKey(key interface{}, 
pemData *pem.Block, scheme string, keyIDH return err } case *x509.Certificate: - err := k.loadKey(key.(*x509.Certificate).PublicKey, pemData, scheme, keyIDHashAlgorithms) + err := k.loadKey(key.PublicKey, pemData, scheme, keyIDHashAlgorithms) if err != nil { return err } diff --git a/vendor/github.com/in-toto/in-toto-golang/in_toto/model.go b/vendor/github.com/in-toto/in-toto-golang/in_toto/model.go index e22b79da320e..f56b784ea0c0 100644 --- a/vendor/github.com/in-toto/in-toto-golang/in_toto/model.go +++ b/vendor/github.com/in-toto/in-toto-golang/in_toto/model.go @@ -7,7 +7,6 @@ import ( "encoding/json" "errors" "fmt" - "io/ioutil" "os" "reflect" "regexp" @@ -15,10 +14,6 @@ import ( "strings" "time" - "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" - slsa01 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.1" - slsa02 "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2" - "github.com/secure-systems-lab/go-securesystemslib/cjson" "github.com/secure-systems-lab/go-securesystemslib/dsse" ) @@ -30,7 +25,7 @@ and private keys in PEM format stored as strings. For public keys the Private field may be an empty string. */ type KeyVal struct { - Private string `json:"private"` + Private string `json:"private,omitempty"` Public string `json:"public"` Certificate string `json:"certificate,omitempty"` } @@ -48,9 +43,6 @@ type Key struct { Scheme string `json:"scheme"` } -// PayloadType is the payload type used for links and layouts. -const PayloadType = "application/vnd.in-toto+json" - // ErrEmptyKeyField will be thrown if a field in our Key struct is empty. 
var ErrEmptyKeyField = errors.New("empty field in key") @@ -73,23 +65,6 @@ var ErrNoPublicKey = errors.New("the given key is not a public key") // for example: curve size = "521" and scheme = "ecdsa-sha2-nistp224" var ErrCurveSizeSchemeMismatch = errors.New("the scheme does not match the curve size") -const ( - // StatementInTotoV01 is the statement type for the generalized link format - // containing statements. This is constant for all predicate types. - StatementInTotoV01 = "https://in-toto.io/Statement/v0.1" - // PredicateSPDX represents a SBOM using the SPDX standard. - // The SPDX mandates 'spdxVersion' field, so predicate type can omit - // version. - PredicateSPDX = "https://spdx.dev/Document" - // PredicateCycloneDX represents a CycloneDX SBOM - PredicateCycloneDX = "https://cyclonedx.org/bom" - // PredicateLinkV1 represents an in-toto 0.9 link. - PredicateLinkV1 = "https://in-toto.io/Link/v1" -) - -// ErrInvalidPayloadType indicates that the envelope used an unkown payload type -var ErrInvalidPayloadType = errors.New("unknown payload type") - /* matchEcdsaScheme checks if the scheme suffix, matches the ecdsa key curve size. 
We do not need a full regex match here, because @@ -702,6 +677,67 @@ func validateLayout(layout Layout) error { return nil } +type Metadata interface { + Sign(Key) error + VerifySignature(Key) error + GetPayload() any + Sigs() []Signature + GetSignatureForKeyID(string) (Signature, error) + Dump(string) error +} + +func LoadMetadata(path string) (Metadata, error) { + jsonBytes, err := os.ReadFile(path) + if err != nil { + return nil, err + } + + var rawData map[string]*json.RawMessage + if err := json.Unmarshal(jsonBytes, &rawData); err != nil { + return nil, err + } + + if _, ok := rawData["payloadType"]; ok { + dsseEnv := &dsse.Envelope{} + if rawData["payload"] == nil || rawData["signatures"] == nil { + return nil, fmt.Errorf("in-toto metadata envelope requires 'payload' and 'signatures' parts") + } + + if err := json.Unmarshal(jsonBytes, dsseEnv); err != nil { + return nil, err + } + + if dsseEnv.PayloadType != PayloadType { + return nil, ErrInvalidPayloadType + } + + return loadEnvelope(dsseEnv) + } + + mb := &Metablock{} + + // Error out on missing `signed` or `signatures` field or if + // one of them has a `null` value, which would lead to a nil pointer + // dereference in Unmarshal below. + if rawData["signed"] == nil || rawData["signatures"] == nil { + return nil, fmt.Errorf("in-toto metadata requires 'signed' and 'signatures' parts") + } + + // Fully unmarshal signatures part + if err := json.Unmarshal(*rawData["signatures"], &mb.Signatures); err != nil { + return nil, err + } + + payload, err := loadPayload(*rawData["signed"]) + if err != nil { + return nil, err + } + + mb.Signed = payload + + return mb, nil +} + /* Metablock is a generic container for signable in-toto objects such as Layout or Link. It has two fields, one that contains the signable object and one that @@ -767,17 +803,13 @@ func checkRequiredJSONFields(obj map[string]interface{}, Load parses JSON formatted metadata at the passed path into the Metablock object on which it was called. 
It returns an error if it cannot parse a valid JSON formatted Metablock that contains a Link or Layout. + +Deprecated: Use LoadMetadata for a signature wrapper agnostic way to load an +envelope. */ func (mb *Metablock) Load(path string) error { - // Open file and close before returning - jsonFile, err := os.Open(path) - if err != nil { - return err - } - defer jsonFile.Close() - // Read entire file - jsonBytes, err := ioutil.ReadAll(jsonFile) + jsonBytes, err := os.ReadFile(path) if err != nil { return err } @@ -803,54 +835,14 @@ func (mb *Metablock) Load(path string) error { return err } - // Temporarily copy signed to opaque map to inspect the `_type` of signed - // and create link or layout accordingly - var signed map[string]interface{} - if err := json.Unmarshal(*rawMb["signed"], &signed); err != nil { + payload, err := loadPayload(*rawMb["signed"]) + if err != nil { return err } - if signed["_type"] == "link" { - var link Link - if err := checkRequiredJSONFields(signed, reflect.TypeOf(link)); err != nil { - return err - } - - data, err := rawMb["signed"].MarshalJSON() - if err != nil { - return err - } - decoder := json.NewDecoder(strings.NewReader(string(data))) - decoder.DisallowUnknownFields() - if err := decoder.Decode(&link); err != nil { - return err - } - mb.Signed = link - - } else if signed["_type"] == "layout" { - var layout Layout - if err := checkRequiredJSONFields(signed, reflect.TypeOf(layout)); err != nil { - return err - } - - data, err := rawMb["signed"].MarshalJSON() - if err != nil { - return err - } - decoder := json.NewDecoder(strings.NewReader(string(data))) - decoder.DisallowUnknownFields() - if err := decoder.Decode(&layout); err != nil { - return err - } - - mb.Signed = layout - - } else { - return fmt.Errorf("the '_type' field of the 'signed' part of in-toto" + - " metadata must be one of 'link' or 'layout'") - } + mb.Signed = payload - return jsonFile.Close() + return nil } /* @@ -866,7 +858,7 @@ func (mb *Metablock) Dump(path 
string) error { } // Write JSON bytes to the passed path with permissions (-rw-r--r--) - err = ioutil.WriteFile(path, jsonBytes, 0644) + err = os.WriteFile(path, jsonBytes, 0644) if err != nil { return err } @@ -883,6 +875,14 @@ func (mb *Metablock) GetSignableRepresentation() ([]byte, error) { return cjson.EncodeCanonical(mb.Signed) } +func (mb *Metablock) GetPayload() any { + return mb.Signed +} + +func (mb *Metablock) Sigs() []Signature { + return mb.Signatures +} + /* VerifySignature verifies the first signature, corresponding to the passed Key, that it finds in the Signatures field of the Metablock on which it was called. @@ -965,109 +965,3 @@ func (mb *Metablock) Sign(key Key) error { mb.Signatures = append(mb.Signatures, newSignature) return nil } - -// Subject describes the set of software artifacts the statement applies to. -type Subject struct { - Name string `json:"name"` - Digest common.DigestSet `json:"digest"` -} - -// StatementHeader defines the common fields for all statements -type StatementHeader struct { - Type string `json:"_type"` - PredicateType string `json:"predicateType"` - Subject []Subject `json:"subject"` -} - -/* -Statement binds the attestation to a particular subject and identifies the -of the predicate. This struct represents a generic statement. -*/ -type Statement struct { - StatementHeader - // Predicate contains type speficic metadata. - Predicate interface{} `json:"predicate"` -} - -// ProvenanceStatementSLSA01 is the definition for an entire provenance statement with SLSA 0.1 predicate. -type ProvenanceStatementSLSA01 struct { - StatementHeader - Predicate slsa01.ProvenancePredicate `json:"predicate"` -} - -// ProvenanceStatementSLSA02 is the definition for an entire provenance statement with SLSA 0.2 predicate. 
-type ProvenanceStatementSLSA02 struct { - StatementHeader - Predicate slsa02.ProvenancePredicate `json:"predicate"` -} - -// ProvenanceStatement is the definition for an entire provenance statement with SLSA 0.2 predicate. -// Deprecated: Only version-specific provenance structs will be maintained (ProvenanceStatementSLSA01, ProvenanceStatementSLSA02). -type ProvenanceStatement struct { - StatementHeader - Predicate slsa02.ProvenancePredicate `json:"predicate"` -} - -// LinkStatement is the definition for an entire link statement. -type LinkStatement struct { - StatementHeader - Predicate Link `json:"predicate"` -} - -/* -SPDXStatement is the definition for an entire SPDX statement. -This is currently not implemented. Some tooling exists here: -https://github.com/spdx/tools-golang, but this software is still in -early state. -This struct is the same as the generic Statement struct but is added for -completeness -*/ -type SPDXStatement struct { - StatementHeader - Predicate interface{} `json:"predicate"` -} - -/* -CycloneDXStatement defines a cyclonedx sbom in the predicate. It is not -currently serialized just as its SPDX counterpart. It is an empty -interface, like the generic Statement. -*/ -type CycloneDXStatement struct { - StatementHeader - Predicate interface{} `json:"predicate"` -} - -/* -DSSESigner provides signature generation and validation based on the SSL -Signing Spec: https://github.com/secure-systems-lab/signing-spec -as describe by: https://github.com/MarkLodato/ITE/tree/media-type/ITE/5 -It wraps the generic SSL envelope signer and enforces the correct payload -type both during signature generation and validation. -*/ -type DSSESigner struct { - signer *dsse.EnvelopeSigner -} - -func NewDSSESigner(p ...dsse.SignVerifier) (*DSSESigner, error) { - es, err := dsse.NewEnvelopeSigner(p...) 
- if err != nil { - return nil, err - } - - return &DSSESigner{ - signer: es, - }, nil -} - -func (s *DSSESigner) SignPayload(body []byte) (*dsse.Envelope, error) { - return s.signer.SignPayload(PayloadType, body) -} - -func (s *DSSESigner) Verify(e *dsse.Envelope) error { - if e.PayloadType != PayloadType { - return ErrInvalidPayloadType - } - - _, err := s.signer.Verify(e) - return err -} diff --git a/vendor/github.com/in-toto/in-toto-golang/in_toto/runlib.go b/vendor/github.com/in-toto/in-toto-golang/in_toto/runlib.go index 87e690507011..f0a55d821995 100644 --- a/vendor/github.com/in-toto/in-toto-golang/in_toto/runlib.go +++ b/vendor/github.com/in-toto/in-toto-golang/in_toto/runlib.go @@ -4,7 +4,7 @@ import ( "bytes" "errors" "fmt" - "io/ioutil" + "io" "os" "os/exec" "path/filepath" @@ -44,7 +44,7 @@ normalized to Unix-style line separators (LF) before hashing file contents. func RecordArtifact(path string, hashAlgorithms []string, lineNormalization bool) (map[string]interface{}, error) { supportedHashMappings := getHashMapping() // Read file from passed path - contents, err := ioutil.ReadFile(path) + contents, err := os.ReadFile(path) hashedContentsMap := make(map[string]interface{}) if err != nil { return nil, err @@ -92,10 +92,10 @@ the following format: If recording an artifact fails the first return value is nil and the second return value is the error. 
*/ -func RecordArtifacts(paths []string, hashAlgorithms []string, gitignorePatterns []string, lStripPaths []string, lineNormalization bool) (evalArtifacts map[string]interface{}, err error) { +func RecordArtifacts(paths []string, hashAlgorithms []string, gitignorePatterns []string, lStripPaths []string, lineNormalization bool, followSymlinkDirs bool) (evalArtifacts map[string]interface{}, err error) { // Make sure to initialize a fresh hashset for every RecordArtifacts call visitedSymlinks = NewSet() - evalArtifacts, err = recordArtifacts(paths, hashAlgorithms, gitignorePatterns, lStripPaths, lineNormalization) + evalArtifacts, err = recordArtifacts(paths, hashAlgorithms, gitignorePatterns, lStripPaths, lineNormalization, followSymlinkDirs) // pass result and error through return evalArtifacts, err } @@ -118,7 +118,7 @@ the following format: If recording an artifact fails the first return value is nil and the second return value is the error. */ -func recordArtifacts(paths []string, hashAlgorithms []string, gitignorePatterns []string, lStripPaths []string, lineNormalization bool) (map[string]interface{}, error) { +func recordArtifacts(paths []string, hashAlgorithms []string, gitignorePatterns []string, lStripPaths []string, lineNormalization bool, followSymlinkDirs bool) (map[string]interface{}, error) { artifacts := make(map[string]interface{}) for _, path := range paths { err := filepath.Walk(path, @@ -160,18 +160,35 @@ func recordArtifacts(paths []string, hashAlgorithms []string, gitignorePatterns if err != nil { return err } + info, err := os.Stat(evalSym) + if err != nil { + return err + } + targetIsDir := false + if info.IsDir() { + if !followSymlinkDirs { + // We don't follow symlinked directories + return nil + } + targetIsDir = true + } // add symlink to visitedSymlinks set // this way, we know which link we have visited already // if we visit a symlink twice, we have detected a symlink cycle visitedSymlinks.Add(path) - // We recursively call 
RecordArtifacts() to follow + // We recursively call recordArtifacts() to follow // the new path. - evalArtifacts, evalErr := recordArtifacts([]string{evalSym}, hashAlgorithms, gitignorePatterns, lStripPaths, lineNormalization) + evalArtifacts, evalErr := recordArtifacts([]string{evalSym}, hashAlgorithms, gitignorePatterns, lStripPaths, lineNormalization, followSymlinkDirs) if evalErr != nil { return evalErr } for key, value := range evalArtifacts { - artifacts[key] = value + if targetIsDir { + symlinkPath := filepath.Join(path, strings.TrimPrefix(key, evalSym)) + artifacts[symlinkPath] = value + } else { + artifacts[path] = value + } } return nil } @@ -189,8 +206,7 @@ func recordArtifacts(paths []string, hashAlgorithms []string, gitignorePatterns } } // Check if path is unique - _, existingPath := artifacts[path] - if existingPath { + if _, exists := artifacts[path]; exists { return fmt.Errorf("left stripping has resulted in non unique dictionary key: %s", path) } artifacts[path] = artifact @@ -273,8 +289,8 @@ func RunCommand(cmdArgs []string, runDir string) (map[string]interface{}, error) } // TODO: duplicate stdout, stderr - stdout, _ := ioutil.ReadAll(stdoutPipe) - stderr, _ := ioutil.ReadAll(stderrPipe) + stdout, _ := io.ReadAll(stdoutPipe) + stderr, _ := io.ReadAll(stderrPipe) retVal := waitErrToExitCode(cmd.Wait()) @@ -293,14 +309,10 @@ and materials at the passed materialPaths. The returned link is wrapped in a Metablock object. If command execution or artifact recording fails the first return value is an empty Metablock and the second return value is the error. 
*/ -func InTotoRun(name string, runDir string, materialPaths []string, productPaths []string, - cmdArgs []string, key Key, hashAlgorithms []string, gitignorePatterns []string, - lStripPaths []string, lineNormalization bool) (Metablock, error) { - var linkMb Metablock - - materials, err := RecordArtifacts(materialPaths, hashAlgorithms, gitignorePatterns, lStripPaths, lineNormalization) +func InTotoRun(name string, runDir string, materialPaths []string, productPaths []string, cmdArgs []string, key Key, hashAlgorithms []string, gitignorePatterns []string, lStripPaths []string, lineNormalization bool, followSymlinkDirs bool, useDSSE bool) (Metadata, error) { + materials, err := RecordArtifacts(materialPaths, hashAlgorithms, gitignorePatterns, lStripPaths, lineNormalization, followSymlinkDirs) if err != nil { - return linkMb, err + return nil, err } // make sure that we only run RunCommand if cmdArgs is not nil or empty @@ -308,16 +320,16 @@ func InTotoRun(name string, runDir string, materialPaths []string, productPaths if len(cmdArgs) != 0 { byProducts, err = RunCommand(cmdArgs, runDir) if err != nil { - return linkMb, err + return nil, err } } - products, err := RecordArtifacts(productPaths, hashAlgorithms, gitignorePatterns, lStripPaths, lineNormalization) + products, err := RecordArtifacts(productPaths, hashAlgorithms, gitignorePatterns, lStripPaths, lineNormalization, followSymlinkDirs) if err != nil { - return linkMb, err + return nil, err } - linkMb.Signed = Link{ + link := Link{ Type: "link", Name: name, Materials: materials, @@ -327,14 +339,25 @@ func InTotoRun(name string, runDir string, materialPaths []string, productPaths Environment: map[string]interface{}{}, } - linkMb.Signatures = []Signature{} - // We use a new feature from Go1.13 here, to check the key struct. 
- // IsZero() will return True, if the key hasn't been initialized + if useDSSE { + env := &Envelope{} + if err := env.SetPayload(link); err != nil { + return nil, err + } + + if !reflect.ValueOf(key).IsZero() { + if err := env.Sign(key); err != nil { + return nil, err + } + } + + return env, nil + } - // with other values than the default ones. + linkMb := &Metablock{Signed: link, Signatures: []Signature{}} if !reflect.ValueOf(key).IsZero() { if err := linkMb.Sign(key); err != nil { - return linkMb, err + return nil, err } } @@ -347,14 +370,13 @@ in order to provide evidence for supply chain steps that cannot be carries out by a single command. InTotoRecordStart collects the hashes of the materials before any commands are run, signs the unfinished link, and returns the link. */ -func InTotoRecordStart(name string, materialPaths []string, key Key, hashAlgorithms, gitignorePatterns []string, lStripPaths []string, lineNormalization bool) (Metablock, error) { - var linkMb Metablock - materials, err := RecordArtifacts(materialPaths, hashAlgorithms, gitignorePatterns, lStripPaths, lineNormalization) +func InTotoRecordStart(name string, materialPaths []string, key Key, hashAlgorithms, gitignorePatterns []string, lStripPaths []string, lineNormalization bool, followSymlinkDirs bool, useDSSE bool) (Metadata, error) { + materials, err := RecordArtifacts(materialPaths, hashAlgorithms, gitignorePatterns, lStripPaths, lineNormalization, followSymlinkDirs) if err != nil { - return linkMb, err + return nil, err } - linkMb.Signed = Link{ + link := Link{ Type: "link", Name: name, Materials: materials, @@ -364,9 +386,26 @@ func InTotoRecordStart(name string, materialPaths []string, key Key, hashAlgorit Environment: map[string]interface{}{}, } + if useDSSE { + env := &Envelope{} + if err := env.SetPayload(link); err != nil { + return nil, err + } + + if !reflect.ValueOf(key).IsZero() { + if err := env.Sign(key); err != nil { + return nil, err + } + } + + return env, nil + } + + 
linkMb := &Metablock{Signed: link, Signatures: []Signature{}} + linkMb.Signatures = []Signature{} if !reflect.ValueOf(key).IsZero() { if err := linkMb.Sign(key); err != nil { - return linkMb, err + return nil, err } } @@ -380,25 +419,39 @@ created by InTotoRecordStart and records the hashes of any products creted by commands run between InTotoRecordStart and InTotoRecordStop. The resultant finished link metablock is then signed by the provided key and returned. */ -func InTotoRecordStop(prelimLinkMb Metablock, productPaths []string, key Key, hashAlgorithms, gitignorePatterns []string, lStripPaths []string, lineNormalization bool) (Metablock, error) { - var linkMb Metablock - if err := prelimLinkMb.VerifySignature(key); err != nil { - return linkMb, err +func InTotoRecordStop(prelimLinkEnv Metadata, productPaths []string, key Key, hashAlgorithms, gitignorePatterns []string, lStripPaths []string, lineNormalization bool, followSymlinkDirs bool, useDSSE bool) (Metadata, error) { + if err := prelimLinkEnv.VerifySignature(key); err != nil { + return nil, err } - link, ok := prelimLinkMb.Signed.(Link) + link, ok := prelimLinkEnv.GetPayload().(Link) if !ok { - return linkMb, errors.New("invalid metadata block") + return nil, errors.New("invalid metadata block") } - products, err := RecordArtifacts(productPaths, hashAlgorithms, gitignorePatterns, lStripPaths, lineNormalization) + products, err := RecordArtifacts(productPaths, hashAlgorithms, gitignorePatterns, lStripPaths, lineNormalization, followSymlinkDirs) if err != nil { - return linkMb, err + return nil, err } link.Products = products - linkMb.Signed = link + if useDSSE { + env := &Envelope{} + if err := env.SetPayload(link); err != nil { + return nil, err + } + + if !reflect.ValueOf(key).IsZero() { + if err := env.Sign(key); err != nil { + return nil, err + } + } + + return env, nil + } + + linkMb := &Metablock{Signed: link, Signatures: []Signature{}} if !reflect.ValueOf(key).IsZero() { if err := linkMb.Sign(key); 
err != nil { return linkMb, err diff --git a/vendor/github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2/provenance.go b/vendor/github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2/provenance.go index 5fca7abb7326..40416e29a85a 100644 --- a/vendor/github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2/provenance.go +++ b/vendor/github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2/provenance.go @@ -11,6 +11,13 @@ const ( PredicateSLSAProvenance = "https://slsa.dev/provenance/v0.2" ) +// These are type aliases to common to avoid backwards incompatible changes. +type ( + DigestSet = common.DigestSet + ProvenanceBuilder = common.ProvenanceBuilder + ProvenanceMaterial = common.ProvenanceMaterial +) + // ProvenancePredicate is the provenance predicate definition. type ProvenancePredicate struct { // Builder identifies the entity that executed the invocation, which is trusted to have diff --git a/vendor/github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1/provenance.go b/vendor/github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1/provenance.go new file mode 100644 index 000000000000..e849731dceb8 --- /dev/null +++ b/vendor/github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1/provenance.go @@ -0,0 +1,151 @@ +package v1 + +import ( + "time" + + "github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common" +) + +const ( + // PredicateSLSAProvenance represents a build provenance for an artifact. + PredicateSLSAProvenance = "https://slsa.dev/provenance/v1" +) + +// ProvenancePredicate is the provenance predicate definition. +type ProvenancePredicate struct { + // The BuildDefinition describes all of the inputs to the build. The + // accuracy and completeness are implied by runDetails.builder.id. + // + // It SHOULD contain all the information necessary and sufficient to + // initialize the build and begin execution. 
+ BuildDefinition ProvenanceBuildDefinition `json:"buildDefinition"` + + // Details specific to this particular execution of the build. + RunDetails ProvenanceRunDetails `json:"runDetails"` +} + +// ProvenanceBuildDefinition describes the inputs to the build. +type ProvenanceBuildDefinition struct { + // Identifies the template for how to perform the build and interpret the + // parameters and dependencies. + + // The URI SHOULD resolve to a human-readable specification that includes: + // overall description of the build type; schema for externalParameters and + // systemParameters; unambiguous instructions for how to initiate the build + // given this BuildDefinition, and a complete example. + BuildType string `json:"buildType"` + + // The parameters that are under external control, such as those set by a + // user or tenant of the build system. They MUST be complete at SLSA Build + // L3, meaning that that there is no additional mechanism for an external + // party to influence the build. (At lower SLSA Build levels, the + // completeness MAY be best effort.) + + // The build system SHOULD be designed to minimize the size and complexity + // of externalParameters, in order to reduce fragility and ease + // verification. Consumers SHOULD have an expectation of what “good” looks + // like; the more information that they need to check, the harder that task + // becomes. + ExternalParameters interface{} `json:"externalParameters"` + + // The parameters that are under the control of the entity represented by + // builder.id. The primary intention of this field is for debugging, + // incident response, and vulnerability management. The values here MAY be + // necessary for reproducing the build. There is no need to verify these + // parameters because the build system is already trusted, and in many cases + // it is not practical to do so. 
+ InternalParameters interface{} `json:"internalParameters,omitempty"` + + // Unordered collection of artifacts needed at build time. Completeness is + // best effort, at least through SLSA Build L3. For example, if the build + // script fetches and executes “example.com/foo.sh”, which in turn fetches + // “example.com/bar.tar.gz”, then both “foo.sh” and “bar.tar.gz” SHOULD be + // listed here. + ResolvedDependencies []ResourceDescriptor `json:"resolvedDependencies,omitempty"` +} + +// ProvenanceRunDetails includes details specific to a particular execution of a +// build. +type ProvenanceRunDetails struct { + // Identifies the entity that executed the invocation, which is trusted to + // have correctly performed the operation and populated this provenance. + // + // This field is REQUIRED for SLSA Build 1 unless id is implicit from the + // attestation envelope. + Builder Builder `json:"builder"` + + // Metadata about this particular execution of the build. + BuildMetadata BuildMetadata `json:"metadata,omitempty"` + + // Additional artifacts generated during the build that are not considered + // the “output” of the build but that might be needed during debugging or + // incident response. For example, this might reference logs generated + // during the build and/or a digest of the fully evaluated build + // configuration. + // + // In most cases, this SHOULD NOT contain all intermediate files generated + // during the build. Instead, this SHOULD only contain files that are + // likely to be useful later and that cannot be easily reproduced. + Byproducts []ResourceDescriptor `json:"byproducts,omitempty"` +} + +// ResourceDescriptor describes a particular software artifact or resource +// (mutable or immutable). +// See https://github.com/in-toto/attestation/blob/main/spec/v1.0/resource_descriptor.md +type ResourceDescriptor struct { + // A URI used to identify the resource or artifact globally. This field is + // REQUIRED unless either digest or content is set. 
+ URI string `json:"uri,omitempty"` + + // A set of cryptographic digests of the contents of the resource or + // artifact. This field is REQUIRED unless either uri or content is set. + Digest common.DigestSet `json:"digest,omitempty"` + + // TMachine-readable identifier for distinguishing between descriptors. + Name string `json:"name,omitempty"` + + // The location of the described resource or artifact, if different from the + // uri. + DownloadLocation string `json:"downloadLocation,omitempty"` + + // The MIME Type (i.e., media type) of the described resource or artifact. + MediaType string `json:"mediaType,omitempty"` + + // The contents of the resource or artifact. This field is REQUIRED unless + // either uri or digest is set. + Content []byte `json:"content,omitempty"` + + // This field MAY be used to provide additional information or metadata + // about the resource or artifact that may be useful to the consumer when + // evaluating the attestation against a policy. + Annotations map[string]interface{} `json:"annotations,omitempty"` +} + +// Builder represents the transitive closure of all the entities that are, by +// necessity, trusted to faithfully run the build and record the provenance. +type Builder struct { + // URI indicating the transitive closure of the trusted builder. + ID string `json:"id"` + + // Version numbers of components of the builder. + Version map[string]string `json:"version,omitempty"` + + // Dependencies used by the orchestrator that are not run within the + // workload and that do not affect the build, but might affect the + // provenance generation or security guarantees. + BuilderDependencies []ResourceDescriptor `json:"builderDependencies,omitempty"` +} + +type BuildMetadata struct { + // Identifies this particular build invocation, which can be useful for + // finding associated logs or other ad-hoc analysis. The exact meaning and + // format is defined by builder.id; by default it is treated as opaque and + // case-sensitive. 
The value SHOULD be globally unique. + InvocationID string `json:"invocationID,omitempty"` + + // The timestamp of when the build started. + StartedOn *time.Time `json:"startedOn,omitempty"` + + // The timestamp of when the build completed. + FinishedOn *time.Time `json:"finishedOn,omitempty"` +} diff --git a/vendor/github.com/in-toto/in-toto-golang/in_toto/util.go b/vendor/github.com/in-toto/in-toto-golang/in_toto/util.go index 59cba86eb52c..5c36dede13d1 100644 --- a/vendor/github.com/in-toto/in-toto-golang/in_toto/util.go +++ b/vendor/github.com/in-toto/in-toto-golang/in_toto/util.go @@ -1,9 +1,15 @@ package in_toto import ( + "encoding/json" + "errors" "fmt" + "reflect" + "strings" ) +var ErrUnknownMetadataType = errors.New("unknown metadata type encountered: not link or layout") + /* Set represents a data structure for set operations. See `NewSet` for how to create a Set, and available Set receivers for useful set operations. @@ -145,3 +151,40 @@ func (s Set) IsSubSet(subset Set) bool { } return true } + +func loadPayload(payloadBytes []byte) (any, error) { + var payload map[string]any + if err := json.Unmarshal(payloadBytes, &payload); err != nil { + return nil, fmt.Errorf("error decoding payload: %w", err) + } + + if payload["_type"] == "link" { + var link Link + if err := checkRequiredJSONFields(payload, reflect.TypeOf(link)); err != nil { + return nil, fmt.Errorf("error decoding payload: %w", err) + } + + decoder := json.NewDecoder(strings.NewReader(string(payloadBytes))) + decoder.DisallowUnknownFields() + if err := decoder.Decode(&link); err != nil { + return nil, fmt.Errorf("error decoding payload: %w", err) + } + + return link, nil + } else if payload["_type"] == "layout" { + var layout Layout + if err := checkRequiredJSONFields(payload, reflect.TypeOf(layout)); err != nil { + return nil, fmt.Errorf("error decoding payload: %w", err) + } + + decoder := json.NewDecoder(strings.NewReader(string(payloadBytes))) + decoder.DisallowUnknownFields() + if err := 
decoder.Decode(&layout); err != nil { + return nil, fmt.Errorf("error decoding payload: %w", err) + } + + return layout, nil + } + + return nil, ErrUnknownMetadataType +} diff --git a/vendor/github.com/in-toto/in-toto-golang/in_toto/verifylib.go b/vendor/github.com/in-toto/in-toto-golang/in_toto/verifylib.go index 2302040f4600..2564bd47eb2b 100644 --- a/vendor/github.com/in-toto/in-toto-golang/in_toto/verifylib.go +++ b/vendor/github.com/in-toto/in-toto-golang/in_toto/verifylib.go @@ -12,7 +12,6 @@ import ( "io" "os" "path" - osPath "path" "path/filepath" "reflect" "regexp" @@ -23,6 +22,8 @@ import ( // ErrInspectionRunDirIsSymlink gets thrown if the runDir is a symlink var ErrInspectionRunDirIsSymlink = errors.New("runDir is a symlink. This is a security risk") +var ErrNotLayout = errors.New("verification workflow passed a non-layout") + /* RunInspections iteratively executes the command in the Run field of all inspections of the passed layout, creating unsigned link metadata that records @@ -41,8 +42,8 @@ If executing the inspection command fails, or if the executed command has a non-zero exit code, the first return value is an empty Metablock map and the second return value is the error. 
*/ -func RunInspections(layout Layout, runDir string, lineNormalization bool) (map[string]Metablock, error) { - inspectionMetadata := make(map[string]Metablock) +func RunInspections(layout Layout, runDir string, lineNormalization bool, useDSSE bool) (map[string]Metadata, error) { + inspectionMetadata := make(map[string]Metadata) for _, inspection := range layout.Inspect { @@ -51,14 +52,14 @@ func RunInspections(layout Layout, runDir string, lineNormalization bool) (map[s paths = []string{runDir} } - linkMb, err := InTotoRun(inspection.Name, runDir, paths, paths, - inspection.Run, Key{}, []string{"sha256"}, nil, nil, lineNormalization) + linkEnv, err := InTotoRun(inspection.Name, runDir, paths, paths, + inspection.Run, Key{}, []string{"sha256"}, nil, nil, lineNormalization, false, useDSSE) if err != nil { return nil, err } - retVal := linkMb.Signed.(Link).ByProducts["return-value"] + retVal := linkEnv.GetPayload().(Link).ByProducts["return-value"] if retVal != float64(0) { return nil, fmt.Errorf("inspection command '%s' of inspection '%s'"+ " returned a non-zero value: %d", inspection.Run, inspection.Name, @@ -67,11 +68,11 @@ func RunInspections(layout Layout, runDir string, lineNormalization bool) (map[s // Dump inspection link to cwd using the short link name format linkName := fmt.Sprintf(LinkNameFormatShort, inspection.Name) - if err := linkMb.Dump(linkName); err != nil { + if err := linkEnv.Dump(linkName); err != nil { fmt.Printf("JSON serialization or writing failed: %s", err) } - inspectionMetadata[inspection.Name] = linkMb + inspectionMetadata[inspection.Name] = linkEnv } return inspectionMetadata, nil } @@ -80,10 +81,10 @@ func RunInspections(layout Layout, runDir string, lineNormalization bool) (map[s // type MATCH. See VerifyArtifacts for more details. 
func verifyMatchRule(ruleData map[string]string, srcArtifacts map[string]interface{}, srcArtifactQueue Set, - itemsMetadata map[string]Metablock) Set { + itemsMetadata map[string]Metadata) Set { consumed := NewSet() // Get destination link metadata - dstLinkMb, exists := itemsMetadata[ruleData["dstName"]] + dstLinkEnv, exists := itemsMetadata[ruleData["dstName"]] if !exists { // Destination link does not exist, rule can't consume any // artifacts @@ -94,9 +95,9 @@ func verifyMatchRule(ruleData map[string]string, var dstArtifacts map[string]interface{} switch ruleData["dstType"] { case "materials": - dstArtifacts = dstLinkMb.Signed.(Link).Materials + dstArtifacts = dstLinkEnv.GetPayload().(Link).Materials case "products": - dstArtifacts = dstLinkMb.Signed.(Link).Products + dstArtifacts = dstLinkEnv.GetPayload().(Link).Products } // cleanup paths in pattern and artifact maps @@ -140,7 +141,7 @@ func verifyMatchRule(ruleData map[string]string, // Construct corresponding destination artifact path, i.e. // an optional destination prefix plus the source base path - dstPath := path.Clean(osPath.Join(ruleData["dstPrefix"], srcBasePath)) + dstPath := path.Clean(path.Join(ruleData["dstPrefix"], srcBasePath)) // Try to find the corresponding destination artifact dstArtifact, exists := dstArtifacts[dstPath] @@ -180,7 +181,7 @@ DISALLOW rule to fail overall verification, if artifacts are left in the queue that should have been consumed by preceding rules. 
*/ func VerifyArtifacts(items []interface{}, - itemsMetadata map[string]Metablock) error { + itemsMetadata map[string]Metadata) error { // Verify artifact rules for each item in the layout for _, itemI := range items { // The layout item (interface) must be a Link or an Inspection we are only @@ -207,7 +208,7 @@ func VerifyArtifacts(items []interface{}, } // Use the item's name to extract the corresponding link - srcLinkMb, exists := itemsMetadata[itemName] + srcLinkEnv, exists := itemsMetadata[itemName] if !exists { return fmt.Errorf("VerifyArtifacts could not find metadata"+ " for item '%s', got: '%s'", itemName, itemsMetadata) @@ -215,8 +216,8 @@ func VerifyArtifacts(items []interface{}, // Create shortcuts to materials and products (including hashes) reported // by the item's link, required to verify "match" rules - materials := srcLinkMb.Signed.(Link).Materials - products := srcLinkMb.Signed.(Link).Products + materials := srcLinkEnv.GetPayload().(Link).Materials + products := srcLinkEnv.GetPayload().(Link).Products // All other rules only require the material or product paths (without // hashes). We extract them from the corresponding maps and store them as @@ -364,9 +365,9 @@ Products, the first return value is an empty Metablock map and the second return value is the error. 
*/ func ReduceStepsMetadata(layout Layout, - stepsMetadata map[string]map[string]Metablock) (map[string]Metablock, + stepsMetadata map[string]map[string]Metadata) (map[string]Metadata, error) { - stepsMetadataReduced := make(map[string]Metablock) + stepsMetadataReduced := make(map[string]Metadata) for _, step := range layout.Steps { linksPerStep, ok := stepsMetadata[step.Name] @@ -379,16 +380,16 @@ func ReduceStepsMetadata(layout Layout, // Get the first link (could be any link) for the current step, which will // serve as reference link for below comparisons var referenceKeyID string - var referenceLinkMb Metablock - for keyID, linkMb := range linksPerStep { - referenceLinkMb = linkMb + var referenceLinkEnv Metadata + for keyID, linkEnv := range linksPerStep { + referenceLinkEnv = linkEnv referenceKeyID = keyID break } // Only one link, nothing to reduce, take the reference link if len(linksPerStep) == 1 { - stepsMetadataReduced[step.Name] = referenceLinkMb + stepsMetadataReduced[step.Name] = referenceLinkEnv // Multiple links, reduce but first check } else { @@ -396,11 +397,11 @@ func ReduceStepsMetadata(layout Layout, // TODO: What should we do if there are more links, than the // threshold requires, but not all of them are equal? Right now we would // also error. 
- for keyID, linkMb := range linksPerStep { - if !reflect.DeepEqual(linkMb.Signed.(Link).Materials, - referenceLinkMb.Signed.(Link).Materials) || - !reflect.DeepEqual(linkMb.Signed.(Link).Products, - referenceLinkMb.Signed.(Link).Products) { + for keyID, linkEnv := range linksPerStep { + if !reflect.DeepEqual(linkEnv.GetPayload().(Link).Materials, + referenceLinkEnv.GetPayload().(Link).Materials) || + !reflect.DeepEqual(linkEnv.GetPayload().(Link).Products, + referenceLinkEnv.GetPayload().(Link).Products) { return nil, fmt.Errorf("link '%s' and '%s' have different"+ " artifacts", fmt.Sprintf(LinkNameFormat, step.Name, referenceKeyID), @@ -408,7 +409,7 @@ func ReduceStepsMetadata(layout Layout, } } // We haven't errored out, so we can reduce (i.e take the reference link) - stepsMetadataReduced[step.Name] = referenceLinkMb + stepsMetadataReduced[step.Name] = referenceLinkEnv } } return stepsMetadataReduced, nil @@ -421,7 +422,7 @@ command, as per the layout. Soft verification means that, in case a command does not align, a warning is issued. */ func VerifyStepCommandAlignment(layout Layout, - stepsMetadata map[string]map[string]Metablock) { + stepsMetadata map[string]map[string]Metadata) { for _, step := range layout.Steps { linksPerStep, ok := stepsMetadata[step.Name] // We should never get here, layout verification must fail earlier @@ -430,9 +431,9 @@ func VerifyStepCommandAlignment(layout Layout, "', no link metadata found.") } - for signerKeyID, linkMb := range linksPerStep { + for signerKeyID, linkEnv := range linksPerStep { expectedCommandS := strings.Join(step.ExpectedCommand, " ") - executedCommandS := strings.Join(linkMb.Signed.(Link).Command, " ") + executedCommandS := strings.Join(linkEnv.GetPayload().(Link).Command, " ") if expectedCommandS != executedCommandS { linkName := fmt.Sprintf(LinkNameFormat, step.Name, signerKeyID) @@ -502,11 +503,11 @@ return value is an empty map of Metablock maps and the second return value is the error. 
*/ func VerifyLinkSignatureThesholds(layout Layout, - stepsMetadata map[string]map[string]Metablock, rootCertPool, intermediateCertPool *x509.CertPool) ( - map[string]map[string]Metablock, error) { + stepsMetadata map[string]map[string]Metadata, rootCertPool, intermediateCertPool *x509.CertPool) ( + map[string]map[string]Metadata, error) { // This will stores links with valid signature from an authorized functionary // for all steps - stepsMetadataVerified := make(map[string]map[string]Metablock) + stepsMetadataVerified := make(map[string]map[string]Metadata) // Try to find enough (>= threshold) links each with a valid signature from // distinct authorized functionaries for each step @@ -515,7 +516,7 @@ func VerifyLinkSignatureThesholds(layout Layout, // This will store links with valid signature from an authorized // functionary for the given step - linksPerStepVerified := make(map[string]Metablock) + linksPerStepVerified := make(map[string]Metadata) // Check if there are any links at all for the given step linksPerStep, ok := stepsMetadata[step.Name] @@ -528,12 +529,12 @@ func VerifyLinkSignatureThesholds(layout Layout, // verification passes. Only good links are stored, to verify thresholds // below. isAuthorizedSignature := false - for signerKeyID, linkMb := range linksPerStep { + for signerKeyID, linkEnv := range linksPerStep { for _, authorizedKeyID := range step.PubKeys { if signerKeyID == authorizedKeyID { if verifierKey, ok := layout.Keys[authorizedKeyID]; ok { - if err := linkMb.VerifySignature(verifierKey); err == nil { - linksPerStepVerified[signerKeyID] = linkMb + if err := linkEnv.VerifySignature(verifierKey); err == nil { + linksPerStepVerified[signerKeyID] = linkEnv isAuthorizedSignature = true break } @@ -544,7 +545,7 @@ func VerifyLinkSignatureThesholds(layout Layout, // If the signer's key wasn't in our step's pubkeys array, check the cert pool to // see if the key is known to us. 
if !isAuthorizedSignature { - sig, err := linkMb.GetSignatureForKeyID(signerKeyID) + sig, err := linkEnv.GetSignatureForKeyID(signerKeyID) if err != nil { stepErr = err continue @@ -563,13 +564,13 @@ func VerifyLinkSignatureThesholds(layout Layout, continue } - err = linkMb.VerifySignature(cert) + err = linkEnv.VerifySignature(cert) if err != nil { stepErr = err continue } - linksPerStepVerified[signerKeyID] = linkMb + linksPerStepVerified[signerKeyID] = linkEnv } } @@ -614,30 +615,30 @@ ignored. Only a preliminary threshold check is performed, that is, if there aren't at least Threshold links for any given step, the first return value is an empty map of Metablock maps and the second return value is the error. */ -func LoadLinksForLayout(layout Layout, linkDir string) (map[string]map[string]Metablock, error) { - stepsMetadata := make(map[string]map[string]Metablock) +func LoadLinksForLayout(layout Layout, linkDir string) (map[string]map[string]Metadata, error) { + stepsMetadata := make(map[string]map[string]Metadata) for _, step := range layout.Steps { - linksPerStep := make(map[string]Metablock) + linksPerStep := make(map[string]Metadata) // Since we can verify against certificates belonging to a CA, we need to // load any possible links - linkFiles, err := filepath.Glob(osPath.Join(linkDir, fmt.Sprintf(LinkGlobFormat, step.Name))) + linkFiles, err := filepath.Glob(path.Join(linkDir, fmt.Sprintf(LinkGlobFormat, step.Name))) if err != nil { return nil, err } for _, linkPath := range linkFiles { - var linkMb Metablock - if err := linkMb.Load(linkPath); err != nil { + linkEnv, err := LoadMetadata(linkPath) + if err != nil { continue } // To get the full key from the metadata's signatures, we have to check // for one with the same short id... 
signerShortKeyID := strings.TrimSuffix(strings.TrimPrefix(filepath.Base(linkPath), step.Name+"."), ".link") - for _, sig := range linkMb.Signatures { + for _, sig := range linkEnv.Sigs() { if strings.HasPrefix(sig.KeyID, signerShortKeyID) { - linksPerStep[sig.KeyID] = linkMb + linksPerStep[sig.KeyID] = linkEnv break } } @@ -677,14 +678,14 @@ Signatures and keys are associated by key id. If the key map is empty, or the Metablock's Signature field does not have a signature for one or more of the passed keys, or a matching signature is invalid, an error is returned. */ -func VerifyLayoutSignatures(layoutMb Metablock, +func VerifyLayoutSignatures(layoutEnv Metadata, layoutKeys map[string]Key) error { if len(layoutKeys) < 1 { return fmt.Errorf("layout verification requires at least one key") } for _, key := range layoutKeys { - if err := layoutMb.VerifySignature(key); err != nil { + if err := layoutEnv.VerifySignature(key); err != nil { return err } } @@ -700,29 +701,35 @@ NOTE: The assumption is that the steps mentioned in the layout are to be performed sequentially. So, the first step mentioned in the layout denotes what comes into the supply chain and the last step denotes what goes out. 
*/ -func GetSummaryLink(layout Layout, stepsMetadataReduced map[string]Metablock, - stepName string) (Metablock, error) { +func GetSummaryLink(layout Layout, stepsMetadataReduced map[string]Metadata, + stepName string, useDSSE bool) (Metadata, error) { var summaryLink Link - var result Metablock if len(layout.Steps) > 0 { firstStepLink := stepsMetadataReduced[layout.Steps[0].Name] lastStepLink := stepsMetadataReduced[layout.Steps[len(layout.Steps)-1].Name] - summaryLink.Materials = firstStepLink.Signed.(Link).Materials + summaryLink.Materials = firstStepLink.GetPayload().(Link).Materials summaryLink.Name = stepName - summaryLink.Type = firstStepLink.Signed.(Link).Type + summaryLink.Type = firstStepLink.GetPayload().(Link).Type - summaryLink.Products = lastStepLink.Signed.(Link).Products - summaryLink.ByProducts = lastStepLink.Signed.(Link).ByProducts + summaryLink.Products = lastStepLink.GetPayload().(Link).Products + summaryLink.ByProducts = lastStepLink.GetPayload().(Link).ByProducts // Using the last command of the sublayout as the command // of the summary link can be misleading. Is it necessary to // include all the commands executed as part of sublayout? - summaryLink.Command = lastStepLink.Signed.(Link).Command + summaryLink.Command = lastStepLink.GetPayload().(Link).Command } - result.Signed = summaryLink + if useDSSE { + env := &Envelope{} + if err := env.SetPayload(summaryLink); err != nil { + return nil, err + } - return result, nil + return env, nil + } + + return &Metablock{Signed: summaryLink}, nil } /* @@ -731,11 +738,11 @@ so, recursively resolves it and replaces it with a summary link summarizing the steps carried out in the sublayout. 
*/ func VerifySublayouts(layout Layout, - stepsMetadataVerified map[string]map[string]Metablock, - superLayoutLinkPath string, intermediatePems [][]byte, lineNormalization bool) (map[string]map[string]Metablock, error) { + stepsMetadataVerified map[string]map[string]Metadata, + superLayoutLinkPath string, intermediatePems [][]byte, lineNormalization bool) (map[string]map[string]Metadata, error) { for stepName, linkData := range stepsMetadataVerified { for keyID, metadata := range linkData { - if _, ok := metadata.Signed.(Layout); ok { + if _, ok := metadata.GetPayload().(Layout); ok { layoutKeys := make(map[string]Key) layoutKeys[keyID] = layout.Keys[keyID] @@ -861,55 +868,60 @@ Metablock object. NOTE: Artifact rules of type "create", "modify" and "delete" are currently not supported. */ -func InTotoVerify(layoutMb Metablock, layoutKeys map[string]Key, +func InTotoVerify(layoutEnv Metadata, layoutKeys map[string]Key, linkDir string, stepName string, parameterDictionary map[string]string, intermediatePems [][]byte, lineNormalization bool) ( - Metablock, error) { - - var summaryLink Metablock - var err error + Metadata, error) { // Verify root signatures - if err := VerifyLayoutSignatures(layoutMb, layoutKeys); err != nil { - return summaryLink, err + if err := VerifyLayoutSignatures(layoutEnv, layoutKeys); err != nil { + return nil, err + } + + useDSSE := false + if _, ok := layoutEnv.(*Envelope); ok { + useDSSE = true } - // Extract the layout from its Metablock container (for further processing) - layout := layoutMb.Signed.(Layout) + // Extract the layout from its Metadata container (for further processing) + layout, ok := layoutEnv.GetPayload().(Layout) + if !ok { + return nil, ErrNotLayout + } // Verify layout expiration if err := VerifyLayoutExpiration(layout); err != nil { - return summaryLink, err + return nil, err } // Substitute parameters in layout - layout, err = SubstituteParameters(layout, parameterDictionary) + layout, err := 
SubstituteParameters(layout, parameterDictionary) if err != nil { - return summaryLink, err + return nil, err } rootCertPool, intermediateCertPool, err := LoadLayoutCertificates(layout, intermediatePems) if err != nil { - return summaryLink, err + return nil, err } // Load links for layout stepsMetadata, err := LoadLinksForLayout(layout, linkDir) if err != nil { - return summaryLink, err + return nil, err } // Verify link signatures stepsMetadataVerified, err := VerifyLinkSignatureThesholds(layout, stepsMetadata, rootCertPool, intermediateCertPool) if err != nil { - return summaryLink, err + return nil, err } // Verify and resolve sublayouts stepsSublayoutVerified, err := VerifySublayouts(layout, stepsMetadataVerified, linkDir, intermediatePems, lineNormalization) if err != nil { - return summaryLink, err + return nil, err } // Verify command alignment (WARNING only) @@ -922,18 +934,18 @@ func InTotoVerify(layoutMb Metablock, layoutKeys map[string]Key, stepsMetadataReduced, err := ReduceStepsMetadata(layout, stepsSublayoutVerified) if err != nil { - return summaryLink, err + return nil, err } // Verify artifact rules if err = VerifyArtifacts(layout.stepsAsInterfaceSlice(), stepsMetadataReduced); err != nil { - return summaryLink, err + return nil, err } - inspectionMetadata, err := RunInspections(layout, "", lineNormalization) + inspectionMetadata, err := RunInspections(layout, "", lineNormalization, useDSSE) if err != nil { - return summaryLink, err + return nil, err } // Add steps metadata to inspection metadata, because inspection artifact @@ -944,51 +956,48 @@ func InTotoVerify(layoutMb Metablock, layoutKeys map[string]Key, if err = VerifyArtifacts(layout.inspectAsInterfaceSlice(), inspectionMetadata); err != nil { - return summaryLink, err + return nil, err } - summaryLink, err = GetSummaryLink(layout, stepsMetadataReduced, stepName) + summaryLink, err := GetSummaryLink(layout, stepsMetadataReduced, stepName, useDSSE) if err != nil { - return summaryLink, err 
+ return nil, err } return summaryLink, nil } /* -InTotoVerifyWithDirectory provides the same functionality as IntotoVerify, but +InTotoVerifyWithDirectory provides the same functionality as InTotoVerify, but adds the possibility to select a local directory from where the inspections are run. */ -func InTotoVerifyWithDirectory(layoutMb Metablock, layoutKeys map[string]Key, +func InTotoVerifyWithDirectory(layoutEnv Metadata, layoutKeys map[string]Key, linkDir string, runDir string, stepName string, parameterDictionary map[string]string, intermediatePems [][]byte, lineNormalization bool) ( - Metablock, error) { - - var summaryLink Metablock - var err error + Metadata, error) { // runDir sanity checks // check if path exists info, err := os.Stat(runDir) if err != nil { - return Metablock{}, err + return nil, err } // check if runDir is a symlink if info.Mode()&os.ModeSymlink == os.ModeSymlink { - return Metablock{}, ErrInspectionRunDirIsSymlink + return nil, ErrInspectionRunDirIsSymlink } // check if runDir is writable and a directory err = isWritable(runDir) if err != nil { - return Metablock{}, err + return nil, err } // check if runDir is empty (we do not want to overwrite files) // We abuse File.Readdirnames for this action. 
f, err := os.Open(runDir) if err != nil { - return Metablock{}, err + return nil, err } defer f.Close() // We use Readdirnames(1) for performance reasons, one child node @@ -996,55 +1005,63 @@ func InTotoVerifyWithDirectory(layoutMb Metablock, layoutKeys map[string]Key, _, err = f.Readdirnames(1) // if io.EOF gets returned as error the directory is empty if err == io.EOF { - return Metablock{}, err + return nil, err } err = f.Close() if err != nil { - return Metablock{}, err + return nil, err } // Verify root signatures - if err := VerifyLayoutSignatures(layoutMb, layoutKeys); err != nil { - return summaryLink, err + if err := VerifyLayoutSignatures(layoutEnv, layoutKeys); err != nil { + return nil, err } - // Extract the layout from its Metablock container (for further processing) - layout := layoutMb.Signed.(Layout) + useDSSE := false + if _, ok := layoutEnv.(*Envelope); ok { + useDSSE = true + } + + // Extract the layout from its Metadata container (for further processing) + layout, ok := layoutEnv.GetPayload().(Layout) + if !ok { + return nil, ErrNotLayout + } // Verify layout expiration if err := VerifyLayoutExpiration(layout); err != nil { - return summaryLink, err + return nil, err } // Substitute parameters in layout layout, err = SubstituteParameters(layout, parameterDictionary) if err != nil { - return summaryLink, err + return nil, err } rootCertPool, intermediateCertPool, err := LoadLayoutCertificates(layout, intermediatePems) if err != nil { - return summaryLink, err + return nil, err } // Load links for layout stepsMetadata, err := LoadLinksForLayout(layout, linkDir) if err != nil { - return summaryLink, err + return nil, err } // Verify link signatures stepsMetadataVerified, err := VerifyLinkSignatureThesholds(layout, stepsMetadata, rootCertPool, intermediateCertPool) if err != nil { - return summaryLink, err + return nil, err } // Verify and resolve sublayouts stepsSublayoutVerified, err := VerifySublayouts(layout, stepsMetadataVerified, linkDir, 
intermediatePems, lineNormalization) if err != nil { - return summaryLink, err + return nil, err } // Verify command alignment (WARNING only) @@ -1057,18 +1074,18 @@ func InTotoVerifyWithDirectory(layoutMb Metablock, layoutKeys map[string]Key, stepsMetadataReduced, err := ReduceStepsMetadata(layout, stepsSublayoutVerified) if err != nil { - return summaryLink, err + return nil, err } // Verify artifact rules if err = VerifyArtifacts(layout.stepsAsInterfaceSlice(), stepsMetadataReduced); err != nil { - return summaryLink, err + return nil, err } - inspectionMetadata, err := RunInspections(layout, runDir, lineNormalization) + inspectionMetadata, err := RunInspections(layout, runDir, lineNormalization, useDSSE) if err != nil { - return summaryLink, err + return nil, err } // Add steps metadata to inspection metadata, because inspection artifact @@ -1079,12 +1096,12 @@ func InTotoVerifyWithDirectory(layoutMb Metablock, layoutKeys map[string]Key, if err = VerifyArtifacts(layout.inspectAsInterfaceSlice(), inspectionMetadata); err != nil { - return summaryLink, err + return nil, err } - summaryLink, err = GetSummaryLink(layout, stepsMetadataReduced, stepName) + summaryLink, err := GetSummaryLink(layout, stepsMetadataReduced, stepName, useDSSE) if err != nil { - return summaryLink, err + return nil, err } return summaryLink, nil diff --git a/vendor/github.com/secure-systems-lab/go-securesystemslib/cjson/canonicaljson.go b/vendor/github.com/secure-systems-lab/go-securesystemslib/cjson/canonicaljson.go index fb1d5918b282..abc860a491bf 100644 --- a/vendor/github.com/secure-systems-lab/go-securesystemslib/cjson/canonicaljson.go +++ b/vendor/github.com/secure-systems-lab/go-securesystemslib/cjson/canonicaljson.go @@ -6,8 +6,8 @@ import ( "errors" "fmt" "reflect" - "regexp" "sort" + "strings" ) /* @@ -18,8 +18,12 @@ escaping backslashes ("\") and double quotes (") and wrapping the resulting string in double quotes ("). 
*/ func encodeCanonicalString(s string) string { - re := regexp.MustCompile(`([\"\\])`) - return fmt.Sprintf("\"%s\"", re.ReplaceAllString(s, "\\$1")) + // Escape backslashes + s = strings.ReplaceAll(s, "\\", "\\\\") + // Escape double quotes + s = strings.ReplaceAll(s, "\"", "\\\"") + // Wrap with double quotes + return fmt.Sprintf("\"%s\"", s) } /* @@ -28,16 +32,7 @@ object according to the OLPC canonical JSON specification (see http://wiki.laptop.org/go/Canonical_JSON) and write it to the passed *bytes.Buffer. If canonicalization fails it returns an error. */ -func encodeCanonical(obj interface{}, result *bytes.Buffer) (err error) { - // Since this function is called recursively, we use panic if an error occurs - // and recover in a deferred function, which is always called before - // returning. There we set the error that is returned eventually. - defer func() { - if r := recover(); r != nil { - err = errors.New(r.(string)) - } - }() - +func encodeCanonical(obj interface{}, result *strings.Builder) (err error) { switch objAsserted := obj.(type) { case string: result.WriteString(encodeCanonicalString(objAsserted)) @@ -90,10 +85,9 @@ func encodeCanonical(obj interface{}, result *bytes.Buffer) (err error) { // Canonicalize map for i, key := range mapKeys { - // Note: `key` must be a `string` (see `case map[string]interface{}`) and - // canonicalization of strings cannot err out (see `case string`), thus - // no error handling is needed here. - encodeCanonical(key, result) + if err := encodeCanonical(key, result); err != nil { + return err + } result.WriteString(":") if err := encodeCanonical(objAsserted[key], result); err != nil { @@ -120,7 +114,16 @@ slice. It uses the OLPC canonical JSON specification (see http://wiki.laptop.org/go/Canonical_JSON). If canonicalization fails the byte slice is nil and the second return value contains the error. 
*/ -func EncodeCanonical(obj interface{}) ([]byte, error) { +func EncodeCanonical(obj interface{}) (out []byte, err error) { + // We use panic if an error occurs and recover in a deferred function, + // which is always called before returning. + // There we set the error that is returned eventually. + defer func() { + if r := recover(); r != nil { + err = errors.New(r.(string)) + } + }() + // FIXME: Terrible hack to turn the passed struct into a map, converting // the struct's variable names to the json key names defined in the struct data, err := json.Marshal(obj) @@ -136,10 +139,13 @@ func EncodeCanonical(obj interface{}) ([]byte, error) { } // Create a buffer and write the canonicalized JSON bytes to it - var result bytes.Buffer + var result strings.Builder + // Allocate output result buffer with the input size. + result.Grow(len(data)) + // Recursively encode the jsonmap if err := encodeCanonical(jsonMap, &result); err != nil { return nil, err } - return result.Bytes(), nil + return []byte(result.String()), nil } diff --git a/vendor/github.com/secure-systems-lab/go-securesystemslib/dsse/envelope.go b/vendor/github.com/secure-systems-lab/go-securesystemslib/dsse/envelope.go new file mode 100644 index 000000000000..ed223e90b5a1 --- /dev/null +++ b/vendor/github.com/secure-systems-lab/go-securesystemslib/dsse/envelope.go @@ -0,0 +1,64 @@ +package dsse + +import ( + "encoding/base64" + "fmt" +) + +/* +Envelope captures an envelope as described by the DSSE specification. See here: +https://github.com/secure-systems-lab/dsse/blob/master/envelope.md +*/ +type Envelope struct { + PayloadType string `json:"payloadType"` + Payload string `json:"payload"` + Signatures []Signature `json:"signatures"` +} + +/* +DecodeB64Payload returns the serialized body, decoded from the envelope's +payload field. A flexible decoder is used, first trying standard base64, then +URL-encoded base64. 
+*/
+func (e *Envelope) DecodeB64Payload() ([]byte, error) {
+	return b64Decode(e.Payload)
+}
+
+/*
+Signature represents a generic in-toto signature that contains the identifier
+of the key which was used to create the signature.
+The used signature scheme has to be agreed upon by the signer and verifier
+out of band.
+The signature is a base64 encoding of the raw bytes from the signature
+algorithm.
+*/
+type Signature struct {
+	KeyID string `json:"keyid"`
+	Sig   string `json:"sig"`
+}
+
+/*
+PAE implements the DSSE Pre-Authentic Encoding
+https://github.com/secure-systems-lab/dsse/blob/master/protocol.md#signature-definition
+*/
+func PAE(payloadType string, payload []byte) []byte {
+	return []byte(fmt.Sprintf("DSSEv1 %d %s %d %s",
+		len(payloadType), payloadType,
+		len(payload), payload))
+}
+
+/*
+Both standard and url encoding are allowed:
+https://github.com/secure-systems-lab/dsse/blob/master/envelope.md
+*/
+func b64Decode(s string) ([]byte, error) {
+	b, err := base64.StdEncoding.DecodeString(s)
+	if err != nil {
+		b, err = base64.URLEncoding.DecodeString(s)
+		if err != nil {
+			return nil, fmt.Errorf("unable to base64 decode payload (is payload in the right format?)")
+		}
+	}
+
+	return b, nil
+}
diff --git a/vendor/github.com/secure-systems-lab/go-securesystemslib/dsse/sign.go b/vendor/github.com/secure-systems-lab/go-securesystemslib/dsse/sign.go
index 3dc05a4294e1..85aed102d4b2 100644
--- a/vendor/github.com/secure-systems-lab/go-securesystemslib/dsse/sign.go
+++ b/vendor/github.com/secure-systems-lab/go-securesystemslib/dsse/sign.go
@@ -5,111 +5,35 @@ https://github.com/secure-systems-lab/dsse
 package dsse
 
 import (
+	"context"
 	"encoding/base64"
 	"errors"
-	"fmt"
 )
 
-// ErrUnknownKey indicates that the implementation does not recognize the
-// key.
-var ErrUnknownKey = errors.New("unknown key")
-
-// ErrNoSignature indicates that an envelope did not contain any signatures.
-var ErrNoSignature = errors.New("no signature found") - // ErrNoSigners indicates that no signer was provided. var ErrNoSigners = errors.New("no signers provided") -/* -Envelope captures an envelope as described by the Secure Systems Lab -Signing Specification. See here: -https://github.com/secure-systems-lab/signing-spec/blob/master/envelope.md -*/ -type Envelope struct { - PayloadType string `json:"payloadType"` - Payload string `json:"payload"` - Signatures []Signature `json:"signatures"` -} - -/* -DecodeB64Payload returns the serialized body, decoded -from the envelope's payload field. A flexible -decoder is used, first trying standard base64, then -URL-encoded base64. -*/ -func (e *Envelope) DecodeB64Payload() ([]byte, error) { - return b64Decode(e.Payload) -} - -/* -Signature represents a generic in-toto signature that contains the identifier -of the key which was used to create the signature. -The used signature scheme has to be agreed upon by the signer and verifer -out of band. -The signature is a base64 encoding of the raw bytes from the signature -algorithm. -*/ -type Signature struct { - KeyID string `json:"keyid"` - Sig string `json:"sig"` -} - -/* -PAE implementes the DSSE Pre-Authentic Encoding -https://github.com/secure-systems-lab/dsse/blob/master/protocol.md#signature-definition -*/ -func PAE(payloadType string, payload []byte) []byte { - return []byte(fmt.Sprintf("DSSEv1 %d %s %d %s", - len(payloadType), payloadType, - len(payload), payload)) -} - -/* -Signer defines the interface for an abstract signing algorithm. -The Signer interface is used to inject signature algorithm implementations -into the EnevelopeSigner. This decoupling allows for any signing algorithm -and key management system can be used. -The full message is provided as the parameter. If the signature algorithm -depends on hashing of the message prior to signature calculation, the -implementor of this interface must perform such hashing. 
-The function must return raw bytes representing the calculated signature -using the current algorithm, and the key used (if applicable). -For an example see EcdsaSigner in sign_test.go. -*/ -type Signer interface { - Sign(data []byte) ([]byte, error) - KeyID() (string, error) -} - -// SignVerifer provides both the signing and verification interface. -type SignVerifier interface { - Signer - Verifier -} - // EnvelopeSigner creates signed Envelopes. type EnvelopeSigner struct { - providers []SignVerifier - ev *EnvelopeVerifier + providers []SignerVerifier } /* -NewEnvelopeSigner creates an EnvelopeSigner that uses 1+ Signer -algorithms to sign the data. -Creates a verifier with threshold=1, at least one of the providers must validate signitures successfully. +NewEnvelopeSigner creates an EnvelopeSigner that uses 1+ Signer algorithms to +sign the data. Creates a verifier with threshold=1, at least one of the +providers must validate signatures successfully. */ -func NewEnvelopeSigner(p ...SignVerifier) (*EnvelopeSigner, error) { +func NewEnvelopeSigner(p ...SignerVerifier) (*EnvelopeSigner, error) { return NewMultiEnvelopeSigner(1, p...) } /* NewMultiEnvelopeSigner creates an EnvelopeSigner that uses 1+ Signer -algorithms to sign the data. -Creates a verifier with threshold. -threashold indicates the amount of providers that must validate the envelope. +algorithms to sign the data. Creates a verifier with threshold. Threshold +indicates the amount of providers that must validate the envelope. 
*/ -func NewMultiEnvelopeSigner(threshold int, p ...SignVerifier) (*EnvelopeSigner, error) { - var providers []SignVerifier +func NewMultiEnvelopeSigner(threshold int, p ...SignerVerifier) (*EnvelopeSigner, error) { + var providers []SignerVerifier for _, sv := range p { if sv != nil { @@ -121,19 +45,8 @@ func NewMultiEnvelopeSigner(threshold int, p ...SignVerifier) (*EnvelopeSigner, return nil, ErrNoSigners } - evps := []Verifier{} - for _, p := range providers { - evps = append(evps, p.(Verifier)) - } - - ev, err := NewMultiEnvelopeVerifier(threshold, evps...) - if err != nil { - return nil, err - } - return &EnvelopeSigner{ providers: providers, - ev: ev, }, nil } @@ -143,7 +56,7 @@ Returned is an envelope as defined here: https://github.com/secure-systems-lab/dsse/blob/master/envelope.md One signature will be added for each Signer in the EnvelopeSigner. */ -func (es *EnvelopeSigner) SignPayload(payloadType string, body []byte) (*Envelope, error) { +func (es *EnvelopeSigner) SignPayload(ctx context.Context, payloadType string, body []byte) (*Envelope, error) { var e = Envelope{ Payload: base64.StdEncoding.EncodeToString(body), PayloadType: payloadType, @@ -152,7 +65,7 @@ func (es *EnvelopeSigner) SignPayload(payloadType string, body []byte) (*Envelop paeEnc := PAE(payloadType, body) for _, signer := range es.providers { - sig, err := signer.Sign(paeEnc) + sig, err := signer.Sign(ctx, paeEnc) if err != nil { return nil, err } @@ -169,29 +82,3 @@ func (es *EnvelopeSigner) SignPayload(payloadType string, body []byte) (*Envelop return &e, nil } - -/* -Verify decodes the payload and verifies the signature. -Any domain specific validation such as parsing the decoded body and -validating the payload type is left out to the caller. -Verify returns a list of accepted keys each including a keyid, public and signiture of the accepted provider keys. 
-*/ -func (es *EnvelopeSigner) Verify(e *Envelope) ([]AcceptedKey, error) { - return es.ev.Verify(e) -} - -/* -Both standard and url encoding are allowed: -https://github.com/secure-systems-lab/dsse/blob/master/envelope.md -*/ -func b64Decode(s string) ([]byte, error) { - b, err := base64.StdEncoding.DecodeString(s) - if err != nil { - b, err = base64.URLEncoding.DecodeString(s) - if err != nil { - return nil, err - } - } - - return b, nil -} diff --git a/vendor/github.com/secure-systems-lab/go-securesystemslib/dsse/signerverifier.go b/vendor/github.com/secure-systems-lab/go-securesystemslib/dsse/signerverifier.go new file mode 100644 index 000000000000..99d03c7df9b6 --- /dev/null +++ b/vendor/github.com/secure-systems-lab/go-securesystemslib/dsse/signerverifier.go @@ -0,0 +1,43 @@ +package dsse + +import ( + "context" + "crypto" +) + +/* +Signer defines the interface for an abstract signing algorithm. The Signer +interface is used to inject signature algorithm implementations into the +EnvelopeSigner. This decoupling allows for any signing algorithm and key +management system can be used. The full message is provided as the parameter. +If the signature algorithm depends on hashing of the message prior to signature +calculation, the implementor of this interface must perform such hashing. The +function must return raw bytes representing the calculated signature using the +current algorithm, and the key used (if applicable). +*/ +type Signer interface { + Sign(ctx context.Context, data []byte) ([]byte, error) + KeyID() (string, error) +} + +/* +Verifier verifies a complete message against a signature and key. If the message +was hashed prior to signature generation, the verifier must perform the same +steps. If KeyID returns successfully, only signature matching the key ID will be +verified. 
+*/ +type Verifier interface { + Verify(ctx context.Context, data, sig []byte) error + KeyID() (string, error) + Public() crypto.PublicKey +} + +// SignerVerifier provides both the signing and verification interface. +type SignerVerifier interface { + Signer + Verifier +} + +// Deprecated: switch to renamed SignerVerifier. This is currently aliased for +// backwards compatibility. +type SignVerifier = SignerVerifier diff --git a/vendor/github.com/secure-systems-lab/go-securesystemslib/dsse/verify.go b/vendor/github.com/secure-systems-lab/go-securesystemslib/dsse/verify.go index ead1c32ca80b..a36146b82a7d 100644 --- a/vendor/github.com/secure-systems-lab/go-securesystemslib/dsse/verify.go +++ b/vendor/github.com/secure-systems-lab/go-securesystemslib/dsse/verify.go @@ -1,6 +1,7 @@ package dsse import ( + "context" "crypto" "errors" "fmt" @@ -8,17 +9,8 @@ import ( "golang.org/x/crypto/ssh" ) -/* -Verifier verifies a complete message against a signature and key. -If the message was hashed prior to signature generation, the verifier -must perform the same steps. -If KeyID returns successfully, only signature matching the key ID will be verified. -*/ -type Verifier interface { - Verify(data, sig []byte) error - KeyID() (string, error) - Public() crypto.PublicKey -} +// ErrNoSignature indicates that an envelope did not contain any signatures. 
+var ErrNoSignature = errors.New("no signature found") type EnvelopeVerifier struct { providers []Verifier @@ -31,7 +23,7 @@ type AcceptedKey struct { Sig Signature } -func (ev *EnvelopeVerifier) Verify(e *Envelope) ([]AcceptedKey, error) { +func (ev *EnvelopeVerifier) Verify(ctx context.Context, e *Envelope) ([]AcceptedKey, error) { if e == nil { return nil, errors.New("cannot verify a nil envelope") } @@ -78,7 +70,7 @@ func (ev *EnvelopeVerifier) Verify(e *Envelope) ([]AcceptedKey, error) { continue } - err = v.Verify(paeEnc, sig) + err = v.Verify(ctx, paeEnc, sig) if err != nil { continue } @@ -104,11 +96,11 @@ func (ev *EnvelopeVerifier) Verify(e *Envelope) ([]AcceptedKey, error) { // Sanity if with some reflect magic this happens. if ev.threshold <= 0 || ev.threshold > len(ev.providers) { - return nil, errors.New("Invalid threshold") + return nil, errors.New("invalid threshold") } if len(usedKeyids) < ev.threshold { - return acceptedKeys, errors.New(fmt.Sprintf("Accepted signatures do not match threshold, Found: %d, Expected %d", len(acceptedKeys), ev.threshold)) + return acceptedKeys, fmt.Errorf("accepted signatures do not match threshold, Found: %d, Expected %d", len(acceptedKeys), ev.threshold) } return acceptedKeys, nil @@ -119,15 +111,15 @@ func NewEnvelopeVerifier(v ...Verifier) (*EnvelopeVerifier, error) { } func NewMultiEnvelopeVerifier(threshold int, p ...Verifier) (*EnvelopeVerifier, error) { - if threshold <= 0 || threshold > len(p) { - return nil, errors.New("Invalid threshold") + return nil, errors.New("invalid threshold") } ev := EnvelopeVerifier{ providers: p, threshold: threshold, } + return &ev, nil } diff --git a/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/ecdsa.go b/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/ecdsa.go new file mode 100644 index 000000000000..578d6a5483d5 --- /dev/null +++ b/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/ecdsa.go @@ -0,0 +1,111 
@@ +package signerverifier + +import ( + "context" + "crypto" + "crypto/ecdsa" + "crypto/rand" + "crypto/sha256" + "crypto/sha512" + "fmt" + "os" +) + +const ECDSAKeyType = "ecdsa" + +// ECDSASignerVerifier is a dsse.SignerVerifier compliant interface to sign and +// verify signatures using ECDSA keys. +type ECDSASignerVerifier struct { + keyID string + curveSize int + private *ecdsa.PrivateKey + public *ecdsa.PublicKey +} + +// NewECDSASignerVerifierFromSSLibKey creates an ECDSASignerVerifier from an +// SSLibKey. +func NewECDSASignerVerifierFromSSLibKey(key *SSLibKey) (*ECDSASignerVerifier, error) { + if len(key.KeyVal.Public) == 0 { + return nil, ErrInvalidKey + } + + _, publicParsedKey, err := decodeAndParsePEM([]byte(key.KeyVal.Public)) + if err != nil { + return nil, fmt.Errorf("unable to create ECDSA signerverifier: %w", err) + } + + sv := &ECDSASignerVerifier{ + keyID: key.KeyID, + curveSize: publicParsedKey.(*ecdsa.PublicKey).Params().BitSize, + public: publicParsedKey.(*ecdsa.PublicKey), + private: nil, + } + + if len(key.KeyVal.Private) > 0 { + _, privateParsedKey, err := decodeAndParsePEM([]byte(key.KeyVal.Private)) + if err != nil { + return nil, fmt.Errorf("unable to create ECDSA signerverifier: %w", err) + } + + sv.private = privateParsedKey.(*ecdsa.PrivateKey) + } + + return sv, nil +} + +// Sign creates a signature for `data`. +func (sv *ECDSASignerVerifier) Sign(ctx context.Context, data []byte) ([]byte, error) { + if sv.private == nil { + return nil, ErrNotPrivateKey + } + + hashedData := getECDSAHashedData(data, sv.curveSize) + + return ecdsa.SignASN1(rand.Reader, sv.private, hashedData) +} + +// Verify verifies the `sig` value passed in against `data`. 
+func (sv *ECDSASignerVerifier) Verify(ctx context.Context, data []byte, sig []byte) error { + hashedData := getECDSAHashedData(data, sv.curveSize) + + if ok := ecdsa.VerifyASN1(sv.public, hashedData, sig); !ok { + return ErrSignatureVerificationFailed + } + + return nil +} + +// KeyID returns the identifier of the key used to create the +// ECDSASignerVerifier instance. +func (sv *ECDSASignerVerifier) KeyID() (string, error) { + return sv.keyID, nil +} + +// Public returns the public portion of the key used to create the +// ECDSASignerVerifier instance. +func (sv *ECDSASignerVerifier) Public() crypto.PublicKey { + return sv.public +} + +// LoadECDSAKeyFromFile returns an SSLibKey instance for an ECDSA key stored in +// a file in the custom securesystemslib format. +func LoadECDSAKeyFromFile(path string) (*SSLibKey, error) { + contents, err := os.ReadFile(path) + if err != nil { + return nil, fmt.Errorf("unable to load ECDSA key from file: %w", err) + } + + return loadKeyFromSSLibBytes(contents) +} + +func getECDSAHashedData(data []byte, curveSize int) []byte { + switch { + case curveSize <= 256: + return hashBeforeSigning(data, sha256.New()) + case 256 < curveSize && curveSize <= 384: + return hashBeforeSigning(data, sha512.New384()) + case curveSize > 384: + return hashBeforeSigning(data, sha512.New()) + } + return []byte{} +} diff --git a/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/ed25519.go b/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/ed25519.go new file mode 100644 index 000000000000..c71d313a75dc --- /dev/null +++ b/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/ed25519.go @@ -0,0 +1,98 @@ +package signerverifier + +import ( + "context" + "crypto" + "crypto/ed25519" + "encoding/hex" + "fmt" + "os" +) + +const ED25519KeyType = "ed25519" + +// ED25519SignerVerifier is a dsse.SignerVerifier compliant interface to sign +// and verify signatures using ED25519 keys. 
+type ED25519SignerVerifier struct { + keyID string + private ed25519.PrivateKey + public ed25519.PublicKey +} + +// NewED25519SignerVerifierFromSSLibKey creates an Ed25519SignerVerifier from an +// SSLibKey. +func NewED25519SignerVerifierFromSSLibKey(key *SSLibKey) (*ED25519SignerVerifier, error) { + if len(key.KeyVal.Public) == 0 { + return nil, ErrInvalidKey + } + + public, err := hex.DecodeString(key.KeyVal.Public) + if err != nil { + return nil, fmt.Errorf("unable to create ED25519 signerverifier: %w", err) + } + + var private []byte + if len(key.KeyVal.Private) > 0 { + private, err = hex.DecodeString(key.KeyVal.Private) + if err != nil { + return nil, fmt.Errorf("unable to create ED25519 signerverifier: %w", err) + } + + // python-securesystemslib provides an interface to generate ed25519 + // keys but it differs slightly in how it serializes the key to disk. + // Specifically, the keyval.private field includes _only_ the private + // portion of the key while libraries such as crypto/ed25519 also expect + // the public portion. So, if the private portion is half of what we + // expect, we append the public portion as well. + if len(private) == ed25519.PrivateKeySize/2 { + private = append(private, public...) + } + } + + return &ED25519SignerVerifier{ + keyID: key.KeyID, + public: ed25519.PublicKey(public), + private: ed25519.PrivateKey(private), + }, nil +} + +// Sign creates a signature for `data`. +func (sv *ED25519SignerVerifier) Sign(ctx context.Context, data []byte) ([]byte, error) { + if len(sv.private) == 0 { + return nil, ErrNotPrivateKey + } + + signature := ed25519.Sign(sv.private, data) + return signature, nil +} + +// Verify verifies the `sig` value passed in against `data`. 
+func (sv *ED25519SignerVerifier) Verify(ctx context.Context, data []byte, sig []byte) error { + if ok := ed25519.Verify(sv.public, data, sig); ok { + return nil + } + return ErrSignatureVerificationFailed +} + +// KeyID returns the identifier of the key used to create the +// ED25519SignerVerifier instance. +func (sv *ED25519SignerVerifier) KeyID() (string, error) { + return sv.keyID, nil +} + +// Public returns the public portion of the key used to create the +// ED25519SignerVerifier instance. +func (sv *ED25519SignerVerifier) Public() crypto.PublicKey { + return sv.public +} + +// LoadED25519KeyFromFile returns an SSLibKey instance for an ED25519 key stored +// in a file in the custom securesystemslib format. +func LoadED25519KeyFromFile(path string) (*SSLibKey, error) { + contents, err := os.ReadFile(path) + if err != nil { + return nil, fmt.Errorf("unable to load ED25519 key from file: %w", err) + } + + return loadKeyFromSSLibBytes(contents) +} diff --git a/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/rsa.go b/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/rsa.go new file mode 100644 index 000000000000..3612f28a4b2f --- /dev/null +++ b/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/rsa.go @@ -0,0 +1,141 @@ +package signerverifier + +import ( + "context" + "crypto" + "crypto/rand" + "crypto/rsa" + "crypto/sha256" + "crypto/x509" + "fmt" + "os" + "strings" +) + +const ( + RSAKeyType = "rsa" + RSAKeyScheme = "rsassa-pss-sha256" + RSAPrivateKeyPEM = "RSA PRIVATE KEY" +) + +// RSAPSSSignerVerifier is a dsse.SignerVerifier compliant interface to sign and +// verify signatures using RSA keys following the RSA-PSS scheme. +type RSAPSSSignerVerifier struct { + keyID string + private *rsa.PrivateKey + public *rsa.PublicKey +} + +// NewRSAPSSSignerVerifierFromSSLibKey creates an RSAPSSSignerVerifier from an +// SSLibKey. 
+func NewRSAPSSSignerVerifierFromSSLibKey(key *SSLibKey) (*RSAPSSSignerVerifier, error) { + if len(key.KeyVal.Public) == 0 { + return nil, ErrInvalidKey + } + + _, publicParsedKey, err := decodeAndParsePEM([]byte(key.KeyVal.Public)) + if err != nil { + return nil, fmt.Errorf("unable to create RSA-PSS signerverifier: %w", err) + } + + if len(key.KeyVal.Private) > 0 { + _, privateParsedKey, err := decodeAndParsePEM([]byte(key.KeyVal.Private)) + if err != nil { + return nil, fmt.Errorf("unable to create RSA-PSS signerverifier: %w", err) + } + + return &RSAPSSSignerVerifier{ + keyID: key.KeyID, + public: publicParsedKey.(*rsa.PublicKey), + private: privateParsedKey.(*rsa.PrivateKey), + }, nil + } + + return &RSAPSSSignerVerifier{ + keyID: key.KeyID, + public: publicParsedKey.(*rsa.PublicKey), + private: nil, + }, nil +} + +// Sign creates a signature for `data`. +func (sv *RSAPSSSignerVerifier) Sign(ctx context.Context, data []byte) ([]byte, error) { + if sv.private == nil { + return nil, ErrNotPrivateKey + } + + hashedData := hashBeforeSigning(data, sha256.New()) + + return rsa.SignPSS(rand.Reader, sv.private, crypto.SHA256, hashedData, &rsa.PSSOptions{SaltLength: sha256.Size, Hash: crypto.SHA256}) +} + +// Verify verifies the `sig` value passed in against `data`. +func (sv *RSAPSSSignerVerifier) Verify(ctx context.Context, data []byte, sig []byte) error { + hashedData := hashBeforeSigning(data, sha256.New()) + + if err := rsa.VerifyPSS(sv.public, crypto.SHA256, hashedData, sig, &rsa.PSSOptions{SaltLength: sha256.Size, Hash: crypto.SHA256}); err != nil { + return ErrSignatureVerificationFailed + } + + return nil +} + +// KeyID returns the identifier of the key used to create the +// RSAPSSSignerVerifier instance. +func (sv *RSAPSSSignerVerifier) KeyID() (string, error) { + return sv.keyID, nil +} + +// Public returns the public portion of the key used to create the +// RSAPSSSignerVerifier instance. 
+func (sv *RSAPSSSignerVerifier) Public() crypto.PublicKey { + return sv.public +} + +// LoadRSAPSSKeyFromFile returns an SSLibKey instance for an RSA key stored in a +// file. +func LoadRSAPSSKeyFromFile(path string) (*SSLibKey, error) { + contents, err := os.ReadFile(path) + if err != nil { + return nil, fmt.Errorf("unable to load RSA key from file: %w", err) + } + + pemData, keyObj, err := decodeAndParsePEM(contents) + if err != nil { + return nil, fmt.Errorf("unable to load RSA key from file: %w", err) + } + + key := &SSLibKey{ + KeyType: RSAKeyType, + Scheme: RSAKeyScheme, + KeyIDHashAlgorithms: KeyIDHashAlgorithms, + KeyVal: KeyVal{}, + } + + switch k := keyObj.(type) { + case *rsa.PublicKey: + pubKeyBytes, err := x509.MarshalPKIXPublicKey(k) + if err != nil { + return nil, fmt.Errorf("unable to load RSA key from file: %w", err) + } + key.KeyVal.Public = strings.TrimSpace(string(generatePEMBlock(pubKeyBytes, PublicKeyPEM))) + + case *rsa.PrivateKey: + pubKeyBytes, err := x509.MarshalPKIXPublicKey(k.Public()) + if err != nil { + return nil, fmt.Errorf("unable to load RSA key from file: %w", err) + } + key.KeyVal.Public = strings.TrimSpace(string(generatePEMBlock(pubKeyBytes, PublicKeyPEM))) + key.KeyVal.Private = strings.TrimSpace(string(generatePEMBlock(pemData.Bytes, RSAPrivateKeyPEM))) + } + + if len(key.KeyID) == 0 { + keyID, err := calculateKeyID(key) + if err != nil { + return nil, fmt.Errorf("unable to load RSA key from file: %w", err) + } + key.KeyID = keyID + } + + return key, nil +} diff --git a/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/signerverifier.go b/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/signerverifier.go new file mode 100644 index 000000000000..5f510f7be571 --- /dev/null +++ b/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/signerverifier.go @@ -0,0 +1,34 @@ +package signerverifier + +import ( + "errors" +) + +var KeyIDHashAlgorithms = []string{"sha256", 
"sha512"} + +var ( + ErrNotPrivateKey = errors.New("loaded key is not a private key") + ErrSignatureVerificationFailed = errors.New("failed to verify signature") + ErrUnknownKeyType = errors.New("unknown key type") + ErrInvalidThreshold = errors.New("threshold is either less than 1 or greater than number of provided public keys") + ErrInvalidKey = errors.New("key object has no value") +) + +const ( + PublicKeyPEM = "PUBLIC KEY" + PrivateKeyPEM = "PRIVATE KEY" +) + +type SSLibKey struct { + KeyIDHashAlgorithms []string `json:"keyid_hash_algorithms"` + KeyType string `json:"keytype"` + KeyVal KeyVal `json:"keyval"` + Scheme string `json:"scheme"` + KeyID string `json:"keyid"` +} + +type KeyVal struct { + Private string `json:"private,omitempty"` + Public string `json:"public"` + Certificate string `json:"certificate,omitempty"` +} diff --git a/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/utils.go b/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/utils.go new file mode 100644 index 000000000000..73aaa77d46a2 --- /dev/null +++ b/vendor/github.com/secure-systems-lab/go-securesystemslib/signerverifier/utils.go @@ -0,0 +1,150 @@ +package signerverifier + +import ( + "crypto/sha256" + "crypto/x509" + "encoding/hex" + "encoding/json" + "encoding/pem" + "errors" + "hash" + "testing" + + "github.com/secure-systems-lab/go-securesystemslib/cjson" +) + +/* +Credits: Parts of this file were originally authored for in-toto-golang. +*/ + +var ( + // ErrNoPEMBlock gets triggered when there is no PEM block in the provided file + ErrNoPEMBlock = errors.New("failed to decode the data as PEM block (are you sure this is a pem file?)") + // ErrFailedPEMParsing gets returned when PKCS1, PKCS8 or PKIX key parsing fails + ErrFailedPEMParsing = errors.New("failed parsing the PEM block: unsupported PEM type") +) + +// loadKeyFromSSLibBytes returns a pointer to a Key instance created from the +// contents of the bytes. 
The key contents are expected to be in the custom +// securesystemslib format. +func loadKeyFromSSLibBytes(contents []byte) (*SSLibKey, error) { + var key *SSLibKey + if err := json.Unmarshal(contents, &key); err != nil { + return nil, err + } + + if len(key.KeyID) == 0 { + keyID, err := calculateKeyID(key) + if err != nil { + return nil, err + } + key.KeyID = keyID + } + + return key, nil +} + +func calculateKeyID(k *SSLibKey) (string, error) { + key := map[string]any{ + "keytype": k.KeyType, + "scheme": k.Scheme, + "keyid_hash_algorithms": k.KeyIDHashAlgorithms, + "keyval": map[string]string{ + "public": k.KeyVal.Public, + }, + } + canonical, err := cjson.EncodeCanonical(key) + if err != nil { + return "", err + } + digest := sha256.Sum256(canonical) + return hex.EncodeToString(digest[:]), nil +} + +/* +generatePEMBlock creates a PEM block from scratch via the keyBytes and the pemType. +If successful it returns a PEM block as []byte slice. This function should always +succeed, if keyBytes is empty the PEM block will have an empty byte block. +Therefore only header and footer will exist. +*/ +func generatePEMBlock(keyBytes []byte, pemType string) []byte { + // construct PEM block + pemBlock := &pem.Block{ + Type: pemType, + Headers: nil, + Bytes: keyBytes, + } + return pem.EncodeToMemory(pemBlock) +} + +/* +decodeAndParsePEM receives potential PEM bytes decodes them via pem.Decode +and pushes them to parseKey. If any error occurs during this process, +the function will return nil and an error (either ErrFailedPEMParsing +or ErrNoPEMBlock). On success it will return the decoded pemData, the +key object interface and nil as error. We need the decoded pemData, +because LoadKey relies on decoded pemData for operating system +interoperability. +*/ +func decodeAndParsePEM(pemBytes []byte) (*pem.Block, any, error) { + // pem.Decode returns the parsed pem block and a rest. + // The rest is everything, that could not be parsed as PEM block. 
+ // Therefore we can drop this via using the blank identifier "_" + data, _ := pem.Decode(pemBytes) + if data == nil { + return nil, nil, ErrNoPEMBlock + } + + // Try to load private key, if this fails try to load + // key as public key + key, err := parsePEMKey(data.Bytes) + if err != nil { + return nil, nil, err + } + return data, key, nil +} + +/* +parseKey tries to parse a PEM []byte slice. Using the following standards +in the given order: + + - PKCS8 + - PKCS1 + - PKIX + +On success it returns the parsed key and nil. +On failure it returns nil and the error ErrFailedPEMParsing +*/ +func parsePEMKey(data []byte) (any, error) { + key, err := x509.ParsePKCS8PrivateKey(data) + if err == nil { + return key, nil + } + key, err = x509.ParsePKCS1PrivateKey(data) + if err == nil { + return key, nil + } + key, err = x509.ParsePKIXPublicKey(data) + if err == nil { + return key, nil + } + key, err = x509.ParseECPrivateKey(data) + if err == nil { + return key, nil + } + return nil, ErrFailedPEMParsing +} + +func hashBeforeSigning(data []byte, h hash.Hash) []byte { + h.Write(data) + return h.Sum(nil) +} + +func hexDecode(t *testing.T, data string) []byte { + t.Helper() + b, err := hex.DecodeString(data) + if err != nil { + t.Fatal(err) + } + return b +} diff --git a/vendor/modules.txt b/vendor/modules.txt index 83b26c04773a..6491a8e998f6 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -589,12 +589,13 @@ github.com/hashicorp/go-retryablehttp ## explicit; go 1.18 github.com/hashicorp/golang-lru/v2/internal github.com/hashicorp/golang-lru/v2/simplelru -# github.com/in-toto/in-toto-golang v0.5.0 -## explicit; go 1.17 +# github.com/in-toto/in-toto-golang v0.9.0 +## explicit; go 1.20 github.com/in-toto/in-toto-golang/in_toto github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/common github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.1 github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v0.2 
+github.com/in-toto/in-toto-golang/in_toto/slsa_provenance/v1 # github.com/klauspost/compress v1.18.0 ## explicit; go 1.22 github.com/klauspost/compress @@ -744,10 +745,11 @@ github.com/russross/blackfriday/v2 # github.com/sasha-s/go-deadlock v0.3.5 ## explicit github.com/sasha-s/go-deadlock -# github.com/secure-systems-lab/go-securesystemslib v0.4.0 -## explicit; go 1.17 +# github.com/secure-systems-lab/go-securesystemslib v0.6.0 +## explicit; go 1.20 github.com/secure-systems-lab/go-securesystemslib/cjson github.com/secure-systems-lab/go-securesystemslib/dsse +github.com/secure-systems-lab/go-securesystemslib/signerverifier # github.com/serialx/hashring v0.0.0-20200727003509-22c0c7ab6b1b ## explicit github.com/serialx/hashring