Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
31 commits
Select commit Hold shift + click to select a range
a0ac130
chore: update schema to latest 2.3
spiffcs Dec 14, 2022
22850f3
fix: power user command is now using syft json
spiffcs Dec 14, 2022
f7258e1
fix: null output to empty array
spiffcs Dec 14, 2022
67b42e8
feat: generate SHA1 digests for spdx formats
spiffcs Dec 14, 2022
e7948cc
opinion: remove size check for empty files
spiffcs Dec 14, 2022
a1787a8
refactor: remove SHA1 by default
spiffcs Dec 14, 2022
ea74bf6
test: update unit tests
spiffcs Dec 14, 2022
00589b2
feat: set filesAnalyzed to true if fileOwner
spiffcs Dec 14, 2022
8052211
feat: add packageVerificationCode
spiffcs Dec 14, 2022
051521d
feat: handle packageVerificationCode for OwnedFile
spiffcs Dec 19, 2022
277d01e
feat: add helper functions to SBOM for SPDX format
spiffcs Dec 19, 2022
1f62f86
feat: update package verification code flow
spiffcs Dec 19, 2022
23b6d1b
feat: update coordinate discoverability
spiffcs Dec 19, 2022
83af97c
fix: account for invalid document state
spiffcs Dec 19, 2022
638c533
chore: undo packageChecksum changes
spiffcs Dec 19, 2022
786baec
fix: static-analysis
spiffcs Dec 19, 2022
d196ae4
test: update sbom
spiffcs Dec 19, 2022
80a1d50
chore: remove old comment
spiffcs Dec 19, 2022
7202c6c
test: add test harness for spdx tools
spiffcs Dec 19, 2022
da546a6
refactor: update relationship type API
spiffcs Dec 19, 2022
63b0044
test: tag value tests are passing
spiffcs Dec 19, 2022
591bdfb
tests: spdx tag value cli testing
spiffcs Dec 19, 2022
026428f
fix: imports
spiffcs Dec 19, 2022
5209869
test: add failing conditions and refactor errors
spiffcs Dec 19, 2022
b3133fe
test: update testing temp pathing
spiffcs Dec 19, 2022
ba86f13
test: t.tempDir free cleanup
spiffcs Dec 19, 2022
a77b3ff
test: assert successful make code
spiffcs Dec 19, 2022
52eb80f
test: build the image
spiffcs Dec 19, 2022
fb4acbf
test: update setup
spiffcs Dec 19, 2022
8b1e645
fix: linting
spiffcs Dec 19, 2022
5ef4a9f
test: run and show errors
spiffcs Dec 19, 2022
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion go.mod
Original file line number Diff line number Diff line change
Expand Up @@ -64,6 +64,7 @@ require (
github.com/sigstore/rekor v0.12.1-0.20220915152154-4bb6f441c1b2
github.com/sigstore/sigstore v1.4.4
github.com/vbatts/go-mtree v0.5.0
golang.org/x/exp v0.0.0-20220823124025-807a23277127
gopkg.in/yaml.v3 v3.0.1
)

Expand Down Expand Up @@ -288,7 +289,6 @@ require (
go.uber.org/atomic v1.10.0 // indirect
go.uber.org/multierr v1.8.0 // indirect
go.uber.org/zap v1.23.0 // indirect
golang.org/x/exp v0.0.0-20220823124025-807a23277127 // indirect
golang.org/x/oauth2 v0.0.0-20221006150949-b44042a4b9c1 // indirect
golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0 // indirect
golang.org/x/sys v0.0.0-20220928140112-f11e5e49a4ec // indirect
Expand Down
1,184 changes: 657 additions & 527 deletions schema/spdx-json/spdx-schema-2.2.json

Large diffs are not rendered by default.

9 changes: 9 additions & 0 deletions syft/artifact/relationship.go
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,15 @@ const (
DescribedByRelationship RelationshipType = "described-by"
)

// AllRelationshipTypes returns the complete set of relationship types
// that syft can record between artifacts.
func AllRelationshipTypes() []RelationshipType {
	all := make([]RelationshipType, 0, 4)
	all = append(all,
		OwnershipByFileOverlapRelationship,
		ContainsRelationship,
		DependencyOfRelationship,
		DescribedByRelationship,
	)
	return all
}

type RelationshipType string

type Relationship struct {
Expand Down
6 changes: 1 addition & 5 deletions syft/file/digest_cataloger.go
Original file line number Diff line number Diff line change
Expand Up @@ -92,15 +92,11 @@ func DigestsFromFile(closer io.ReadCloser, hashes []crypto.Hash) ([]Digest, erro
writers[idx] = hashers[idx]
}

size, err := io.Copy(io.MultiWriter(writers...), closer)
_, err := io.Copy(io.MultiWriter(writers...), closer)
Comment thread
spiffcs marked this conversation as resolved.
if err != nil {
return nil, err
}

if size == 0 {
return make([]Digest, 0), nil
}

result := make([]Digest, len(hashes))
// only capture digests when there is content. It is important to do this based on SIZE and not
// FILE TYPE. The reasoning is that it is possible for a tar to be crafted with a header-only
Expand Down
6 changes: 0 additions & 6 deletions syft/file/digest_cataloger_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,12 +29,6 @@ func testDigests(t testing.TB, root string, files []string, hashes ...crypto.Has
t.Fatalf("could not read %q : %+v", f, err)
}

if len(b) == 0 {
// we don't keep digests for empty files
digests[source.NewLocation(f).Coordinates] = []Digest{}
continue
}

for _, hash := range hashes {
h := hash.New()
h.Write(b)
Expand Down
107 changes: 91 additions & 16 deletions syft/formats/common/spdxhelpers/to_format_model.go
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
//nolint:gosec // sha1 is used as a required hash function for SPDX, not a crypto function
package spdxhelpers

import (
"crypto/sha1"
"fmt"
"sort"
"strings"
Expand Down Expand Up @@ -103,7 +105,7 @@ func ToFormatModel(s sbom.SBOM) *spdx.Document {
// Cardinality: optional, one
CreatorComment: "",
},
Packages: toPackages(s.Artifacts.PackageCatalog),
Packages: toPackages(s.Artifacts.PackageCatalog, s),
Files: toFiles(s),
Relationships: toRelationships(s.RelationshipsSorted()),
}
Expand All @@ -123,7 +125,7 @@ func toSPDXID(identifiable artifact.Identifiable) common.ElementID {
// packages populates all Package Information from the package Catalog (see https://spdx.github.io/spdx-spec/3-package-information/)
//
//nolint:funlen
func toPackages(catalog *pkg.Catalog) (results []*spdx.Package) {
func toPackages(catalog *pkg.Catalog, sbom sbom.SBOM) (results []*spdx.Package) {
for _, p := range catalog.Sorted() {
// name should be guaranteed to be unique, but semantically useful and stable
id := toSPDXID(p)
Expand All @@ -132,7 +134,25 @@ func toPackages(catalog *pkg.Catalog) (results []*spdx.Package) {
// in the Comments on License field (section 7.16). With respect to NOASSERTION, a written explanation in
// the Comments on License field (section 7.16) is preferred.
license := License(p)
checksums, filesAnalyzed := toPackageChecksums(p)

// two ways to get filesAnalyzed == true:
// 1. syft has generated a sha1 digest for the package itself - usually in the java cataloger
// 2. syft has generated a sha1 digest for the package's contents
packageChecksums, filesAnalyzed := toPackageChecksums(p)

packageVerificationCode := newPackageVerificationCode(p, sbom)
if packageVerificationCode != nil {
filesAnalyzed = true
}

// invalid SPDX document state
if filesAnalyzed && packageVerificationCode == nil {
// this is an invalid document state
// we reset the filesAnalyzed flag to false to avoid
// cases where a package digest was generated but there was
// not enough metadata to generate a verification code regarding the files
filesAnalyzed = false
}

results = append(results, &spdx.Package{
// NOT PART OF SPEC
Expand Down Expand Up @@ -193,7 +213,7 @@ func toPackages(catalog *pkg.Catalog) (results []*spdx.Package) {
// 7.9: Package Verification Code
// Cardinality: optional, one if filesAnalyzed is true / omitted;
// zero (must be omitted) if filesAnalyzed is false
PackageVerificationCode: nil,
PackageVerificationCode: packageVerificationCode,

// 7.10: Package Checksum: may have keys for SHA1, SHA256 and/or MD5
// Cardinality: optional, one or many
Expand All @@ -203,7 +223,7 @@ func toPackages(catalog *pkg.Catalog) (results []*spdx.Package) {
// to determine if any file in the original package has been changed. If the SPDX file is to be included
// in a package, this value should not be calculated. The SHA-1 algorithm will be used to provide the
// checksum by default.
PackageChecksums: checksums,
PackageChecksums: packageChecksums,

// 7.11: Package Home Page
// Cardinality: optional, one
Expand Down Expand Up @@ -275,24 +295,14 @@ func toPackages(catalog *pkg.Catalog) (results []*spdx.Package) {
return results
}

func toPackageOriginator(p pkg.Package) *common.Originator {
kind, originator := Originator(p)
if kind == "" || originator == "" {
return nil
}
return &common.Originator{
Originator: originator,
OriginatorType: kind,
}
}

func toPackageChecksums(p pkg.Package) ([]common.Checksum, bool) {
filesAnalyzed := false
var checksums []common.Checksum
switch meta := p.Metadata.(type) {
// we generate digest for some Java packages
// spdx.github.io/spdx-spec/package-information/#710-package-checksum-field
case pkg.JavaMetadata:
// if syft has generated the digest here then filesAnalyzed is true
if len(meta.ArchiveDigests) > 0 {
filesAnalyzed = true
for _, digest := range meta.ArchiveDigests {
Expand All @@ -304,6 +314,7 @@ func toPackageChecksums(p pkg.Package) ([]common.Checksum, bool) {
}
}
case pkg.GolangBinMetadata:
// because the H1 digest is found in the Golang metadata we cannot claim that the files were analyzed
algo, hexStr, err := util.HDigestToSHA(meta.H1Digest)
if err != nil {
log.Debugf("invalid h1digest: %s: %v", meta.H1Digest, err)
Expand All @@ -318,6 +329,17 @@ func toPackageChecksums(p pkg.Package) ([]common.Checksum, bool) {
return checksums, filesAnalyzed
}

// toPackageOriginator maps a syft package to the SPDX originator field,
// returning nil when no originator information can be derived.
func toPackageOriginator(p pkg.Package) *common.Originator {
	originatorType, value := Originator(p)
	if originatorType != "" && value != "" {
		return &common.Originator{
			Originator:     value,
			OriginatorType: originatorType,
		}
	}
	return nil
}

func formatSPDXExternalRefs(p pkg.Package) (refs []*spdx.PackageExternalReference) {
for _, ref := range ExternalRefs(p) {
refs = append(refs, &spdx.PackageExternalReference{
Expand Down Expand Up @@ -414,6 +436,7 @@ func toFiles(s sbom.SBOM) (results []*spdx.File) {
}

func toFileChecksums(digests []file.Digest) (checksums []common.Checksum) {
checksums = make([]common.Checksum, 0, len(digests))
Comment thread
spiffcs marked this conversation as resolved.
for _, digest := range digests {
checksums = append(checksums, common.Checksum{
Algorithm: toChecksumAlgorithm(digest.Algorithm),
Expand Down Expand Up @@ -462,3 +485,55 @@ func toFileTypes(metadata *source.FileMetadata) (ty []string) {

return ty
}

// newPackageVerificationCode computes the SPDX package verification code for
// the given package, per the algorithm in the SPDX specification:
// https://spdx.github.io/spdx-spec/v2.3/package-information/#79-package-verification-code-field
//
// The code is derived from the SHA1 digests of the files the package claims
// to contain (keyed off of the "contains" relationship). The SPDX validator
// fails a document where file SHA1s are present but the owning package has no
// verification code, so callers use a non-nil return here to decide whether
// filesAnalyzed may be set to true.
//
// TODO: handle the SPDX excludes-file case — if a file is an "excludes" file,
// skip it /* exclude SPDX analysis file(s) */ (see spec link above).
func newPackageVerificationCode(p pkg.Package, sbom sbom.SBOM) *common.PackageVerificationCode {
	coordinates := sbom.CoordinatesForPackage(p, artifact.ContainsRelationship)

	// collect exactly one SHA1 digest per owned file; files with no SHA1
	// digest cannot participate in the verification code and are skipped.
	// (Previously a zero-value digest was appended for such files, which
	// folded empty strings into the hash input and could produce a
	// verification code even when no SHA1 digests existed at all.)
	var sha1Digests []file.Digest
	for _, c := range coordinates {
		for _, d := range sbom.Artifacts.FileDigests[c] {
			if d.Algorithm == "sha1" {
				sha1Digests = append(sha1Digests, d)
				break
			}
		}
	}

	if len(sha1Digests) == 0 {
		return nil
	}

	// the spec requires the SHA1 values be sorted in ascending order...
	sort.SliceStable(sha1Digests, func(i, j int) bool {
		return sha1Digests[i].Value < sha1Digests[j].Value
	})

	// ...then concatenated with no separators before being hashed
	var b strings.Builder
	for _, d := range sha1Digests {
		b.WriteString(d.Value)
	}

	//nolint:gosec // sha1 is mandated by the SPDX spec here, not used as a crypto function
	hasher := sha1.New()
	_, _ = hasher.Write([]byte(b.String()))
	return &common.PackageVerificationCode{
		// 7.9.1: Package Verification Code Value
		// Cardinality: mandatory, one
		Value: fmt.Sprintf("%+x", hasher.Sum(nil)),
	}
}
4 changes: 3 additions & 1 deletion syft/formats/common/spdxhelpers/to_format_model_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@ import (
"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/pkg"
"github.com/anchore/syft/syft/sbom"
"github.com/anchore/syft/syft/source"
)

Expand Down Expand Up @@ -369,6 +370,7 @@ func Test_fileIDsForPackage(t *testing.T) {
}

func Test_H1Digest(t *testing.T) {
sbom := sbom.SBOM{}
tests := []struct {
name string
pkg pkg.Package
Expand Down Expand Up @@ -415,7 +417,7 @@ func Test_H1Digest(t *testing.T) {
for _, test := range tests {
t.Run(test.name, func(t *testing.T) {
catalog := pkg.NewCatalog(test.pkg)
pkgs := toPackages(catalog)
pkgs := toPackages(catalog, sbom)
require.Len(t, pkgs, 1)
for _, p := range pkgs {
if test.expectedDigest == "" {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,14 @@
"dataLicense": "CC0-1.0",
"SPDXID": "SPDXRef-DOCUMENT",
"name": "user-image-input",
"documentNamespace": "https://anchore.com/syft/image/user-image-input-55ad4afc-ecdc-46a4-8bc3-36b3e72da5d1",
"documentNamespace": "https://anchore.com/syft/image/user-image-input-ace88a38-4633-4bff-8fa3-8ae929dab37d",
"creationInfo": {
"licenseListVersion": "3.18",
"licenseListVersion": "3.19",
"creators": [
"Organization: Anchore, Inc",
"Tool: syft-v0.42.0-bogus"
],
"created": "2022-11-19T13:46:57Z",
"created": "2022-12-14T18:21:40Z",
"comment": ""
},
"packages": [
Expand Down Expand Up @@ -70,7 +70,7 @@
"fileTypes": [
"OTHER"
],
"checksums": null,
"checksums": [],
"licenseConcluded": "NOASSERTION",
"copyrightText": ""
},
Expand All @@ -80,7 +80,7 @@
"fileTypes": [
"OTHER"
],
"checksums": null,
"checksums": [],
"licenseConcluded": "NOASSERTION",
"copyrightText": ""
},
Expand All @@ -90,7 +90,7 @@
"fileTypes": [
"OTHER"
],
"checksums": null,
"checksums": [],
"licenseConcluded": "NOASSERTION",
"copyrightText": ""
},
Expand All @@ -100,7 +100,7 @@
"fileTypes": [
"OTHER"
],
"checksums": null,
"checksums": [],
"licenseConcluded": "NOASSERTION",
"copyrightText": ""
},
Expand All @@ -110,7 +110,7 @@
"fileTypes": [
"OTHER"
],
"checksums": null,
"checksums": [],
"licenseConcluded": "NOASSERTION",
"copyrightText": ""
},
Expand All @@ -120,7 +120,7 @@
"fileTypes": [
"OTHER"
],
"checksums": null,
"checksums": [],
"licenseConcluded": "NOASSERTION",
"copyrightText": ""
}
Expand Down
Binary file not shown.
32 changes: 32 additions & 0 deletions syft/sbom/sbom.go
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ package sbom
import (
"sort"

"golang.org/x/exp/slices"

"github.com/anchore/syft/syft/artifact"
"github.com/anchore/syft/syft/file"
"github.com/anchore/syft/syft/linux"
Expand Down Expand Up @@ -65,6 +67,36 @@ func (s SBOM) AllCoordinates() []source.Coordinates {
return set.ToSlice()
}

// RelationshipsForPackage returns all relationships for the provided types.
// If no types are provided, all relationships for the package are returned.
func (s SBOM) RelationshipsForPackage(p pkg.Package, rt ...artifact.RelationshipType) []artifact.Relationship {
if len(rt) == 0 {
rt = artifact.AllRelationshipTypes()
}

var relationships []artifact.Relationship
for _, relationship := range s.Relationships {
// check if the relationship is one we're searching for; rt is inclusive
idx := slices.IndexFunc(rt, func(r artifact.RelationshipType) bool { return relationship.Type == r })
Comment thread
spiffcs marked this conversation as resolved.
if relationship.From.ID() == p.ID() && idx != -1 {
relationships = append(relationships, relationship)
}
}

return relationships
}

// CoordinatesForPackage collects the coordinates reachable from the given
// package through relationships of the provided types; when no types are
// given, every relationship type is considered.
func (s SBOM) CoordinatesForPackage(p pkg.Package, rt ...artifact.RelationshipType) []source.Coordinates {
	var results []source.Coordinates
	for _, rel := range s.RelationshipsForPackage(p, rt...) {
		results = append(results, extractCoordinates(rel)...)
	}
	return results
}

func extractCoordinates(relationship artifact.Relationship) (results []source.Coordinates) {
if coordinates, exists := relationship.From.(source.Coordinates); exists {
results = append(results, coordinates)
Expand Down
Loading