Structured results for permissions (#2584)

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* Update checks/evaluation/permissions/GitHubWorkflowPermissionsTopNoWrite.yml

Co-authored-by: Joyce <joycebrumu.u@gmail.com>
Signed-off-by: laurentsimon <64505099+laurentsimon@users.noreply.github.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* Update checks/evaluation/permissions/GitHubWorkflowPermissionsStepsNoWrite.yml

Co-authored-by: Joyce <joycebrumu.u@gmail.com>
Signed-off-by: laurentsimon <64505099+laurentsimon@users.noreply.github.com>

* Update checks/evaluation/permissions/GitHubWorkflowPermissionsStepsNoWrite.yml

Co-authored-by: Joyce <joycebrumu.u@gmail.com>
Signed-off-by: laurentsimon <64505099+laurentsimon@users.noreply.github.com>

* Update checks/evaluation/permissions/GitHubWorkflowPermissionsStepsNoWrite.yml

Co-authored-by: Joyce <joycebrumu.u@gmail.com>
Signed-off-by: laurentsimon <64505099+laurentsimon@users.noreply.github.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

* update

Signed-off-by: laurentsimon <laurentsimon@google.com>

---------

Signed-off-by: laurentsimon <laurentsimon@google.com>
Signed-off-by: laurentsimon <64505099+laurentsimon@users.noreply.github.com>
Co-authored-by: Joyce <joycebrumu.u@gmail.com>
This commit is contained in:
laurentsimon 2023-01-30 18:41:36 -08:00 committed by GitHub
parent 4ebe521141
commit 2ea140a3ee
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
55 changed files with 1563 additions and 330 deletions

View File

@ -25,6 +25,7 @@ import (
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/checks"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
sclog "github.com/ossf/scorecard/v4/log"
)
@ -170,7 +171,7 @@ func CheckPreventBinaryArtifacts(
logger.Info(
fmt.Sprintf(
"binary detected path:%s type: %v offset:%v",
artifactFile.Path, checker.FileTypeBinary, artifactFile.Offset,
artifactFile.Path, finding.FileTypeBinary, artifactFile.Offset,
),
)
return Fail, nil

View File

@ -18,13 +18,14 @@ package checker
import (
"fmt"
"math"
"github.com/ossf/scorecard/v4/finding"
"github.com/ossf/scorecard/v4/rule"
)
type (
// DetailType is the type of details.
DetailType int
// FileType is the type of a file.
FileType int
)
const (
@ -49,20 +50,6 @@ const (
DetailDebug
)
const (
// FileTypeNone is a default, not defined.
// FileTypeNone must be `0`.
FileTypeNone FileType = iota
// FileTypeSource is for source code files.
FileTypeSource
// FileTypeBinary is for binary files.
FileTypeBinary
// FileTypeText is for text files.
FileTypeText
// FileTypeURL for URLs.
FileTypeURL
)
// CheckResult captures result from a check run.
//
//nolint:govet
@ -70,21 +57,11 @@ type CheckResult struct {
Name string
Version int
Error error
Details []CheckDetail
Score int
Reason string
}
// Remediation represents a remediation.
type Remediation struct {
// Code snippet for humans.
Snippet string
// Diff for machines.
Diff string
// Help text for humans.
HelpText string
// Help text in markdown format for humans.
HelpMarkdown string
Details []CheckDetail
// Structured results.
Rules []string // TODO(X): add support.
}
// CheckDetail contains information for each detail.
@ -98,13 +75,17 @@ type CheckDetail struct {
//
//nolint:govet
type LogMessage struct {
Text string // A short string explaining why the detail was recorded/logged.
Path string // Fullpath to the file.
Type FileType // Type of file.
Offset uint // Offset in the file of Path (line for source/text files).
EndOffset uint // End of offset in the file, e.g. if the command spans multiple lines.
Snippet string // Snippet of code
Remediation *Remediation // Remediation information, if any.
// Structured results.
Finding *finding.Finding
// Non-structured results.
Text string // A short string explaining why the detail was recorded/logged.
Path string // Fullpath to the file.
Type finding.FileType // Type of file.
Offset uint // Offset in the file of Path (line for source/text files).
EndOffset uint // End of offset in the file, e.g. if the command spans multiple lines.
Snippet string // Snippet of code
Remediation *rule.Remediation // Remediation information, if any.
}
// CreateProportionalScore creates a proportional score.

View File

@ -18,6 +18,7 @@ import (
"time"
"github.com/ossf/scorecard/v4/clients"
"github.com/ossf/scorecard/v4/finding"
)
// RawResults contains results before a policy
@ -286,11 +287,11 @@ type ArchivedStatus struct {
// File represents a file.
type File struct {
Path string
Snippet string // Snippet of code
Offset uint // Offset in the file of Path (line for source/text files).
EndOffset uint // End of offset in the file, e.g. if the command spans multiple lines.
FileSize uint // Total size of file.
Type FileType // Type of file.
Snippet string // Snippet of code
Offset uint // Offset in the file of Path (line for source/text files).
EndOffset uint // End of offset in the file, e.g. if the command spans multiple lines.
FileSize uint // Total size of file.
Type finding.FileType // Type of file.
// TODO: add hash.
}

View File

@ -17,6 +17,7 @@ package evaluation
import (
"github.com/ossf/scorecard/v4/checker"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
)
// BinaryArtifacts applies the score policy for the Binary-Artifacts check.
@ -36,7 +37,7 @@ func BinaryArtifacts(name string, dl checker.DetailLogger,
score := checker.MaxResultScore
for _, f := range r.Files {
dl.Warn(&checker.LogMessage{
Path: f.Path, Type: checker.FileTypeBinary,
Path: f.Path, Type: finding.FileTypeBinary,
Offset: f.Offset,
Text: "binary detected",
})

View File

@ -19,6 +19,7 @@ import (
"strings"
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/finding"
)
const (
@ -85,7 +86,7 @@ func prHasSuccessStatus(r checker.RevisionCIInfo, dl checker.DetailLogger) (bool
if isTest(status.Context) || isTest(status.TargetURL) {
dl.Debug(&checker.LogMessage{
Path: status.URL,
Type: checker.FileTypeURL,
Type: finding.FileTypeURL,
Text: fmt.Sprintf("CI test found: pr: %s, context: %s", r.HeadSHA,
status.Context),
})
@ -109,7 +110,7 @@ func prHasSuccessfulCheck(r checker.RevisionCIInfo, dl checker.DetailLogger) (bo
if isTest(cr.App.Slug) {
dl.Debug(&checker.LogMessage{
Path: cr.URL,
Type: checker.FileTypeURL,
Type: finding.FileTypeURL,
Text: fmt.Sprintf("CI test found: pr: %d, context: %s", r.PullRequestNumber,
cr.App.Slug),
})

View File

@ -19,6 +19,7 @@ import (
"github.com/ossf/scorecard/v4/checker"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
scut "github.com/ossf/scorecard/v4/utests"
)
@ -95,7 +96,7 @@ func TestDependencyUpdateTool(t *testing.T) {
[dependency-update-tool]
enabled = true
`,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
},
},
},

View File

@ -17,6 +17,7 @@ package evaluation
import (
"github.com/ossf/scorecard/v4/checker"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
)
func scoreLicenseCriteria(f *checker.LicenseFile,
@ -25,12 +26,12 @@ func scoreLicenseCriteria(f *checker.LicenseFile,
var score int
msg := checker.LogMessage{
Path: "",
Type: checker.FileTypeNone,
Type: finding.FileTypeNone,
Text: "",
Offset: 1,
}
msg.Path = f.File.Path
msg.Type = checker.FileTypeSource
msg.Type = finding.FileTypeSource
// #1 a license file was found.
score += 6

View File

@ -0,0 +1,33 @@
# Copyright 2023 OpenSSF Scorecard Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
short: Checks that GitHub workflows do not have steps with dangerous write permissions
desc: This rule checks that GitHub workflows do not have steps with dangerous write permissions
motivation: >
Even with permissions set to read by default, some scopes having write permissions in their steps incurs a risk to the project.
By giving write permission to the Actions you call in jobs, an external Action you call could abuse them. Depending on the permissions,
this could let the external Action commit unreviewed code or remove pre-submit checks to introduce a bug.
For more information about the scopes and the vulnerabilities involved, see https://github.com/ossf/scorecard/blob/main/docs/checks.md#token-permissions.
implementation: >
The rule is implemented by checking whether the `permissions` keyword is given non-write permissions for the following
scopes: `statuses`, `checks`, `security-events`, `deployments`, `contents`, `packages`, `actions`.
Write permissions given to recognized packaging actions or commands are allowed and are considered an acceptable risk.
risk: Medium
remediation:
effort: High
text:
- Verify which permissions are needed and consider whether you can reduce them.
markdown:
- Verify which permissions are needed and consider whether you can reduce them.

View File

@ -0,0 +1,36 @@
# Copyright 2023 OpenSSF Scorecard Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
short: Checks that GitHub workflows do not have default write permissions
desc: This rule checks that GitHub workflows do not have default write permissions
motivation: >
If no permissions are declared, a workflow's GitHub token's permissions default to write for all scopes.
This includes write permissions to push to the repository, to read encrypted secrets, etc.
For more information, see https://docs.github.com/en/actions/security-guides/automatic-token-authentication#permissions-for-the-github_token.
implementation: >
The rule is implemented by checking whether the `permissions` keyword is defined at the top of the workflow,
and that no write permissions are given.
risk: High
remediation:
effort: Low
text:
- Visit https://app.stepsecurity.io/secureworkflow/${{ repo }}/${{ workflow }}/${{ branch }}?enable=permissions
- Tick the 'Restrict permissions for GITHUB_TOKEN'
- Untick other options
- "NOTE: If you want to resolve multiple issues at once, you can visit https://app.stepsecurity.io/securerepo instead."
markdown:
- Visit [https://app.stepsecurity.io/secureworkflow](https://app.stepsecurity.io/secureworkflow/${{ repo }}/${{ workflow }}/${{ branch }}?enable=permissions).
- Tick the 'Restrict permissions for GITHUB_TOKEN'
- Untick other options
- "NOTE: If you want to resolve multiple issues at once, you can visit [https://app.stepsecurity.io/securerepo](https://app.stepsecurity.io/securerepo) instead."

View File

@ -15,13 +15,19 @@
package evaluation
import (
"embed"
"fmt"
"strings"
"github.com/ossf/scorecard/v4/checker"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
"github.com/ossf/scorecard/v4/remediation"
)
//go:embed *.yml
var rules embed.FS
type permissions struct {
topLevelWritePermissions map[string]bool
jobLevelWritePermissions map[string]bool
@ -57,32 +63,42 @@ func applyScorePolicy(results *checker.TokenPermissionsData, c *checker.CheckReq
dl := c.Dlogger
//nolint:errcheck
remediationMetadata, _ := remediation.New(c)
negativeRuleResults := map[string]bool{
"GitHubWorkflowPermissionsStepsNoWrite": false,
"GitHubWorkflowPermissionsTopNoWrite": false,
}
for _, r := range results.TokenPermissions {
var msg checker.LogMessage
var rem *checker.Remediation
var loc *finding.Location
if r.File != nil {
msg.Path = r.File.Path
msg.Offset = r.File.Offset
msg.Type = r.File.Type
msg.Snippet = r.File.Snippet
if msg.Path != "" {
rem = remediationMetadata.CreateWorkflowPermissionRemediation(r.File.Path)
loc = &finding.Location{
Type: r.File.Type,
Value: r.File.Path,
LineStart: &r.File.Offset,
}
if r.File.Snippet != "" {
loc.Snippet = &r.File.Snippet
}
loc.Value = r.File.Path
}
text, err := createMessage(r)
text, err := createText(r)
if err != nil {
return checker.MinResultScore, err
}
msg.Text = text
msg, err := createLogMsg(r.LocationType)
if err != nil {
return checker.InconclusiveResultScore, err
}
msg.Finding = msg.Finding.WithMessage(text).WithLocation(loc)
switch r.Type {
case checker.PermissionLevelNone, checker.PermissionLevelRead:
dl.Info(&msg)
msg.Finding = msg.Finding.WithOutcome(finding.OutcomePositive)
dl.Info(msg)
case checker.PermissionLevelUnknown:
dl.Debug(&msg)
dl.Debug(msg)
case checker.PermissionLevelUndeclared:
if r.LocationType == nil {
@ -92,9 +108,9 @@ func applyScorePolicy(results *checker.TokenPermissionsData, c *checker.CheckReq
// We warn only for top-level.
if *r.LocationType == checker.PermissionLocationTop {
warnWithRemediation(dl, &msg, rem)
warnWithRemediation(dl, msg, remediationMetadata, loc, negativeRuleResults)
} else {
dl.Debug(&msg)
dl.Debug(msg)
}
// Group results by workflow name for score computation.
@ -103,7 +119,7 @@ func applyScorePolicy(results *checker.TokenPermissionsData, c *checker.CheckReq
}
case checker.PermissionLevelWrite:
warnWithRemediation(dl, &msg, rem)
warnWithRemediation(dl, msg, remediationMetadata, loc, negativeRuleResults)
// Group results by workflow name for score computation.
if err := updateWorkflowHashMap(hm, r); err != nil {
@ -112,12 +128,88 @@ func applyScorePolicy(results *checker.TokenPermissionsData, c *checker.CheckReq
}
}
if err := reportDefaultFindings(results, c.Dlogger, negativeRuleResults); err != nil {
return checker.InconclusiveResultScore, err
}
return calculateScore(hm), nil
}
func warnWithRemediation(logger checker.DetailLogger, msg *checker.LogMessage, rem *checker.Remediation) {
msg.Remediation = rem
func reportDefaultFindings(results *checker.TokenPermissionsData,
dl checker.DetailLogger, negativeRuleResults map[string]bool,
) error {
// No workflow files exist.
if len(results.TokenPermissions) == 0 {
text := "no workflows found in the repository"
if err := reportFinding("GitHubWorkflowPermissionsStepsNoWrite",
text, finding.OutcomeNotApplicable, dl); err != nil {
return err
}
if err := reportFinding("GitHubWorkflowPermissionsTopNoWrite",
text, finding.OutcomeNotApplicable, dl); err != nil {
return err
}
return nil
}
// Workflow files found, report positive findings if no
// negative findings were found.
// NOTE: we don't consider rule `GitHubWorkflowPermissionsTopNoWrite`
// because positive results are already reported.
found := negativeRuleResults["GitHubWorkflowPermissionsStepsNoWrite"]
if !found {
text := fmt.Sprintf("no %s write permissions found", checker.PermissionLocationJob)
if err := reportFinding("GitHubWorkflowPermissionsStepsNoWrite",
text, finding.OutcomePositive, dl); err != nil {
return err
}
}
return nil
}
func reportFinding(rule, text string, o finding.Outcome, dl checker.DetailLogger) error {
f, err := finding.New(rules, rule)
if err != nil {
return sce.WithMessage(sce.ErrScorecardInternal, err.Error())
}
f = f.WithMessage(text).WithOutcome(o)
dl.Info(&checker.LogMessage{
Finding: f,
})
return nil
}
func createLogMsg(loct *checker.PermissionLocation) (*checker.LogMessage, error) {
Rule := "GitHubWorkflowPermissionsStepsNoWrite"
if loct == nil || *loct == checker.PermissionLocationTop {
Rule = "GitHubWorkflowPermissionsTopNoWrite"
}
f, err := finding.New(rules, Rule)
if err != nil {
return nil,
sce.WithMessage(sce.ErrScorecardInternal, err.Error())
}
return &checker.LogMessage{
Finding: f,
}, nil
}
func warnWithRemediation(logger checker.DetailLogger, msg *checker.LogMessage,
rem *remediation.RemediationMetadata, loc *finding.Location,
negativeRuleResults map[string]bool,
) {
if loc != nil && loc.Value != "" {
msg.Finding = msg.Finding.WithRemediationMetadata(map[string]string{
"repo": rem.Repo,
"branch": rem.Branch,
"workflow": strings.TrimPrefix(loc.Value, ".github/workflows/"),
})
}
logger.Warn(msg)
// Record that we found a negative result.
negativeRuleResults[msg.Finding.Rule] = true
}
func recordPermissionWrite(hm map[string]permissions, path string,
@ -163,7 +255,7 @@ func updateWorkflowHashMap(hm map[string]permissions, t checker.TokenPermission)
return nil
}
func createMessage(t checker.TokenPermission) (string, error) {
func createText(t checker.TokenPermission) (string, error) {
// By default, use the message already present.
if t.Msg != nil {
return *t.Msg, nil
@ -174,7 +266,7 @@ func createMessage(t checker.TokenPermission) (string, error) {
return "", sce.WithMessage(sce.ErrScorecardInternal, "locationType is nil")
}
// Use a different message depending on the type.
// Use a different text depending on the type.
if t.Type == checker.PermissionLevelUndeclared {
return fmt.Sprintf("no %s permission defined", *t.LocationType), nil
}

View File

@ -21,7 +21,9 @@ import (
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/checks/fileparser"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
"github.com/ossf/scorecard/v4/remediation"
"github.com/ossf/scorecard/v4/rule"
)
var errInvalidValue = errors.New("invalid value")
@ -54,7 +56,7 @@ func PinningDependencies(name string, c *checker.CheckRequest,
pr := make(map[checker.DependencyUseType]pinnedResult)
dl := c.Dlogger
//nolint:errcheck
remediaitonMetadata, _ := remediation.New(c)
remediationMetadata, _ := remediation.New(c)
for i := range r.Dependencies {
rr := r.Dependencies[i]
@ -86,7 +88,7 @@ func PinningDependencies(name string, c *checker.CheckRequest,
EndOffset: rr.Location.EndOffset,
Text: generateText(&rr),
Snippet: rr.Location.Snippet,
Remediation: generateRemediation(remediaitonMetadata, &rr),
Remediation: generateRemediation(remediationMetadata, &rr),
})
// Update the pinning status.
@ -136,10 +138,10 @@ func PinningDependencies(name string, c *checker.CheckRequest,
"dependency not pinned by hash detected", score, checker.MaxResultScore)
}
func generateRemediation(remediaitonMd remediation.RemediationMetadata, rr *checker.Dependency) *checker.Remediation {
func generateRemediation(remediationMd *remediation.RemediationMetadata, rr *checker.Dependency) *rule.Remediation {
switch rr.Type {
case checker.DependencyUseTypeGHAction:
return remediaitonMd.CreateWorkflowPinningRemediation(rr.Location.Path)
return remediationMd.CreateWorkflowPinningRemediation(rr.Location.Path)
case checker.DependencyUseTypeDockerfileContainerImage:
return remediation.CreateDockerfilePinningRemediation(rr, remediation.CraneDigester{})
default:
@ -279,7 +281,7 @@ func createReturnValuesForGitHubActionsWorkflowPinned(r worklowPinningResult, in
if r.gitHubOwned != notPinned {
score += 2
dl.Info(&checker.LogMessage{
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
Text: fmt.Sprintf("%s %s", "GitHub-owned", infoMsg),
})
@ -288,7 +290,7 @@ func createReturnValuesForGitHubActionsWorkflowPinned(r worklowPinningResult, in
if r.thirdParties != notPinned {
score += 8
dl.Info(&checker.LogMessage{
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
Text: fmt.Sprintf("%s %s", "Third-party", infoMsg),
})

View File

@ -17,6 +17,7 @@ package evaluation
import (
"github.com/ossf/scorecard/v4/checker"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
)
func scoreSecurityCriteria(f checker.File,
@ -142,7 +143,7 @@ func SecurityPolicy(name string, dl checker.DetailLogger, r *checker.SecurityPol
Path: spd.File.Path,
Type: spd.File.Type,
}
if msg.Type == checker.FileTypeURL {
if msg.Type == finding.FileTypeURL {
msg.Text = "security policy detected in org repo"
} else {
msg.Text = "security policy detected in current repo"

View File

@ -18,6 +18,7 @@ import (
"testing"
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/finding"
scut "github.com/ossf/scorecard/v4/utests"
)
@ -59,14 +60,16 @@ func TestSecurityPolicy(t *testing.T) {
args: args{
name: "test_security_policy_3",
r: &checker.SecurityPolicyData{
PolicyFiles: []checker.SecurityPolicyFile{{
File: checker.File{
Path: "/etc/security/pam_env.conf",
Type: checker.FileTypeURL,
PolicyFiles: []checker.SecurityPolicyFile{
{
File: checker.File{
Path: "/etc/security/pam_env.conf",
Type: finding.FileTypeURL,
},
Information: make([]checker.SecurityPolicyInformation, 0),
},
Information: make([]checker.SecurityPolicyInformation, 0),
},
}},
},
},
want: checker.CheckResult{
Score: 0,
@ -77,13 +80,15 @@ func TestSecurityPolicy(t *testing.T) {
args: args{
name: "test_security_policy_4",
r: &checker.SecurityPolicyData{
PolicyFiles: []checker.SecurityPolicyFile{{
File: checker.File{
Path: "/etc/security/pam_env.conf",
PolicyFiles: []checker.SecurityPolicyFile{
{
File: checker.File{
Path: "/etc/security/pam_env.conf",
},
Information: make([]checker.SecurityPolicyInformation, 0),
},
Information: make([]checker.SecurityPolicyInformation, 0),
},
}},
},
},
want: checker.CheckResult{
Score: 0,

View File

@ -21,6 +21,7 @@ import (
"github.com/ossf/scorecard/v4/checker"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
)
var (
@ -30,8 +31,8 @@ var (
const releaseLookBack = 5
//SignedReleases applies the score policy for the Signed-Releases check.
//nolint
// SignedReleases applies the score policy for the Signed-Releases check.
// nolint
func SignedReleases(name string, dl checker.DetailLogger, r *checker.SignedReleasesData) checker.CheckResult {
if r == nil {
e := sce.WithMessage(sce.ErrScorecardInternal, "empty raw data")
@ -60,7 +61,7 @@ func SignedReleases(name string, dl checker.DetailLogger, r *checker.SignedRelea
if strings.HasSuffix(asset.Name, suffix) {
dl.Info(&checker.LogMessage{
Path: asset.URL,
Type: checker.FileTypeURL,
Type: finding.FileTypeURL,
Text: fmt.Sprintf("provenance for release artifact: %s", asset.Name),
})
hasProvenance = true
@ -81,7 +82,7 @@ func SignedReleases(name string, dl checker.DetailLogger, r *checker.SignedRelea
dl.Warn(&checker.LogMessage{
Path: release.URL,
Type: checker.FileTypeURL,
Type: finding.FileTypeURL,
Text: fmt.Sprintf("release artifact %s does not have provenance", release.TagName),
})
@ -91,7 +92,7 @@ func SignedReleases(name string, dl checker.DetailLogger, r *checker.SignedRelea
if strings.HasSuffix(asset.Name, suffix) {
dl.Info(&checker.LogMessage{
Path: asset.URL,
Type: checker.FileTypeURL,
Type: finding.FileTypeURL,
Text: fmt.Sprintf("signed release artifact: %s", asset.Name),
})
signed = true
@ -109,7 +110,7 @@ func SignedReleases(name string, dl checker.DetailLogger, r *checker.SignedRelea
if !signed {
dl.Warn(&checker.LogMessage{
Path: release.URL,
Type: checker.FileTypeURL,
Type: finding.FileTypeURL,
Text: fmt.Sprintf("release artifact %s not signed", release.TagName),
})
}

View File

@ -19,6 +19,7 @@ import (
"github.com/ossf/scorecard/v4/checker"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
)
// Webhooks applies the score policy for the Webhooks check.
@ -39,7 +40,7 @@ func Webhooks(name string, dl checker.DetailLogger,
if !hook.UsesAuthSecret {
dl.Warn(&checker.LogMessage{
Path: hook.Path,
Type: checker.FileTypeURL,
Type: finding.FileTypeURL,
Text: "Webhook with no secret configured",
})
hasNoSecretCount++

View File

@ -25,6 +25,7 @@ import (
"github.com/ossf/scorecard/v4/checker"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
)
const (
@ -349,7 +350,7 @@ func AnyJobsMatch(workflow *actionlint.Workflow, jobMatchers []JobMatcher, fp st
return JobMatchResult{
File: checker.File{
Path: fp,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: GetLineNumber(job.Pos),
},
Msg: fmt.Sprintf("%v: %v", matcher.LogText, fp),
@ -360,7 +361,7 @@ func AnyJobsMatch(workflow *actionlint.Workflow, jobMatchers []JobMatcher, fp st
return JobMatchResult{
File: checker.File{
Path: fp,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
},
Msg: fmt.Sprintf("%v: %v", logMsgNoMatch, fp),

View File

@ -16,7 +16,7 @@ package checks
import (
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/checks/evaluation"
evaluation "github.com/ossf/scorecard/v4/checks/evaluation/permissions"
"github.com/ossf/scorecard/v4/checks/raw"
sce "github.com/ossf/scorecard/v4/errors"
)

View File

@ -28,7 +28,7 @@ import (
scut "github.com/ossf/scorecard/v4/utests"
)
//nolint
// nolint
func TestGithubTokenPermissions(t *testing.T) {
t.Parallel()
@ -44,7 +44,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore,
NumberOfWarn: 0,
NumberOfInfo: 1,
NumberOfInfo: 2,
NumberOfDebug: 5,
},
},
@ -77,7 +77,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore,
NumberOfWarn: 0,
NumberOfInfo: 1,
NumberOfInfo: 2,
NumberOfDebug: 5,
},
},
@ -99,7 +99,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore,
NumberOfWarn: 0,
NumberOfInfo: 1,
NumberOfInfo: 2,
NumberOfDebug: 5,
},
},
@ -110,7 +110,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MinResultScore,
NumberOfWarn: 1,
NumberOfInfo: 0,
NumberOfInfo: 1,
NumberOfDebug: 5,
},
},
@ -121,7 +121,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore,
NumberOfWarn: 0,
NumberOfInfo: 1,
NumberOfInfo: 2,
NumberOfDebug: 5,
},
},
@ -132,7 +132,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MinResultScore,
NumberOfWarn: 1,
NumberOfInfo: 0,
NumberOfInfo: 1,
NumberOfDebug: 5,
},
},
@ -143,7 +143,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore,
NumberOfWarn: 0,
NumberOfInfo: 1,
NumberOfInfo: 2,
NumberOfDebug: 6,
},
},
@ -154,7 +154,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore,
NumberOfWarn: 0,
NumberOfInfo: 10,
NumberOfInfo: 11,
NumberOfDebug: 5,
},
},
@ -165,7 +165,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore,
NumberOfWarn: 0,
NumberOfInfo: 10,
NumberOfInfo: 11,
NumberOfDebug: 5,
},
},
@ -176,7 +176,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore,
NumberOfWarn: 0,
NumberOfInfo: 1,
NumberOfInfo: 2,
NumberOfDebug: 5,
},
},
@ -187,7 +187,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore - 1,
NumberOfWarn: 2,
NumberOfInfo: 2,
NumberOfInfo: 3,
NumberOfDebug: 6,
},
},
@ -198,7 +198,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore - 2,
NumberOfWarn: 2,
NumberOfInfo: 3,
NumberOfInfo: 4,
NumberOfDebug: 5,
},
},
@ -209,7 +209,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MinResultScore,
NumberOfWarn: 1,
NumberOfInfo: 2,
NumberOfInfo: 3,
NumberOfDebug: 5,
},
},
@ -220,7 +220,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MinResultScore,
NumberOfWarn: 1,
NumberOfInfo: 2,
NumberOfInfo: 3,
NumberOfDebug: 5,
},
},
@ -231,7 +231,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MinResultScore,
NumberOfWarn: 1,
NumberOfInfo: 1,
NumberOfInfo: 2,
NumberOfDebug: 5,
},
},
@ -242,7 +242,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore,
NumberOfWarn: 0,
NumberOfInfo: 0,
NumberOfInfo: 2,
NumberOfDebug: 0,
},
},
@ -264,7 +264,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore,
NumberOfWarn: 0,
NumberOfInfo: 1,
NumberOfInfo: 2,
NumberOfDebug: 4,
},
},
@ -275,7 +275,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore,
NumberOfWarn: 0,
NumberOfInfo: 1,
NumberOfInfo: 2,
NumberOfDebug: 4,
},
},
@ -286,7 +286,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore,
NumberOfWarn: 0,
NumberOfInfo: 1,
NumberOfInfo: 2,
NumberOfDebug: 5,
},
},
@ -297,7 +297,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: 9,
NumberOfWarn: 1,
NumberOfInfo: 3,
NumberOfInfo: 4,
NumberOfDebug: 4,
},
},
@ -322,7 +322,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore - 1,
NumberOfWarn: 1,
NumberOfInfo: 2,
NumberOfInfo: 3,
NumberOfDebug: 9,
},
},
@ -336,7 +336,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MinResultScore,
NumberOfWarn: 2,
NumberOfInfo: 1,
NumberOfInfo: 2,
NumberOfDebug: 9,
},
},
@ -350,7 +350,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MinResultScore,
NumberOfWarn: 1,
NumberOfInfo: 1,
NumberOfInfo: 2,
NumberOfDebug: 10,
},
},
@ -363,7 +363,7 @@ func TestGithubTokenPermissions(t *testing.T) {
Error: nil,
Score: checker.MaxResultScore,
NumberOfWarn: 0,
NumberOfInfo: 1,
NumberOfInfo: 2,
NumberOfDebug: 5,
},
},
@ -473,7 +473,11 @@ func TestGithubTokenPermissionsLineNumber(t *testing.T) {
for _, expectedLog := range tt.expected {
isExpectedLog := func(logMessage checker.LogMessage, logType checker.DetailType) bool {
return logMessage.Offset == expectedLog.lineNumber && logMessage.Path == p &&
return logMessage.Finding != nil &&
logMessage.Finding.Location != nil &&
logMessage.Finding.Location.LineStart != nil &&
*logMessage.Finding.Location.LineStart == expectedLog.lineNumber &&
logMessage.Finding.Location.Value == p &&
logType == checker.DetailWarn
}
if !scut.ValidateLogMessage(isExpectedLog, &dl) {

View File

@ -30,6 +30,7 @@ import (
"github.com/ossf/scorecard/v4/checks/fileparser"
"github.com/ossf/scorecard/v4/clients"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
)
var (
@ -149,7 +150,7 @@ var checkBinaryFileContent fileparser.DoWhileTrueOnFileContent = func(path strin
if exists1 {
*pfiles = append(*pfiles, checker.File{
Path: path,
Type: checker.FileTypeBinary,
Type: finding.FileTypeBinary,
Offset: checker.OffsetDefault,
})
return true, nil
@ -159,7 +160,7 @@ var checkBinaryFileContent fileparser.DoWhileTrueOnFileContent = func(path strin
if !isText(content) && exists2 {
*pfiles = append(*pfiles, checker.File{
Path: path,
Type: checker.FileTypeBinary,
Type: finding.FileTypeBinary,
Offset: checker.OffsetDefault,
})
}

View File

@ -25,6 +25,7 @@ import (
"github.com/ossf/scorecard/v4/checks/fileparser"
"github.com/ossf/scorecard/v4/clients"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
)
func containsUntrustedContextPattern(variable string) bool {
@ -196,7 +197,7 @@ func checkJobForUntrustedCodeCheckout(job *actionlint.Job, path string,
Type: checker.DangerousWorkflowUntrustedCheckout,
File: checker.File{
Path: path,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: line,
Snippet: ref.Value.Value,
},
@ -255,7 +256,7 @@ func checkVariablesInScript(script string, pos *actionlint.Pos,
checker.DangerousWorkflow{
File: checker.File{
Path: path,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: line,
Snippet: variable,
},

View File

@ -21,6 +21,7 @@ import (
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/checks/fileparser"
"github.com/ossf/scorecard/v4/clients"
"github.com/ossf/scorecard/v4/finding"
)
const (
@ -78,7 +79,7 @@ var checkDependencyFileExists fileparser.DoWhileTrueOnFilename = func(name strin
Files: []checker.File{
{
Path: name,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
},
},
@ -94,7 +95,7 @@ var checkDependencyFileExists fileparser.DoWhileTrueOnFilename = func(name strin
Files: []checker.File{
{
Path: name,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
},
},
@ -107,7 +108,7 @@ var checkDependencyFileExists fileparser.DoWhileTrueOnFilename = func(name strin
Files: []checker.File{
{
Path: name,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
},
},
@ -120,7 +121,7 @@ var checkDependencyFileExists fileparser.DoWhileTrueOnFilename = func(name strin
Files: []checker.File{
{
Path: name,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
},
},

View File

@ -119,24 +119,24 @@ func Test_checkDependencyFileExists(t *testing.T) {
want: false,
wantErr: false,
},
{
name: ".lift.toml",
args: args{
name: ".lift.toml",
data: &[]checker.Tool{},
},
want: false,
wantErr: false,
},
{
name: ".lift/config.toml",
args: args{
name: ".lift/config.toml",
data: &[]checker.Tool{},
},
want: false,
wantErr: false,
},
{
name: ".lift.toml",
args: args{
name: ".lift.toml",
data: &[]checker.Tool{},
},
want: false,
wantErr: false,
},
{
name: ".lift/config.toml",
args: args{
name: ".lift/config.toml",
data: &[]checker.Tool{},
},
want: false,
wantErr: false,
},
}
for _, tt := range tests {
tt := tt
@ -208,7 +208,8 @@ func TestDependencyUpdateTool(t *testing.T) {
want: 1,
CallSearchCommits: 1,
files: []string{},
SearchCommits: []clients.Commit{{Committer: clients.User{ID: 111111111}},
SearchCommits: []clients.Commit{
{Committer: clients.User{ID: 111111111}},
{Committer: clients.User{ID: dependabotID}},
},
},

View File

@ -24,6 +24,7 @@ import (
"github.com/ossf/scorecard/v4/checks/fileparser"
"github.com/ossf/scorecard/v4/clients"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
)
const (
@ -240,7 +241,7 @@ var getFuzzFunc fileparser.DoWhileTrueOnFileContent = func(
// FileTypeFuzz as Type, and # of lines as Offset.
pdata.files = append(pdata.files, checker.File{
Path: path,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Snippet: found,
Offset: uint(i + 1), // Since the # of lines starts from zero.
})

View File

@ -363,7 +363,8 @@ func Test_getProminentLanguages(t *testing.T) {
{
Name: clients.Python,
NumLines: 40,
}, {
},
{
Name: clients.JavaScript,
NumLines: 800,
},
@ -384,7 +385,8 @@ func Test_getProminentLanguages(t *testing.T) {
{
Name: clients.Python,
NumLines: 40,
}, {
},
{
Name: clients.JavaScript,
NumLines: 800,
},
@ -395,7 +397,8 @@ func Test_getProminentLanguages(t *testing.T) {
{
Name: clients.Python,
NumLines: 40,
}, {
},
{
Name: clients.JavaScript,
NumLines: 800,
},
@ -406,7 +409,8 @@ func Test_getProminentLanguages(t *testing.T) {
{
Name: clients.Python,
NumLines: 40,
}, {
},
{
Name: clients.JavaScript,
NumLines: 800,
},

View File

@ -25,6 +25,7 @@ import (
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/checks/fileparser"
"github.com/ossf/scorecard/v4/clients"
"github.com/ossf/scorecard/v4/finding"
)
// from checks.md
@ -120,7 +121,7 @@ func License(c *checker.CheckRequest) (checker.LicenseData, error) {
checker.LicenseFile{
File: checker.File{
Path: v.Path,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
},
LicenseInformation: checker.License{
Approved: len(fsfOsiApprovedLicenseCiMap[strings.ToUpper(v.SPDXId)].Name) > 0,
@ -351,7 +352,7 @@ func checkLicense(lfName string) (checker.LicenseFile, bool) {
return (checker.LicenseFile{
File: checker.File{
Path: licensePath,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
},
LicenseInformation: checker.License{
Name: "",
@ -378,7 +379,7 @@ func checkLicense(lfName string) (checker.LicenseFile, bool) {
return (checker.LicenseFile{
File: checker.File{
Path: licensePath,
Type: checker.FileTypeSource, // TODO: introduce FileTypeFolder with licenseFolder
Type: finding.FileTypeSource, // TODO: introduce FileTypeFolder with licenseFolder
},
LicenseInformation: checker.License{
SpdxID: licenseSpdxID,

View File

@ -22,6 +22,7 @@ import (
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/checks/fileparser"
"github.com/ossf/scorecard/v4/finding"
)
// Packaging checks for packages.
@ -55,7 +56,7 @@ func Packaging(c *checker.CheckRequest) (checker.PackagingData, error) {
Msg: &match.Msg,
File: &checker.File{
Path: fp,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
},
},
@ -74,7 +75,7 @@ func Packaging(c *checker.CheckRequest) (checker.PackagingData, error) {
pkg := checker.Package{
File: &checker.File{
Path: fp,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: match.File.Offset,
},
Runs: []checker.Run{
@ -102,7 +103,7 @@ func Packaging(c *checker.CheckRequest) (checker.PackagingData, error) {
Msg: stringPointer(fmt.Sprintf("GitHub publishing workflow not used in runs: %v", fp)),
File: &checker.File{
Path: fp,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
},
// TODO: Job

View File

@ -23,6 +23,7 @@ import (
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/checks/fileparser"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
)
type permission string
@ -127,7 +128,7 @@ func validatePermission(permissionKey permission, permissionValue *actionlint.Pe
checker.TokenPermission{
File: &checker.File{
Path: path,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: lineNumber,
Snippet: val,
},
@ -144,7 +145,7 @@ func validatePermission(permissionKey permission, permissionValue *actionlint.Pe
checker.TokenPermission{
File: &checker.File{
Path: path,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: lineNumber,
Snippet: val,
},
@ -163,7 +164,7 @@ func validatePermission(permissionKey permission, permissionValue *actionlint.Pe
checker.TokenPermission{
File: &checker.File{
Path: path,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: lineNumber,
// TODO: set Snippet.
},
@ -210,7 +211,7 @@ func validatePermissions(permissions *actionlint.Permissions, permLoc checker.Pe
checker.TokenPermission{
File: &checker.File{
Path: path,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
},
LocationType: &permLoc,
@ -227,7 +228,7 @@ func validatePermissions(permissions *actionlint.Permissions, permLoc checker.Pe
checker.TokenPermission{
File: &checker.File{
Path: path,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: lineNumber,
Snippet: val,
},
@ -244,7 +245,7 @@ func validatePermissions(permissions *actionlint.Permissions, permLoc checker.Pe
checker.TokenPermission{
File: &checker.File{
Path: path,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: lineNumber,
Snippet: val,
},
@ -270,7 +271,7 @@ func validateTopLevelPermissions(workflow *actionlint.Workflow, path string,
checker.TokenPermission{
File: &checker.File{
Path: path,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
},
LocationType: &permLoc,
@ -299,7 +300,7 @@ func validatejobLevelPermissions(workflow *actionlint.Workflow, path string,
checker.TokenPermission{
File: &checker.File{
Path: path,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: fileparser.GetLineNumber(job.Pos),
},
LocationType: &permLoc,
@ -376,7 +377,7 @@ func isAllowedWorkflow(workflow *actionlint.Workflow, fp string, pdata *permissi
tokenPermissions := checker.TokenPermission{
File: &checker.File{
Path: fp,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
// TODO: set Snippet.
},
@ -416,7 +417,7 @@ func requiresPackagesPermissions(workflow *actionlint.Workflow, fp string, pdata
checker.TokenPermission{
File: &checker.File{
Path: fp,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
},
Msg: &match.Msg,
@ -517,7 +518,7 @@ func isWorkflowOf(workflow *actionlint.Workflow, fp string,
checker.TokenPermission{
File: &checker.File{
Path: fp,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
},
Msg: &match.Msg,

View File

@ -26,6 +26,7 @@ import (
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/checks/fileparser"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
)
// PinningDependencies checks for (un)pinned dependencies.
@ -267,7 +268,7 @@ var validateDockerfilesPinning fileparser.DoWhileTrueOnFileContent = func(
checker.Dependency{
Location: &checker.File{
Path: pathfn,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: uint(child.StartLine),
EndOffset: uint(child.EndLine),
Snippet: child.Original,
@ -286,7 +287,7 @@ var validateDockerfilesPinning fileparser.DoWhileTrueOnFileContent = func(
dep := checker.Dependency{
Location: &checker.File{
Path: pathfn,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: uint(child.StartLine),
EndOffset: uint(child.EndLine),
Snippet: child.Original,
@ -473,7 +474,7 @@ var validateGitHubActionWorkflow fileparser.DoWhileTrueOnFileContent = func(
dep := checker.Dependency{
Location: &checker.File{
Path: pathfn,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: uint(execAction.Uses.Pos.Line),
EndOffset: uint(execAction.Uses.Pos.Line), // `Uses` always span a single line.
Snippet: execAction.Uses.Value,

View File

@ -27,6 +27,7 @@ import (
"github.com/ossf/scorecard/v4/clients"
"github.com/ossf/scorecard/v4/clients/githubrepo"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
"github.com/ossf/scorecard/v4/log"
)
@ -87,7 +88,7 @@ func SecurityPolicy(c *checker.CheckRequest) (checker.SecurityPolicyData, error)
filePattern := data.files[idx].File.Path
// undo path.Join in isSecurityPolicyFile just
// for this call to OnMatchingFileContentsDo
if data.files[idx].File.Type == checker.FileTypeURL {
if data.files[idx].File.Type == finding.FileTypeURL {
filePattern = strings.Replace(filePattern, data.uri+"/", "", 1)
}
err := fileparser.OnMatchingFileContentDo(dotGitHubClient, fileparser.PathMatcher{
@ -114,7 +115,7 @@ var isSecurityPolicyFile fileparser.DoWhileTrueOnFilename = func(name string, ar
}
if isSecurityPolicyFilename(name) {
tempPath := name
tempType := checker.FileTypeText
tempType := finding.FileTypeText
if pdata.uri != "" {
// report complete path for org-based policy files
tempPath = path.Join(pdata.uri, tempPath)
@ -122,7 +123,7 @@ var isSecurityPolicyFile fileparser.DoWhileTrueOnFilename = func(name string, ar
// only denote for the details report that the
// policy was found at the org level rather
// than the repo level
tempType = checker.FileTypeURL
tempType = finding.FileTypeURL
}
pdata.files = append(pdata.files, checker.SecurityPolicyFile{
File: checker.File{

View File

@ -29,6 +29,7 @@ import (
"github.com/ossf/scorecard/v4/checker"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
)
var (
@ -330,7 +331,7 @@ func collectFetchPipeExecute(startLine, endLine uint, node syntax.Node, cmd, pat
checker.Dependency{
Location: &checker.File{
Path: pathfn,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: startLine,
EndOffset: endLine,
Snippet: cmd,
@ -381,7 +382,7 @@ func collectExecuteFiles(startLine, endLine uint, node syntax.Node, cmd, pathfn
checker.Dependency{
Location: &checker.File{
Path: pathfn,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: startLine,
EndOffset: endLine,
Snippet: cmd,
@ -652,7 +653,7 @@ func collectUnpinnedPakageManagerDownload(startLine, endLine uint, node syntax.N
checker.Dependency{
Location: &checker.File{
Path: pathfn,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: startLine,
EndOffset: endLine,
Snippet: cmd,
@ -670,7 +671,7 @@ func collectUnpinnedPakageManagerDownload(startLine, endLine uint, node syntax.N
checker.Dependency{
Location: &checker.File{
Path: pathfn,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: startLine,
EndOffset: endLine,
Snippet: cmd,
@ -688,7 +689,7 @@ func collectUnpinnedPakageManagerDownload(startLine, endLine uint, node syntax.N
checker.Dependency{
Location: &checker.File{
Path: pathfn,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: startLine,
EndOffset: endLine,
Snippet: cmd,
@ -706,7 +707,7 @@ func collectUnpinnedPakageManagerDownload(startLine, endLine uint, node syntax.N
checker.Dependency{
Location: &checker.File{
Path: pathfn,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: startLine,
EndOffset: endLine,
Snippet: cmd,
@ -801,7 +802,7 @@ func collectFetchProcSubsExecute(startLine, endLine uint, node syntax.Node, cmd,
checker.Dependency{
Location: &checker.File{
Path: pathfn,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: startLine,
EndOffset: endLine,
Snippet: cmd,

View File

@ -27,6 +27,7 @@ import (
"github.com/ossf/scorecard/v4/checks/fileparser"
"github.com/ossf/scorecard/v4/clients"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
)
// CheckSAST is the registered name for SAST.
@ -157,7 +158,7 @@ func sastToolInCheckRuns(c *checker.CheckRequest) (int, error) {
if sastTools[cr.App.Slug] {
c.Dlogger.Debug(&checker.LogMessage{
Path: cr.URL,
Type: checker.FileTypeURL,
Type: finding.FileTypeURL,
Text: fmt.Sprintf("tool detected: %v", cr.App.Slug),
})
totalTested++
@ -199,7 +200,7 @@ func codeQLInCheckDefinitions(c *checker.CheckRequest) (int, error) {
for _, result := range resp.Results {
c.Dlogger.Debug(&checker.LogMessage{
Path: result.Path,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: checker.OffsetDefault,
Text: "CodeQL detected",
})
@ -294,7 +295,7 @@ var validateSonarConfig fileparser.DoWhileTrueOnFileContent = func(pathfn string
url: string(match[1]),
file: checker.File{
Path: pathfn,
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: offset,
EndOffset: endOffset,
},

View File

@ -26,6 +26,7 @@ import (
"github.com/xeipuuv/gojsonschema"
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/finding"
"github.com/ossf/scorecard/v4/log"
"github.com/ossf/scorecard/v4/pkg"
)
@ -65,7 +66,7 @@ func jsonMockDocRead() *mockDoc {
return &m
}
//nolint
// nolint
func TestJSONOutput(t *testing.T) {
t.Parallel()
@ -110,7 +111,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/file1.cpp",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 5,
Snippet: "if (bad) {BUG();}",
},
@ -147,7 +148,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "bin/binary.elf",
Type: checker.FileTypeBinary,
Type: finding.FileTypeBinary,
Offset: 0,
},
},
@ -183,7 +184,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "bin/binary.elf",
Type: checker.FileTypeBinary,
Type: finding.FileTypeBinary,
Offset: 0,
},
},
@ -199,7 +200,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/doc.txt",
Type: checker.FileTypeText,
Type: finding.FileTypeText,
Offset: 3,
Snippet: "some text",
},
@ -216,7 +217,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "info message",
Path: "some/path.js",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG();}",
},
@ -226,7 +227,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "some/path.py",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG2();}",
},
@ -236,7 +237,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "debug message",
Path: "some/path.go",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG5();}",
},
@ -273,7 +274,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "bin/binary.elf",
Type: checker.FileTypeBinary,
Type: finding.FileTypeBinary,
Offset: 0,
},
},
@ -289,7 +290,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/doc.txt",
Type: checker.FileTypeText,
Type: finding.FileTypeText,
Offset: 3,
Snippet: "some text",
},
@ -306,7 +307,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "info message",
Path: "some/path.js",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG();}",
},
@ -316,7 +317,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "some/path.py",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG2();}",
},
@ -326,7 +327,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "debug message",
Path: "some/path.go",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG5();}",
},
@ -363,7 +364,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/file1.cpp",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 5,
Snippet: "if (bad) {BUG();}",
},
@ -400,7 +401,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "https://domain.com/something",
Type: checker.FileTypeURL,
Type: finding.FileTypeURL,
},
},
},

View File

@ -48,7 +48,7 @@ var _ = Describe("E2E TEST:"+checks.CheckTokenPermissions, func() {
Error: nil,
Score: checker.MinResultScore,
NumberOfWarn: 1,
NumberOfInfo: 2,
NumberOfInfo: 3,
NumberOfDebug: 5,
}
result := checks.TokenPermissions(&req)
@ -73,7 +73,7 @@ var _ = Describe("E2E TEST:"+checks.CheckTokenPermissions, func() {
Error: nil,
Score: checker.MinResultScore,
NumberOfWarn: 1,
NumberOfInfo: 2,
NumberOfInfo: 3,
NumberOfDebug: 5,
}
result := checks.TokenPermissions(&req)
@ -110,7 +110,7 @@ var _ = Describe("E2E TEST:"+checks.CheckTokenPermissions, func() {
Error: nil,
Score: checker.MinResultScore,
NumberOfWarn: 1,
NumberOfInfo: 2,
NumberOfInfo: 3,
NumberOfDebug: 5,
}
result := checks.TokenPermissions(&req)

146
finding/finding.go Normal file
View File

@ -0,0 +1,146 @@
// Copyright 2023 OpenSSF Scorecard Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package finding
import (
"embed"
"fmt"
"strings"
"github.com/ossf/scorecard/v4/rule"
)
// FileType is the type of a file.
//
// NOTE(review): FileType is serialized as an integer via Location's JSON
// tags, so reordering these constants would change persisted output —
// confirm before reordering.
type FileType int

const (
	// FileTypeNone must be `0`.
	FileTypeNone FileType = iota
	// FileTypeSource is for source code files.
	FileTypeSource
	// FileTypeBinary is for binary files.
	FileTypeBinary
	// FileTypeText is for text files.
	FileTypeText
	// FileTypeURL for URLs.
	FileTypeURL
)
// Location represents the location of a finding.
// nolint: govet
type Location struct {
	// Type is the kind of artifact the finding points at (source, binary, text, URL).
	Type FileType `json:"type"`
	// Value is the path or URL identifying the artifact.
	Value string `json:"value"`
	// LineStart and LineEnd delimit the relevant lines; omitted when unknown.
	LineStart *uint `json:"lineStart,omitempty"`
	LineEnd   *uint `json:"lineEnd,omitempty"`
	// Snippet is the offending code excerpt, when available.
	Snippet *string `json:"snippet,omitempty"`
}
// Outcome is the result of a finding.
//
// Outcomes are ordered from worst to best: OutcomeNegative (the zero
// value) is the worst; see WorseThan.
type Outcome int

const (
	// OutcomeNegative indicates a negative outcome.
	OutcomeNegative Outcome = iota
	// OutcomePositive indicates a positive outcome.
	OutcomePositive
	// OutcomeNotApplicable indicates a non-applicable outcome.
	OutcomeNotApplicable
	// OutcomeNotSupported indicates a non-supported outcome.
	OutcomeNotSupported
)
// Finding represents a finding.
// nolint: govet
type Finding struct {
	// Rule is the ID of the rule that produced this finding.
	Rule string `json:"rule"`
	// Outcome is the result of evaluating the rule.
	Outcome Outcome `json:"outcome"`
	// Risk is the rule's risk level; only populated when the rule carries
	// a remediation (see New).
	Risk rule.Risk `json:"risk"`
	// Message is a human-readable description of the finding.
	Message string `json:"message"`
	// Location optionally pinpoints the artifact the finding refers to.
	Location *Location `json:"location,omitempty"`
	// Remediation is advice for fixing the finding; cleared for
	// non-negative outcomes (see WithOutcome).
	Remediation *rule.Remediation `json:"remediation,omitempty"`
}
// New creates a new finding for the given rule ID.
//
// The rule definition is loaded from the embedded filesystem loc (e.g.
// "testdata/risk-high" resolves a YAML rule file). The finding starts
// with OutcomeNegative and inherits the rule's remediation; the risk is
// only propagated when a remediation exists, mirroring the JSON output
// contract.
func New(loc embed.FS, ruleID string) (*Finding, error) {
	r, err := rule.New(loc, ruleID)
	if err != nil {
		// Wrap so callers get context; errors.Is still matches the cause.
		return nil, fmt.Errorf("rule.New: %w", err)
	}

	f := &Finding{
		Rule:        ruleID,
		Outcome:     OutcomeNegative,
		Remediation: r.Remediation,
	}
	if r.Remediation != nil {
		f.Risk = r.Risk
	}
	return f, nil
}
// WithMessage sets the human-readable message on the finding and
// returns the finding for chaining. The receiver is mutated in place;
// no copy is made.
func (f *Finding) WithMessage(msg string) *Finding {
	f.Message = msg
	return f
}
// WithLocation attaches a location to the finding and returns the
// finding for chaining. The receiver is mutated in place; no copy is
// made.
func (f *Finding) WithLocation(location *Location) *Finding {
	f.Location = location
	return f
}
// WithPatch adds a patch to an existing finding's remediation and
// returns the finding for chaining. No copy is made.
//
// A finding may have no remediation (e.g. after WithOutcome clears it,
// or when the rule defines none); in that case the patch is silently
// ignored instead of panicking on a nil dereference.
func (f *Finding) WithPatch(patch *string) *Finding {
	if f.Remediation != nil {
		f.Remediation.Patch = patch
	}
	return f
}
// WithOutcome records the outcome on the finding and returns the
// finding for chaining. The receiver is mutated in place; no copy is
// made. Any outcome other than OutcomeNegative drops the remediation,
// since remediation advice only applies to negative findings.
// WARNING: this function should be called at most once for a finding.
func (f *Finding) WithOutcome(outcome Outcome) *Finding {
	f.Outcome = outcome
	if outcome == OutcomeNegative {
		return f
	}
	// Positive is not negative, remove the remediation.
	f.Remediation = nil
	return f
}
// WithRemediationMetadata substitutes dynamic values into the
// remediation of an existing finding and returns the finding for
// chaining. Each key k in values replaces every occurrence of the
// placeholder "${{ k }}" in both the text and markdown remediation
// strings. No copy is made; this is a no-op when the finding has no
// remediation.
func (f *Finding) WithRemediationMetadata(values map[string]string) *Finding {
	if f.Remediation == nil {
		return f
	}
	// Replace all dynamic values.
	for k, v := range values {
		placeholder := fmt.Sprintf("${{ %s }}", k)
		f.Remediation.Text = strings.ReplaceAll(f.Remediation.Text, placeholder, v)
		f.Remediation.Markdown = strings.ReplaceAll(f.Remediation.Markdown, placeholder, v)
	}
	return f
}
// WorseThan reports whether outcome o is worse than other. Outcomes
// are declared from worst (OutcomeNegative) to best, so a smaller
// value is worse.
func (o *Outcome) WorseThan(other Outcome) bool {
	return *o < other
}

212
finding/finding_test.go Normal file
View File

@ -0,0 +1,212 @@
// Copyright 2023 OpenSSF Scorecard Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package finding
import (
"embed"
"errors"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/ossf/scorecard/v4/rule"
)
func errCmp(e1, e2 error) bool {
return errors.Is(e1, e2) || errors.Is(e2, e1)
}
//go:embed testdata/*
var testfs embed.FS
// Test_New exercises finding construction from embedded rule
// definitions: propagation of the rule's risk and remediation effort,
// remediation metadata substitution, patch attachment, location,
// message, and outcome handling.
func Test_New(t *testing.T) {
	snippet := "some code snippet"
	patch := "some patch values"
	sline := uint(10)
	eline := uint(46)
	positiveOutcome := OutcomePositive
	negativeOutcome := OutcomeNegative
	t.Parallel()
	// nolint:govet
	tests := []struct {
		name     string
		id       string
		outcome  *Outcome
		err      error
		metadata map[string]string
		finding  *Finding
	}{
		{
			name:    "risk high",
			id:      "testdata/risk-high",
			outcome: &negativeOutcome,
			finding: &Finding{
				Rule:    "testdata/risk-high",
				Outcome: OutcomeNegative,
				Risk:    rule.RiskHigh,
				Remediation: &rule.Remediation{
					Text:     "step1\nstep2 https://www.google.com/something",
					Markdown: "step1\nstep2 [google.com](https://www.google.com/something)",
					Effort:   rule.RemediationEffortLow,
				},
			},
		},
		{
			name:    "effort low",
			id:      "testdata/effort-low",
			outcome: &negativeOutcome,
			finding: &Finding{
				Rule:    "testdata/effort-low",
				Outcome: OutcomeNegative,
				Risk:    rule.RiskHigh,
				Remediation: &rule.Remediation{
					Text:     "step1\nstep2 https://www.google.com/something",
					Markdown: "step1\nstep2 [google.com](https://www.google.com/something)",
					Effort:   rule.RemediationEffortLow,
				},
			},
		},
		{
			name:    "effort high",
			id:      "testdata/effort-high",
			outcome: &negativeOutcome,
			finding: &Finding{
				Rule:    "testdata/effort-high",
				Outcome: OutcomeNegative,
				Risk:    rule.RiskHigh,
				Remediation: &rule.Remediation{
					Text:     "step1\nstep2 https://www.google.com/something",
					Markdown: "step1\nstep2 [google.com](https://www.google.com/something)",
					Effort:   rule.RemediationEffortHigh,
				},
			},
		},
		{
			name:     "env variables",
			id:       "testdata/metadata-variables",
			outcome:  &negativeOutcome,
			metadata: map[string]string{"branch": "master", "repo": "ossf/scorecard"},
			finding: &Finding{
				Rule:    "testdata/metadata-variables",
				Outcome: OutcomeNegative,
				Risk:    rule.RiskHigh,
				Remediation: &rule.Remediation{
					Text:     "step1\nstep2 google.com/ossf/scorecard@master",
					Markdown: "step1\nstep2 [google.com/ossf/scorecard@master](google.com/ossf/scorecard@master)",
					Effort:   rule.RemediationEffortLow,
				},
			},
		},
		{
			name:     "patch",
			id:       "testdata/metadata-variables",
			outcome:  &negativeOutcome,
			metadata: map[string]string{"branch": "master", "repo": "ossf/scorecard"},
			finding: &Finding{
				Rule:    "testdata/metadata-variables",
				Outcome: OutcomeNegative,
				Risk:    rule.RiskHigh,
				Remediation: &rule.Remediation{
					Text:     "step1\nstep2 google.com/ossf/scorecard@master",
					Markdown: "step1\nstep2 [google.com/ossf/scorecard@master](google.com/ossf/scorecard@master)",
					Effort:   rule.RemediationEffortLow,
					Patch:    &patch,
				},
			},
		},
		{
			name:     "location",
			id:       "testdata/metadata-variables",
			outcome:  &negativeOutcome,
			metadata: map[string]string{"branch": "master", "repo": "ossf/scorecard"},
			finding: &Finding{
				Rule:    "testdata/metadata-variables",
				Outcome: OutcomeNegative,
				Risk:    rule.RiskHigh,
				Remediation: &rule.Remediation{
					Text:     "step1\nstep2 google.com/ossf/scorecard@master",
					Markdown: "step1\nstep2 [google.com/ossf/scorecard@master](google.com/ossf/scorecard@master)",
					Effort:   rule.RemediationEffortLow,
				},
				Location: &Location{
					Type:      FileTypeSource,
					Value:     "path/to/file.txt",
					LineStart: &sline,
					LineEnd:   &eline,
					Snippet:   &snippet,
				},
			},
		},
		{
			name:     "text",
			id:       "testdata/metadata-variables",
			outcome:  &negativeOutcome,
			metadata: map[string]string{"branch": "master", "repo": "ossf/scorecard"},
			finding: &Finding{
				Rule:    "testdata/metadata-variables",
				Outcome: OutcomeNegative,
				Risk:    rule.RiskHigh,
				Remediation: &rule.Remediation{
					Text:     "step1\nstep2 google.com/ossf/scorecard@master",
					Markdown: "step1\nstep2 [google.com/ossf/scorecard@master](google.com/ossf/scorecard@master)",
					Effort:   rule.RemediationEffortLow,
				},
				Message: "some text",
			},
		},
		{
			name:    "outcome",
			id:      "testdata/metadata-variables",
			outcome: &positiveOutcome,
			finding: &Finding{
				Rule:    "testdata/metadata-variables",
				Outcome: OutcomePositive,
				Risk:    rule.RiskHigh,
				Message: "some text",
			},
		},
	}
	for _, tt := range tests {
		tt := tt // Re-initializing variable so it is not changed while executing the closure below
		t.Run(tt.name, func(t *testing.T) {
			t.Parallel()
			r, err := New(testfs, tt.id)
			if err != nil || tt.err != nil {
				if !errCmp(err, tt.err) {
					t.Fatalf("unexpected error: %v", cmp.Diff(err, tt.err, cmpopts.EquateErrors()))
				}
				return
			}
			r = r.WithMessage(tt.finding.Message).WithLocation(tt.finding.Location)
			// Apply metadata whenever any is provided. The previous
			// condition (len(tt.metadata) > 1) silently skipped
			// single-entry metadata maps.
			if len(tt.metadata) > 0 {
				r = r.WithRemediationMetadata(tt.metadata)
			}
			if tt.finding.Remediation != nil {
				r = r.WithPatch(tt.finding.Remediation.Patch)
			}
			if tt.outcome != nil {
				r = r.WithOutcome(*tt.outcome)
			}
			if diff := cmp.Diff(*tt.finding, *r); diff != "" {
				t.Errorf("mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

17
finding/testdata/effort-high.yml vendored Normal file
View File

@ -0,0 +1,17 @@
short: short description
desc: description
motivation: >
line1
line2
implementation: >
line1
line2
risk: High
remediation:
effort: High
text:
- step1
- step2 https://www.google.com/something
markdown:
- step1
- step2 [google.com](https://www.google.com/something)

17
finding/testdata/effort-low.yml vendored Normal file
View File

@ -0,0 +1,17 @@
short: short description
desc: description
motivation: >
line1
line2
implementation: >
line1
line2
risk: High
remediation:
effort: Low
text:
- step1
- step2 https://www.google.com/something
markdown:
- step1
- step2 [google.com](https://www.google.com/something)

17
finding/testdata/metadata-variables.yml vendored Normal file
View File

@ -0,0 +1,17 @@
short: short description
desc: description
motivation: >
line1
line2
implementation: >
line1
line2
risk: High
remediation:
effort: Low
text:
- step1
- step2 google.com/${{ repo }}@${{ branch }}
markdown:
- step1
- step2 [google.com/${{ repo }}@${{ branch }}](google.com/${{ repo }}@${{ branch }})

17
finding/testdata/risk-high.yml vendored Normal file
View File

@ -0,0 +1,17 @@
short: short description
desc: description
motivation: >
line1
line2
implementation: >
line1
line2
risk: High
remediation:
effort: Low
text:
- step1
- step2 https://www.google.com/something
markdown:
- step1
- step2 [google.com](https://www.google.com/something)

17
finding/testdata/risk-low.yml vendored Normal file
View File

@ -0,0 +1,17 @@
short: short description
desc: description
motivation: >
line1
line2
implementation: >
line1
line2
risk: Low
remediation:
effort: Low
text:
- step1
- step2 https://www.google.com/something
markdown:
- step1
- step2 [google.com](https://www.google.com/something)

View File

@ -44,8 +44,9 @@ type Options struct {
CommitDepth int
ShowDetails bool
// Feature flags.
EnableSarif bool `env:"ENABLE_SARIF"`
EnableScorecardV6 bool `env:"SCORECARD_V6"`
EnableSarif bool `env:"ENABLE_SARIF"`
EnableScorecardV6 bool `env:"SCORECARD_V6"`
EnableScorecardExperimental bool `env:"SCORECARD_EXPERIMENTAL"`
}
// New creates a new instance of `Options`.
@ -75,6 +76,9 @@ const (
// Formats.
// FormatJSON specifies that results should be output in JSON format.
FormatJSON = "json"
// FormatSJSON specifies that results should be output in structured JSON format,
// i.e., with the structured results.
FormatSJSON = "structured-json"
// FormatSarif specifies that results should be output in SARIF format.
FormatSarif = "sarif"
// FormatDefault specifies that results should be output in default format.
@ -89,17 +93,21 @@ const (
// EnvVarScorecardV6 is the environment variable which enables scorecard v6
// options.
EnvVarScorecardV6 = "SCORECARD_V6"
// EnvVarScorecardExperimental is the environment variable which enables experimental
// features.
EnvVarScorecardExperimental = "SCORECARD_EXPERIMENTAL"
)
var (
// DefaultLogLevel retrieves the default log level.
DefaultLogLevel = log.DefaultLevel.String()
errCommitIsEmpty = errors.New("commit should be non-empty")
errFormatNotSupported = errors.New("unsupported format")
errPolicyFileNotSupported = errors.New("policy file is not supported yet")
errRawOptionNotSupported = errors.New("raw option is not supported yet")
errRepoOptionMustBeSet = errors.New(
errCommitIsEmpty = errors.New("commit should be non-empty")
errFormatNotSupported = errors.New("unsupported format")
errFormatSupportedWithExperimental = errors.New("format supported only with SCORECARD_EXPERIMENTAL=1")
errPolicyFileNotSupported = errors.New("policy file is not supported yet")
errRawOptionNotSupported = errors.New("raw option is not supported yet")
errRepoOptionMustBeSet = errors.New(
"exactly one of `repo`, `npm`, `pypi`, `rubygems` or `local` must be set",
)
errSARIFNotSupported = errors.New("SARIF format is not supported yet")
@ -149,6 +157,15 @@ func (o *Options) Validate() error {
}
}
if !o.isExperimentalEnabled() {
if o.Format == FormatSJSON {
errs = append(
errs,
errFormatSupportedWithExperimental,
)
}
}
// Validate format.
if !validateFormat(o.Format) {
errs = append(
@ -188,6 +205,13 @@ func boolSum(bools ...bool) int {
// Feature flags.
// isExperimentalEnabled returns true if experimental features were enabled via
// environment variable.
// isExperimentalEnabled returns true if experimental features were enabled via
// environment variable (SCORECARD_EXPERIMENTAL=1).
func (o *Options) isExperimentalEnabled() bool {
	return os.Getenv(EnvVarScorecardExperimental) == "1"
}
// isSarifEnabled returns true if SARIF format was specified in options or via
// environment variable.
func (o *Options) isSarifEnabled() bool {
@ -205,7 +229,7 @@ func (o *Options) isV6Enabled() bool {
func validateFormat(format string) bool {
switch format {
case FormatJSON, FormatSarif, FormatDefault, FormatRaw:
case FormatJSON, FormatSJSON, FormatSarif, FormatDefault, FormatRaw:
return true
default:
return false

View File

@ -26,12 +26,7 @@ func textToMarkdown(s string) string {
return strings.ReplaceAll(s, "\n", "\n\n")
}
// DetailToString turns a detail information into a string.
func DetailToString(d *checker.CheckDetail, logLevel log.Level) string {
if d.Type == checker.DetailDebug && logLevel != log.DebugLevel {
return ""
}
func nonStructuredResultString(d *checker.CheckDetail) string {
var sb strings.Builder
sb.WriteString(fmt.Sprintf("%s: %s", typeToString(d.Type), d.Msg.Text))
@ -46,12 +41,50 @@ func DetailToString(d *checker.CheckDetail, logLevel log.Level) string {
}
if d.Msg.Remediation != nil {
sb.WriteString(fmt.Sprintf(": %s", d.Msg.Remediation.HelpText))
sb.WriteString(fmt.Sprintf(": %s", d.Msg.Remediation.Text))
}
return sb.String()
}
// structuredResultString renders a detail that carries a structured finding
// (d.Msg.Finding != nil) as a one-line human-readable string:
// "<type>: <risk> severity: <message>[: <location>[:start[-end]]][: <remediation> (<effort> effort)]".
func structuredResultString(d *checker.CheckDetail) string {
	var sb strings.Builder
	f := d.Msg.Finding
	sb.WriteString(fmt.Sprintf("%s: %s severity: %s", typeToString(d.Type), f.Risk.String(), f.Message))
	if f.Location != nil {
		sb.WriteString(fmt.Sprintf(": %s", f.Location.Value))
		if f.Location.LineStart != nil {
			sb.WriteString(fmt.Sprintf(":%d", *f.Location.LineStart))
			// BUGFIX: the range check is nested under the LineStart nil-check.
			// Previously *f.Location.LineStart was dereferenced here even when
			// LineStart was nil, causing a nil-pointer panic whenever a finding
			// had LineEnd set but no LineStart.
			if f.Location.LineEnd != nil && *f.Location.LineStart < *f.Location.LineEnd {
				sb.WriteString(fmt.Sprintf("-%d", *f.Location.LineEnd))
			}
		}
	}
	// Effort to remediate, if a remediation is available.
	if f.Remediation != nil {
		sb.WriteString(fmt.Sprintf(": %s (%s effort)", f.Remediation.Text, f.Remediation.Effort.String()))
	}
	return sb.String()
}
// DetailToString turns a detail information into a string.
// Debug details are rendered only when the log level is debug; all
// other details are always rendered.
func DetailToString(d *checker.CheckDetail, logLevel log.Level) string {
	// Suppress debug details unless the caller asked for them.
	if d.Type == checker.DetailDebug && logLevel != log.DebugLevel {
		return ""
	}
	// Structured results carry a Finding; legacy (non-structured) results
	// do not. NOTE: the legacy path is temporary until all checks are migrated.
	if d.Msg.Finding != nil {
		return structuredResultString(d)
	}
	return nonStructuredResultString(d)
}
func detailsToString(details []checker.CheckDetail, logLevel log.Level) (string, bool) {
// UPGRADEv2: change to make([]string, len(details)).
var sa []string

View File

@ -19,6 +19,7 @@ import (
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/log"
rules "github.com/ossf/scorecard/v4/rule"
)
func TestDetailString(t *testing.T) {
@ -121,8 +122,8 @@ func TestDetailString(t *testing.T) {
Msg: checker.LogMessage{
Text: "some meaningful text",
Path: "Dockerfile",
Remediation: &checker.Remediation{
HelpText: "fix x by doing y",
Remediation: &rules.Remediation{
Text: "fix x by doing y",
},
},
Type: checker.DetailWarn,

View File

@ -19,9 +19,12 @@ import (
"fmt"
"io"
"github.com/ossf/scorecard/v4/checker"
docs "github.com/ossf/scorecard/v4/docs/checks"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
"github.com/ossf/scorecard/v4/log"
rules "github.com/ossf/scorecard/v4/rule"
)
// nolint: govet
@ -83,6 +86,32 @@ type JSONScorecardResultV2 struct {
Metadata []string `json:"metadata"`
}
// nolint: govet
// jsonCheckResultV3 is the per-check entry of the structured-JSON (v3) output:
// the check's score/reason plus the structured findings it produced.
type jsonCheckResultV3 struct {
// Risk is the highest risk among the check's findings (RiskNone if none).
Risk rules.Risk `json:"risk"`
// Outcome is the worst outcome among the check's findings.
Outcome finding.Outcome `json:"outcome"`
Findings []finding.Finding `json:"findings"`
Score int `json:"score"`
Reason string `json:"reason"`
Name string `json:"name"`
// TODO(X): list of rules run.
// TODO(X): simplify the documentation for the overall check
// and add the rules that are used in the description.
Doc jsonCheckDocumentationV2 `json:"documentation"`
}
// JSONScorecardResultV3 exports results as JSON for structured detail format,
// i.e., with the structured results. This is the top-level document written
// by AsSJSON for the "structured-json" output format.
//
//nolint:govet
type JSONScorecardResultV3 struct {
// Date is formatted as "2006-01-02".
Date string `json:"date"`
Repo jsonRepoV2 `json:"repo"`
Scorecard jsonScorecardV2 `json:"scorecard"`
AggregateScore jsonFloatScore `json:"score"`
Checks []jsonCheckResultV3 `json:"checks"`
Metadata []string `json:"metadata"`
}
// AsJSON exports results as JSON for new detail format.
func (r *ScorecardResult) AsJSON(showDetails bool, logLevel log.Level, writer io.Writer) error {
encoder := json.NewEncoder(writer)
@ -172,3 +201,74 @@ func (r *ScorecardResult) AsJSON2(showDetails bool,
return nil
}
// AsSJSON exports results as JSON in the structured detail format
// (the "structured-json" output): each check is emitted with its
// structured findings plus an aggregated risk and outcome.
func (r *ScorecardResult) AsSJSON(showDetails bool,
logLevel log.Level, checkDocs docs.Doc, writer io.Writer,
) error {
score, err := r.GetAggregateScore(checkDocs)
if err != nil {
return err
}
encoder := json.NewEncoder(writer)
out := JSONScorecardResultV3{
Repo: jsonRepoV2{
Name: r.Repo.Name,
Commit: r.Repo.CommitSHA,
},
Scorecard: jsonScorecardV2{
Version: r.Scorecard.Version,
Commit: r.Scorecard.CommitSHA,
},
Date: r.Date.Format("2006-01-02"),
Metadata: r.Metadata,
AggregateScore: jsonFloatScore(score),
}
for _, checkResult := range r.Checks {
doc, e := checkDocs.GetCheck(checkResult.Name)
if e != nil {
return sce.WithMessage(sce.ErrScorecardInternal, fmt.Sprintf("GetCheck: %s: %v", checkResult.Name, e))
}
tmpResult := jsonCheckResultV3{
Name: checkResult.Name,
Doc: jsonCheckDocumentationV2{
URL: doc.GetDocumentationURL(r.Scorecard.CommitSHA),
Short: doc.GetShort(),
},
Reason: checkResult.Reason,
Score: checkResult.Score,
// Start from the most favorable values; they are raised to the
// worst finding seen in the loop below.
Risk: rules.RiskNone,
Outcome: finding.OutcomePositive,
}
if showDetails {
for i := range checkResult.Details {
// Debug details are exported only at debug log level.
if checkResult.Details[i].Type == checker.DetailDebug && logLevel != log.DebugLevel {
continue
}
// Only structured results (with a Finding) are exported.
// NOTE: temporary until all checks are migrated.
f := checkResult.Details[i].Msg.Finding
if f == nil {
continue
}
// Aggregate: keep the highest risk and worst outcome across findings.
if f.Risk.GreaterThan(tmpResult.Risk) {
tmpResult.Risk = f.Risk
}
if f.Outcome.WorseThan(tmpResult.Outcome) {
tmpResult.Outcome = f.Outcome
}
tmpResult.Findings = append(tmpResult.Findings, *f)
}
}
out.Checks = append(out.Checks, tmpResult)
}
if err := encoder.Encode(out); err != nil {
return sce.WithMessage(sce.ErrScorecardInternal, fmt.Sprintf("encoder.Encode: %v", err))
}
return nil
}

View File

@ -26,6 +26,7 @@ import (
"github.com/xeipuuv/gojsonschema"
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/finding"
"github.com/ossf/scorecard/v4/log"
)
@ -64,7 +65,7 @@ func jsonMockDocRead() *mockDoc {
return &m
}
//nolint
// nolint
func TestJSONOutput(t *testing.T) {
t.Parallel()
@ -109,7 +110,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/file1.cpp",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 5,
Snippet: "if (bad) {BUG();}",
},
@ -146,7 +147,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "bin/binary.elf",
Type: checker.FileTypeBinary,
Type: finding.FileTypeBinary,
Offset: 0,
},
},
@ -182,7 +183,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "bin/binary.elf",
Type: checker.FileTypeBinary,
Type: finding.FileTypeBinary,
Offset: 0,
},
},
@ -198,7 +199,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/doc.txt",
Type: checker.FileTypeText,
Type: finding.FileTypeText,
Offset: 3,
Snippet: "some text",
},
@ -215,7 +216,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "info message",
Path: "some/path.js",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG();}",
},
@ -225,7 +226,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "some/path.py",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG2();}",
},
@ -235,7 +236,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "debug message",
Path: "some/path.go",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG5();}",
},
@ -272,7 +273,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "bin/binary.elf",
Type: checker.FileTypeBinary,
Type: finding.FileTypeBinary,
Offset: 0,
},
},
@ -288,7 +289,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/doc.txt",
Type: checker.FileTypeText,
Type: finding.FileTypeText,
Offset: 3,
Snippet: "some text",
},
@ -305,7 +306,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "info message",
Path: "some/path.js",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG();}",
},
@ -315,7 +316,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "some/path.py",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG2();}",
},
@ -325,7 +326,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "debug message",
Path: "some/path.go",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG5();}",
},
@ -362,7 +363,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/file1.cpp",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 5,
Snippet: "if (bad) {BUG();}",
},
@ -399,7 +400,7 @@ func TestJSONOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "https://domain.com/something",
Type: checker.FileTypeURL,
Type: finding.FileTypeURL,
},
},
},

View File

@ -29,6 +29,7 @@ import (
"github.com/ossf/scorecard/v4/checks"
docs "github.com/ossf/scorecard/v4/docs/checks"
sce "github.com/ossf/scorecard/v4/errors"
"github.com/ossf/scorecard/v4/finding"
"github.com/ossf/scorecard/v4/log"
spol "github.com/ossf/scorecard/v4/policy"
)
@ -37,7 +38,7 @@ type text struct {
Text string `json:"text,omitempty"`
}
//nolint
// nolint
type region struct {
StartLine *uint `json:"startLine,omitempty"`
EndLine *uint `json:"endLine,omitempty"`
@ -68,7 +69,7 @@ type location struct {
HasRemediation bool `json:"-"`
}
//nolint
// nolint
type relatedLocation struct {
ID int `json:"id"`
PhysicalLocation physicalLocation `json:"physicalLocation"`
@ -121,7 +122,7 @@ type tool struct {
Driver driver `json:"driver"`
}
//nolint
// nolint
type result struct {
RuleID string `json:"ruleId"`
Level string `json:"level,omitempty"` // Optional.
@ -203,13 +204,62 @@ func generateDefaultConfig(risk string) string {
return "error"
}
// getPath returns the file path of a detail, preferring the structured
// finding's location over the legacy Msg.Path.
func getPath(d *checker.CheckDetail) string {
	if f := d.Msg.Finding; f != nil && f.Location != nil {
		return f.Location.Value
	}
	return d.Msg.Path
}
// getLocationType returns the file type of a detail, preferring the
// structured finding's location over the legacy Msg.Type.
func getLocationType(d *checker.CheckDetail) finding.FileType {
	if f := d.Msg.Finding; f != nil && f.Location != nil {
		return f.Location.Type
	}
	return d.Msg.Type
}
// getSnippet returns the code snippet of a detail as SARIF text, preferring
// the structured finding's location over the legacy Msg.Snippet. Returns nil
// when no snippet is available.
func getSnippet(d *checker.CheckDetail) *text {
	// The structured finding's Snippet is optional (may be nil).
	if f := d.Msg.Finding; f != nil && f.Location != nil && f.Location.Snippet != nil {
		return &text{Text: *f.Location.Snippet}
	}
	if s := d.Msg.Snippet; s != "" {
		return &text{Text: s}
	}
	return nil
}
// getStartLine returns the start line/offset of a detail, preferring the
// structured finding's location over the legacy Msg.Offset.
func getStartLine(d *checker.CheckDetail) uint {
	if f := d.Msg.Finding; f != nil && f.Location != nil && f.Location.LineStart != nil {
		return *f.Location.LineStart
	}
	return d.Msg.Offset
}
// getEndLine returns the end line/offset of a detail, preferring the
// structured finding's location over the legacy Msg.EndOffset.
func getEndLine(d *checker.CheckDetail) uint {
	if f := d.Msg.Finding; f != nil && f.Location != nil && f.Location.LineEnd != nil {
		return *f.Location.LineEnd
	}
	return d.Msg.EndOffset
}
// getText returns the human-readable message of a detail as SARIF text,
// preferring the structured finding's message over the legacy Msg.Text.
func getText(d *checker.CheckDetail) *text {
	if f := d.Msg.Finding; f != nil {
		return &text{Text: f.Message}
	}
	return &text{Text: d.Msg.Text}
}
func detailToRegion(details *checker.CheckDetail) region {
var reg region
var snippet *text
if details.Msg.Snippet != "" {
snippet = &text{Text: details.Msg.Snippet}
}
snippet := getSnippet(details)
locType := getLocationType(details)
// https://github.com/github/codeql-action/issues/754.
// "code-scanning currently only supports character offset/length and start/end line/columns offsets".
@ -217,36 +267,38 @@ func detailToRegion(details *checker.CheckDetail) region {
// "3.30.1 General".
// Line numbers > 0.
// byteOffset and charOffset >= 0.
switch details.Msg.Type {
switch locType {
default:
panic("invalid")
case checker.FileTypeURL:
line := maxOffset(checker.OffsetDefault, details.Msg.Offset)
case finding.FileTypeURL:
line := maxOffset(checker.OffsetDefault, getStartLine(details))
reg = region{
StartLine: &line,
Snippet: snippet,
}
case checker.FileTypeNone:
case finding.FileTypeNone:
// Do nothing.
case checker.FileTypeSource:
startLine := maxOffset(checker.OffsetDefault, details.Msg.Offset)
endLine := maxOffset(startLine, details.Msg.EndOffset)
case finding.FileTypeSource:
startLine := maxOffset(checker.OffsetDefault, getStartLine(details))
endLine := maxOffset(startLine, getEndLine(details))
reg = region{
StartLine: &startLine,
EndLine: &endLine,
Snippet: snippet,
}
case checker.FileTypeText:
case finding.FileTypeText:
offset := getStartLine(details)
reg = region{
CharOffset: &details.Msg.Offset,
CharOffset: &offset,
Snippet: snippet,
}
case checker.FileTypeBinary:
case finding.FileTypeBinary:
offset := getStartLine(details)
reg = region{
// Note: GitHub does not support ByteOffset, so we also set
// StartLine.
ByteOffset: &details.Msg.Offset,
StartLine: &details.Msg.Offset,
ByteOffset: &offset,
StartLine: &offset,
}
}
return reg
@ -255,13 +307,16 @@ func detailToRegion(details *checker.CheckDetail) region {
func shouldAddLocation(detail *checker.CheckDetail, showDetails bool,
minScore, score int,
) bool {
path := getPath(detail)
locType := getLocationType(detail)
switch {
default:
return false
case detail.Msg.Path == "",
case path == "",
!showDetails,
detail.Type != checker.DetailWarn,
detail.Msg.Type == checker.FileTypeURL:
locType == finding.FileTypeURL:
return false
case score == checker.InconclusiveResultScore:
return true
@ -270,6 +325,19 @@ func shouldAddLocation(detail *checker.CheckDetail, showDetails bool,
}
}
// setRemediation appends a "Remediation tip" to the location's message and
// marks the location as having a remediation. The structured finding's
// remediation, when present, takes precedence over the legacy Msg.Remediation;
// when neither exists the location is left untouched.
func setRemediation(loc *location, d *checker.CheckDetail) {
	var markdown string
	switch f := d.Msg.Finding; {
	case f != nil && f.Remediation != nil:
		markdown = f.Remediation.Markdown
	case d.Msg.Remediation != nil:
		markdown = d.Msg.Remediation.Markdown
	default:
		return
	}
	loc.Message.Text = fmt.Sprintf("%s\nRemediation tip: %s", loc.Message.Text, markdown)
	loc.HasRemediation = true
}
func detailsToLocations(details []checker.CheckDetail,
showDetails bool, minScore, score int,
) []location {
@ -294,18 +362,15 @@ func detailsToLocations(details []checker.CheckDetail,
loc := location{
PhysicalLocation: physicalLocation{
ArtifactLocation: artifactLocation{
URI: d.Msg.Path,
URI: getPath(&d),
URIBaseID: "%SRCROOT%",
},
},
Message: &text{Text: d.Msg.Text},
Message: getText(&d),
}
// Add remediaiton information
if d.Msg.Remediation != nil {
loc.Message.Text = fmt.Sprintf("%s\nRemediation tip: %s", loc.Message.Text, d.Msg.Remediation.HelpMarkdown)
loc.HasRemediation = true
}
// Add remediation information
setRemediation(&loc, &d)
// Set the region depending on the file type.
loc.PhysicalLocation.Region = detailToRegion(&d)

View File

@ -24,8 +24,10 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/finding"
"github.com/ossf/scorecard/v4/log"
spol "github.com/ossf/scorecard/v4/policy"
rules "github.com/ossf/scorecard/v4/rule"
)
func sarifMockDocRead() *mockDoc {
@ -96,7 +98,7 @@ func sarifMockDocRead() *mockDoc {
return &m
}
//nolint
// nolint
func TestSARIFOutput(t *testing.T) {
t.Parallel()
@ -163,12 +165,12 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/file1.cpp",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 5,
Snippet: "if (bad) {BUG();}",
Remediation: &checker.Remediation{
HelpMarkdown: "this is the custom markdown help",
HelpText: "this is the custom text help",
Remediation: &rules.Remediation{
Markdown: "this is the custom markdown help",
Text: "this is the custom text help",
},
},
},
@ -217,7 +219,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/file1.cpp",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 5,
Snippet: "if (bad) {BUG();}",
},
@ -267,7 +269,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "bin/binary.elf",
Type: checker.FileTypeBinary,
Type: finding.FileTypeBinary,
Offset: 0,
},
},
@ -320,7 +322,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "bin/binary.elf",
Type: checker.FileTypeBinary,
Type: finding.FileTypeBinary,
Offset: 0,
},
},
@ -336,7 +338,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/doc.txt",
Type: checker.FileTypeText,
Type: finding.FileTypeText,
Offset: 3,
Snippet: "some text",
},
@ -353,7 +355,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "info message",
Path: "some/path.js",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG();}",
},
@ -363,7 +365,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "some/path.py",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG2();}",
},
@ -373,7 +375,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "debug message",
Path: "some/path.go",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG5();}",
},
@ -427,7 +429,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "bin/binary.elf",
Type: checker.FileTypeBinary,
Type: finding.FileTypeBinary,
Offset: 0,
},
},
@ -443,7 +445,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/doc.txt",
Type: checker.FileTypeText,
Type: finding.FileTypeText,
Offset: 3,
Snippet: "some text",
},
@ -460,7 +462,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "info message",
Path: "some/path.js",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG();}",
},
@ -470,7 +472,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "some/path.py",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG2();}",
},
@ -480,7 +482,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "debug message",
Path: "some/path.go",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG5();}",
},
@ -526,7 +528,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/file1.cpp",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 5,
Snippet: "if (bad) {BUG();}",
},
@ -574,7 +576,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "https://domain.com/something",
Type: checker.FileTypeURL,
Type: finding.FileTypeURL,
},
},
},
@ -626,7 +628,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "bin/binary.elf",
Type: checker.FileTypeBinary,
Type: finding.FileTypeBinary,
Offset: 0,
},
},
@ -642,7 +644,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/doc.txt",
Type: checker.FileTypeText,
Type: finding.FileTypeText,
Offset: 3,
Snippet: "some text",
},
@ -659,7 +661,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "info message",
Path: "some/path.js",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG();}",
},
@ -669,7 +671,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "some/path.py",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG2();}",
},
@ -679,7 +681,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "debug message",
Path: "some/path.go",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 3,
Snippet: "if (bad) {BUG5();}",
},
@ -737,7 +739,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/file1.cpp",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 5,
Snippet: "if (bad) {BUG();}",
},
@ -754,7 +756,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/file1.cpp",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 5,
Snippet: "if (bad) {BUG();}",
},
@ -764,7 +766,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/file2.cpp",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 5,
Snippet: "if (bad2) {BUG();}",
},
@ -781,7 +783,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/file1.cpp",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 5,
Snippet: "if (bad) {BUG();}",
},
@ -791,7 +793,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/file2.cpp",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 5,
Snippet: "if (bad2) {BUG2();}",
},
@ -808,7 +810,7 @@ func TestSARIFOutput(t *testing.T) {
Msg: checker.LogMessage{
Text: "warn message",
Path: "src/file1.cpp",
Type: checker.FileTypeSource,
Type: finding.FileTypeSource,
Offset: 5,
Snippet: "if (bad) {BUG();}",
},

View File

@ -43,7 +43,7 @@ type RepoInfo struct {
}
// ScorecardResult struct is returned on a successful Scorecard run.
//nolint
// nolint
type ScorecardResult struct {
Repo RepoInfo
Date time.Time
@ -117,6 +117,8 @@ func FormatResults(
err = results.AsSARIF(opts.ShowDetails, log.ParseLevel(opts.LogLevel), os.Stdout, doc, policy)
case options.FormatJSON:
err = results.AsJSON2(opts.ShowDetails, log.ParseLevel(opts.LogLevel), doc, os.Stdout)
case options.FormatSJSON:
err = results.AsSJSON(opts.ShowDetails, log.ParseLevel(opts.LogLevel), doc, os.Stdout)
case options.FormatRaw:
err = results.AsRawJSON(os.Stdout)
default:

View File

@ -22,6 +22,7 @@ import (
"github.com/google/go-containerregistry/pkg/crane"
"github.com/ossf/scorecard/v4/checker"
"github.com/ossf/scorecard/v4/rule"
)
var errInvalidArg = errors.New("invalid argument")
@ -35,53 +36,48 @@ var (
// TODO fix how this info makes it checks/evaluation.
type RemediationMetadata struct {
branch string
repo string
Branch string
Repo string
}
// New returns remediation relevant metadata from a CheckRequest.
func New(c *checker.CheckRequest) (RemediationMetadata, error) {
func New(c *checker.CheckRequest) (*RemediationMetadata, error) {
if c == nil || c.RepoClient == nil {
return RemediationMetadata{}, nil
return &RemediationMetadata{}, nil
}
// Get the branch for remediation.
branch, err := c.RepoClient.GetDefaultBranchName()
if err != nil {
return RemediationMetadata{}, fmt.Errorf("GetDefaultBranchName: %w", err)
return &RemediationMetadata{}, fmt.Errorf("GetDefaultBranchName: %w", err)
}
uri := c.RepoClient.URI()
parts := strings.Split(uri, "/")
if len(parts) != 3 {
return RemediationMetadata{}, fmt.Errorf("%w: empty: %s", errInvalidArg, uri)
return &RemediationMetadata{}, fmt.Errorf("%w: empty: %s", errInvalidArg, uri)
}
repo := fmt.Sprintf("%s/%s", parts[1], parts[2])
return RemediationMetadata{branch: branch, repo: repo}, nil
}
// CreateWorkflowPermissionRemediation creates a remediation for workflow permissions.
func (r *RemediationMetadata) CreateWorkflowPermissionRemediation(filepath string) *checker.Remediation {
return r.createWorkflowRemediation(filepath, "permissions")
return &RemediationMetadata{Branch: branch, Repo: repo}, nil
}
// CreateWorkflowPinningRemediation creates a remediation for pinning GH Actions.
func (r *RemediationMetadata) CreateWorkflowPinningRemediation(filepath string) *checker.Remediation {
func (r *RemediationMetadata) CreateWorkflowPinningRemediation(filepath string) *rule.Remediation {
return r.createWorkflowRemediation(filepath, "pin")
}
func (r *RemediationMetadata) createWorkflowRemediation(path, t string) *checker.Remediation {
func (r *RemediationMetadata) createWorkflowRemediation(path, t string) *rule.Remediation {
p := strings.TrimPrefix(path, ".github/workflows/")
if r.branch == "" || r.repo == "" {
if r.Branch == "" || r.Repo == "" {
return nil
}
text := fmt.Sprintf(workflowText, r.repo, p, r.branch, t)
markdown := fmt.Sprintf(workflowMarkdown, r.repo, p, r.branch, t)
text := fmt.Sprintf(workflowText, r.Repo, p, r.Branch, t)
markdown := fmt.Sprintf(workflowMarkdown, r.Repo, p, r.Branch, t)
return &checker.Remediation{
HelpText: text,
HelpMarkdown: markdown,
return &rule.Remediation{
Text: text,
Markdown: markdown,
}
}
@ -105,7 +101,7 @@ func (c CraneDigester) Digest(name string) (string, error) {
}
// CreateDockerfilePinningRemediation creates a remediation for pinning Dockerfile images.
func CreateDockerfilePinningRemediation(dep *checker.Dependency, digester Digester) *checker.Remediation {
func CreateDockerfilePinningRemediation(dep *checker.Dependency, digester Digester) *rule.Remediation {
name, ok := dockerImageName(dep)
if !ok {
return nil
@ -119,8 +115,8 @@ func CreateDockerfilePinningRemediation(dep *checker.Dependency, digester Digest
text := fmt.Sprintf(dockerfilePinText, name, hash)
markdown := text
return &checker.Remediation{
HelpText: text,
HelpMarkdown: markdown,
return &rule.Remediation{
Text: text,
Markdown: markdown,
}
}

View File

@ -23,6 +23,7 @@ import (
"github.com/ossf/scorecard/v4/checker"
mockrepo "github.com/ossf/scorecard/v4/clients/mockclients"
"github.com/ossf/scorecard/v4/rule"
)
func TestRepeatedSetup(t *testing.T) {
@ -45,8 +46,8 @@ func TestRepeatedSetup(t *testing.T) {
}
want := fmt.Sprintf("ossf/scorecard%d", i)
if rmd.repo != want {
t.Errorf("failed. expected: %v, got: %v", want, rmd.repo)
if rmd.Repo != want {
t.Errorf("failed. expected: %v, got: %v", want, rmd.Repo)
}
}
}
@ -78,7 +79,7 @@ func TestCreateDockerfilePinningRemediation(t *testing.T) {
tests := []struct {
name string
dep checker.Dependency
expected *checker.Remediation
expected *rule.Remediation
}{
{
name: "no depdendency",
@ -91,9 +92,9 @@ func TestCreateDockerfilePinningRemediation(t *testing.T) {
Name: asPointer("foo"),
Type: checker.DependencyUseTypeDockerfileContainerImage,
},
expected: &checker.Remediation{
HelpText: "pin your Docker image by updating foo to foo@sha256:2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
HelpMarkdown: "pin your Docker image by updating foo to foo@sha256:2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
expected: &rule.Remediation{
Text: "pin your Docker image by updating foo to foo@sha256:2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
Markdown: "pin your Docker image by updating foo to foo@sha256:2c26b46b68ffc68ff99b453c1d30413413422d706483bfa0f98a5e886266e7ae",
},
},
{
@ -104,9 +105,9 @@ func TestCreateDockerfilePinningRemediation(t *testing.T) {
PinnedAt: asPointer("11"),
Type: checker.DependencyUseTypeDockerfileContainerImage,
},
expected: &checker.Remediation{
HelpText: "pin your Docker image by updating amazoncorretto:11 to amazoncorretto:11@sha256:b1a711069b801a325a30885f08f5067b2b102232379750dda4d25a016afd9a88",
HelpMarkdown: "pin your Docker image by updating amazoncorretto:11 to amazoncorretto:11@sha256:b1a711069b801a325a30885f08f5067b2b102232379750dda4d25a016afd9a88",
expected: &rule.Remediation{
Text: "pin your Docker image by updating amazoncorretto:11 to amazoncorretto:11@sha256:b1a711069b801a325a30885f08f5067b2b102232379750dda4d25a016afd9a88",
Markdown: "pin your Docker image by updating amazoncorretto:11 to amazoncorretto:11@sha256:b1a711069b801a325a30885f08f5067b2b102232379750dda4d25a016afd9a88",
},
},
{

247
rule/rule.go Normal file
View File

@ -0,0 +1,247 @@
// Copyright 2023 OpenSSF Scorecard Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package rule
import (
"embed"
"errors"
"fmt"
"strings"
"gopkg.in/yaml.v3"
)
// RemediationEffort indicates the estimated effort necessary to remediate a finding.
type RemediationEffort int
const (
// RemediationEffortNone indicates a no remediation effort.
RemediationEffortNone RemediationEffort = iota
// RemediationEffortLow indicates a low remediation effort.
RemediationEffortLow
// RemediationEffortMedium indicates a medium remediation effort.
RemediationEffortMedium
// RemediationEffortHigh indicates a high remediation effort.
RemediationEffortHigh
)
// Remediation represents the remediation for a finding.
type Remediation struct {
// Patch for machines.
Patch *string `json:"patch,omitempty"`
// Text for humans.
Text string `json:"text"`
// Text in markdown format for humans.
Markdown string `json:"markdown"`
// Effort to remediate.
Effort RemediationEffort `json:"effort"`
}
// nolint: govet
type jsonRemediation struct {
Text []string `yaml:"text"`
Markdown []string `yaml:"markdown"`
Effort RemediationEffort `yaml:"effort"`
}
// nolint: govet
type jsonRule struct {
Short string `yaml:"short"`
Desc string `yaml:"desc"`
Motivation string `yaml:"motivation"`
Implementation string `yaml:"implementation"`
Risk Risk `yaml:"risk"`
Remediation jsonRemediation `yaml:"remediation"`
}
// Risk indicates a risk.
type Risk int
const (
// RiskNone is a no risk.
RiskNone Risk = iota
// RiskLow is a low risk.
RiskLow
// RiskMedium is a medium risk.
RiskMedium
// RiskHigh is a high risk.
RiskHigh
// RiskCritical is a critical risk.
RiskCritical
)
// nolint: govet
type Rule struct {
Name string
Short string
Desc string
Motivation string
Risk Risk
Remediation *Remediation
}
var errInvalid = errors.New("invalid")
// New creates a new rule from an embedded YAML definition file.
func New(loc embed.FS, rule string) (*Rule, error) {
content, err := loc.ReadFile(fmt.Sprintf("%s.yml", rule))
if err != nil {
return nil, fmt.Errorf("%w: %v", errInvalid, err)
}
r, err := parseFromJSON(content)
if err != nil {
return nil, err
}
if err := validate(r); err != nil {
return nil, err
}
return &Rule{
Name: rule,
Short: r.Short,
Desc: r.Desc,
Motivation: r.Motivation,
Risk: r.Risk,
Remediation: &Remediation{
Text: strings.Join(r.Remediation.Text, "\n"),
Markdown: strings.Join(r.Remediation.Markdown, "\n"),
Effort: r.Remediation.Effort,
},
}, nil
}
// validate checks that a parsed rule definition carries acceptable
// risk and remediation values, wrapping any failure with errInvalid.
func validate(r *jsonRule) error {
	err := validateRisk(r.Risk)
	if err == nil {
		err = validateRemediation(r.Remediation)
	}
	if err == nil {
		return nil
	}
	return fmt.Errorf("%w: %v", errInvalid, err)
}
// validateRemediation checks that the remediation declares a known,
// non-zero effort. RemediationEffortNone is deliberately rejected:
// every rule file must state an explicit effort.
func validateRemediation(r jsonRemediation) error {
	switch r.Effort {
	case RemediationEffortHigh, RemediationEffortMedium, RemediationEffortLow:
		return nil
	default:
		// Report only the offending effort value; formatting the whole
		// jsonRemediation would dump every text/markdown line into the
		// error message without identifying the invalid field.
		return fmt.Errorf("%w: %v", errInvalid, fmt.Sprintf("remediation effort '%v'", r.Effort))
	}
}
// validateRisk reports whether r is one of the declared Risk values.
func validateRisk(r Risk) error {
	if r == RiskNone || r == RiskLow || r == RiskMedium ||
		r == RiskHigh || r == RiskCritical {
		return nil
	}
	return fmt.Errorf("%w: %v", errInvalid, fmt.Sprintf("risk '%v'", r))
}
// parseFromJSON unmarshals a rule definition into its wire form.
// NOTE(review): despite the name, the content is YAML; the name is
// kept so existing callers keep working.
func parseFromJSON(content []byte) (*jsonRule, error) {
	var r jsonRule
	if err := yaml.Unmarshal(content, &r); err != nil {
		return nil, fmt.Errorf("%w: %v", errInvalid, err)
	}
	return &r, nil
}
// UnmarshalYAML is a custom unmarshalling function
// to transform the string into an enum.
// Accepted values are "Low", "Medium", and "High"; anything else
// (including the absent/None case) yields errInvalid.
func (r *RemediationEffort) UnmarshalYAML(n *yaml.Node) error {
	var str string
	if err := n.Decode(&str); err != nil {
		return fmt.Errorf("%w: %v", errInvalid, err)
	}
	// Switch on the decoded string, not n.Value: Decode resolves YAML
	// aliases/anchors, whereas n.Value is the raw node text (empty for
	// an alias node), so the two can disagree.
	// nolint:goconst
	switch str {
	case "Low":
		*r = RemediationEffortLow
	case "Medium":
		*r = RemediationEffortMedium
	case "High":
		*r = RemediationEffortHigh
	default:
		return fmt.Errorf("%w: %q", errInvalid, str)
	}
	return nil
}
// UnmarshalYAML is a custom unmarshalling function
// to transform the string into an enum.
// Accepted values are "None", "Low", "Medium", "High", and "Critical".
func (r *Risk) UnmarshalYAML(n *yaml.Node) error {
	var str string
	if err := n.Decode(&str); err != nil {
		return fmt.Errorf("%w: %v", errInvalid, err)
	}
	// Switch on the decoded string, not n.Value: Decode resolves YAML
	// aliases/anchors, whereas n.Value is the raw node text (empty for
	// an alias node), so the two can disagree.
	switch str {
	case "None":
		*r = RiskNone
	case "Low":
		*r = RiskLow
	case "Medium":
		*r = RiskMedium
	case "High":
		*r = RiskHigh
	case "Critical":
		*r = RiskCritical
	default:
		return fmt.Errorf("%w: %q", errInvalid, str)
	}
	return nil
}
// String stringifies the enum. Unknown values (including
// RemediationEffortNone) stringify to "".
func (r *RemediationEffort) String() string {
	names := [...]string{
		RemediationEffortLow:    "Low",
		RemediationEffortMedium: "Medium",
		RemediationEffortHigh:   "High",
	}
	if *r < RemediationEffortLow || *r > RemediationEffortHigh {
		return ""
	}
	return names[*r]
}
// String stringifies the enum. Out-of-range values stringify to "".
func (r *Risk) String() string {
	names := [...]string{
		RiskNone:     "None",
		RiskLow:      "Low",
		RiskMedium:   "Medium",
		RiskHigh:     "High",
		RiskCritical: "Critical",
	}
	if *r < RiskNone || *r > RiskCritical {
		return ""
	}
	return names[*r]
}
// GreaterThan reports whether r is strictly more severe than rr,
// relying on the declaration order of the Risk constants
// (None < Low < Medium < High < Critical).
func (r *Risk) GreaterThan(rr Risk) bool {
	return *r > rr
}

87
rule/rule_test.go Normal file
View File

@ -0,0 +1,87 @@
// Copyright 2023 OpenSSF Scorecard Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package rule
import (
"embed"
"errors"
"testing"
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
)
func errCmp(e1, e2 error) bool {
return errors.Is(e1, e2) || errors.Is(e2, e1)
}
// testfs embeds the YAML rule fixtures under testdata/ for Test_New.
//go:embed testdata/*
var testfs embed.FS
// Test_New exercises rule loading against the embedded testdata
// fixtures: one fully-populated rule plus two invalid definitions.
func Test_New(t *testing.T) {
	t.Parallel()
	// nolint: govet
	cases := []struct {
		name string
		id   string
		err  error
		rule *Rule
	}{
		{
			name: "all fields set",
			id:   "testdata/all-fields",
			rule: &Rule{
				Name:       "testdata/all-fields",
				Short:      "short description",
				Desc:       "description",
				Motivation: "line1 line2\n",
				Risk:       RiskHigh,
				Remediation: &Remediation{
					Text:     "step1\nstep2 https://www.google.com/something",
					Markdown: "step1\nstep2 [google.com](https://www.google.com/something)",
					Effort:   RemediationEffortLow,
				},
			},
		},
		{
			name: "invalid risk",
			id:   "testdata/invalid-risk",
			err:  errInvalid,
		},
		{
			name: "invalid effort",
			id:   "testdata/invalid-effort",
			err:  errInvalid,
		},
	}
	for _, tc := range cases {
		tc := tc // capture the range variable for the parallel subtest
		t.Run(tc.name, func(t *testing.T) {
			t.Parallel()
			got, err := New(testfs, tc.id)
			// When either side expects an error, compare errors and stop.
			if err != nil || tc.err != nil {
				if !errCmp(err, tc.err) {
					t.Fatalf("unexpected error: %v", cmp.Diff(err, tc.err, cmpopts.EquateErrors()))
				}
				return
			}
			if diff := cmp.Diff(*tc.rule, *got); diff != "" {
				t.Errorf("mismatch (-want +got):\n%s", diff)
			}
		})
	}
}

17
rule/testdata/all-fields.yml vendored Normal file
View File

@ -0,0 +1,17 @@
short: short description
desc: description
motivation: >
line1
line2
implementation: >
line1
line2
risk: High
remediation:
effort: Low
text:
- step1
- step2 https://www.google.com/something
markdown:
- step1
- step2 [google.com](https://www.google.com/something)

17
rule/testdata/invalid-effort.yml vendored Normal file
View File

@ -0,0 +1,17 @@
short: short description
desc: description
motivation: >
line1
line2
implementation: >
line1
line2
risk: High
remediation:
effort: invalid
text:
- step1
- step2 https://www.google.com/something
markdown:
- step1
- step2 [google.com](https://www.google.com/something)

17
rule/testdata/invalid-risk.yml vendored Normal file
View File

@ -0,0 +1,17 @@
short: short description
desc: description
motivation: >
line1
line2
implementation: >
line1
line2
risk: invalid
remediation:
effort: Low
text:
- step1
- step2 https://www.google.com/something
markdown:
- step1
- step2 [google.com](https://www.google.com/something)