laurentsimon 2022-05-20 08:59:53 -07:00 committed by GitHub
parent 399d9974e4
commit af7f865b9d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 6 additions and 23 deletions

View File

@@ -332,13 +332,13 @@ type CIIBestPracticesData struct {
 }
 
 // DangerousWorkflowType represents a type of dangerous workflow.
-type DangerousWorkflowType int
+type DangerousWorkflowType string
 
 const (
 	// DangerousWorkflowScriptInjection represents a script injection.
-	DangerousWorkflowScriptInjection DangerousWorkflowType = iota
+	DangerousWorkflowScriptInjection DangerousWorkflowType = "scriptInjection"
 	// DangerousWorkflowUntrustedCheckout represents an untrusted checkout.
-	DangerousWorkflowUntrustedCheckout
+	DangerousWorkflowUntrustedCheckout DangerousWorkflowType = "untrustedCheckout"
 )
 
 // DangerousWorkflowData contains raw results
@@ -350,8 +350,8 @@ type DangerousWorkflowData struct {
 // DangerousWorkflow represents a dangerous workflow.
 type DangerousWorkflow struct {
 	Job  *WorkflowJob
-	File File
 	Type DangerousWorkflowType
+	File File
 }
 
 // WorkflowJob reprresents a workflow job.
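For context, a minimal, hypothetical sketch (not part of this commit) of what the string-backed type buys: the constants now carry their serialized names, so producing JSON-friendly output is a plain string conversion rather than the int-to-name mapping the old iota-based constants required.

package main

import (
	"encoding/json"
	"fmt"
)

// String-backed type mirroring the new DangerousWorkflowType definition above.
type DangerousWorkflowType string

const (
	DangerousWorkflowScriptInjection   DangerousWorkflowType = "scriptInjection"
	DangerousWorkflowUntrustedCheckout DangerousWorkflowType = "untrustedCheckout"
)

func main() {
	// string(...) yields the human-readable value directly; with the old
	// iota-based constants this would have required an explicit mapping.
	b, _ := json.Marshal(map[string]string{"type": string(DangerousWorkflowScriptInjection)})
	fmt.Println(string(b)) // {"type":"scriptInjection"}
}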

View File

@@ -16,7 +16,6 @@ package pkg
 
 import (
 	"encoding/json"
-	"errors"
 	"fmt"
 	"io"
 	"time"
@@ -28,8 +27,6 @@ import (
 // TODO: add a "check" field to all results so that they can be linked to a check.
 // TODO(#1874): Add a severity field in all results.
 
-var errorInvalidType = errors.New("invalid type")
-
 // Flat JSON structure to hold raw results.
 type jsonScorecardRawResult struct {
 	Date string `json:"date"`
@@ -167,13 +164,6 @@ type jsonLicense struct {
 	// TODO: add fields, like type of license, etc.
 }
 
-type dangerousPatternType string
-
-const (
-	patternUntrustedCheckout dangerousPatternType = "untrustedCheckout"
-	patternScriptInjection   dangerousPatternType = "scriptInjection"
-)
-
 type jsonWorkflow struct {
 	Job  *jsonWorkflowJob `json:"job"`
 	File *jsonFile        `json:"file"`
@@ -242,6 +232,7 @@ func (r *jsonScorecardRawResult) addFuzzingRawResults(fd *checker.FuzzingData) e
 	return nil
 }
 
+//nolint:unparam
 func (r *jsonScorecardRawResult) addDangerousWorkflowRawResults(df *checker.DangerousWorkflowData) error {
 	r.Results.Workflows = []jsonWorkflow{}
 	for _, e := range df.Workflows {
@@ -250,6 +241,7 @@ func (r *jsonScorecardRawResult) addDangerousWorkflowRawResults(df *checker.Dang
 				Path:   e.File.Path,
 				Offset: int(e.File.Offset),
 			},
+			Type: string(e.Type),
 		}
 		if e.File.Snippet != "" {
 			v.File.Snippet = &e.File.Snippet
@@ -261,15 +253,6 @@ func (r *jsonScorecardRawResult) addDangerousWorkflowRawResults(df *checker.Dang
 			}
 		}
 
-		switch e.Type {
-		case checker.DangerousWorkflowUntrustedCheckout:
-			v.Type = string(patternUntrustedCheckout)
-		case checker.DangerousWorkflowScriptInjection:
-			v.Type = string(patternScriptInjection)
-		default:
-			return fmt.Errorf("%w: %d", errorInvalidType, e.Type)
-		}
-
 		r.Results.Workflows = append(r.Results.Workflows, v)
 	}
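Below is a self-contained sketch (assumed, simplified shapes; not the scorecard code itself) of the conversion after this commit: the string-backed Type is copied straight into the JSON struct, so the old switch over checker constants and its errorInvalidType error path are no longer needed. That is presumably also why the function gains a //nolint:unparam annotation, since its error result is now always nil.

package main

import (
	"encoding/json"
	"fmt"
)

// Minimal stand-ins for the checker types shown in the first file of the diff.
type DangerousWorkflowType string

type File struct {
	Path   string
	Offset uint
}

type DangerousWorkflow struct {
	Type DangerousWorkflowType
	File File
}

// Minimal stand-ins for the JSON output types shown in the second file.
type jsonFile struct {
	Path   string `json:"path"`
	Offset int    `json:"offset"`
}

type jsonWorkflow struct {
	File *jsonFile `json:"file"`
	Type string    `json:"type"`
}

// toJSONWorkflows mirrors the simplified loop: one direct string conversion
// per workflow and nothing that can fail.
func toJSONWorkflows(workflows []DangerousWorkflow) []jsonWorkflow {
	out := []jsonWorkflow{}
	for _, e := range workflows {
		out = append(out, jsonWorkflow{
			File: &jsonFile{Path: e.File.Path, Offset: int(e.File.Offset)},
			Type: string(e.Type), // direct conversion replaces the old switch
		})
	}
	return out
}

func main() {
	ws := []DangerousWorkflow{
		{Type: "untrustedCheckout", File: File{Path: ".github/workflows/ci.yml", Offset: 12}},
	}
	b, _ := json.Marshal(toJSONWorkflows(ws))
	fmt.Println(string(b))
	// [{"file":{"path":".github/workflows/ci.yml","offset":12},"type":"untrustedCheckout"}]
}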