2016-07-20 11:02:01 +01:00
|
|
|
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
2018-07-19 17:42:25 +01:00
|
|
|
// Package gosec holds the central scanning logic used by gosec security scanner
|
|
|
|
package gosec
|
2016-07-20 11:02:01 +01:00
|
|
|
|
|
|
|
import (
|
2019-04-30 07:13:10 +01:00
|
|
|
"fmt"
|
2016-07-20 11:02:01 +01:00
|
|
|
"go/ast"
|
2017-05-10 05:26:53 +01:00
|
|
|
"go/build"
|
2016-07-20 11:02:01 +01:00
|
|
|
"go/token"
|
|
|
|
"go/types"
|
|
|
|
"log"
|
2017-07-19 22:17:00 +01:00
|
|
|
"os"
|
2016-12-02 18:20:23 +00:00
|
|
|
"path"
|
2020-01-15 15:27:56 +00:00
|
|
|
"path/filepath"
|
2016-07-20 11:02:01 +01:00
|
|
|
"reflect"
|
2017-10-05 22:32:03 +01:00
|
|
|
"regexp"
|
2019-02-26 22:24:06 +00:00
|
|
|
"strconv"
|
2016-07-20 11:02:01 +01:00
|
|
|
"strings"
|
2022-02-16 17:23:37 +00:00
|
|
|
"sync"
|
2017-04-26 00:01:28 +01:00
|
|
|
|
2023-02-09 11:28:53 +00:00
|
|
|
"golang.org/x/tools/go/analysis"
|
|
|
|
"golang.org/x/tools/go/analysis/passes/buildssa"
|
2019-04-25 08:25:32 +01:00
|
|
|
"golang.org/x/tools/go/packages"
|
2024-03-08 11:09:28 +00:00
|
|
|
|
|
|
|
"github.com/securego/gosec/v2/analyzers"
|
|
|
|
"github.com/securego/gosec/v2/issue"
|
2016-07-20 11:02:01 +01:00
|
|
|
)
|
|
|
|
|
2019-10-02 13:05:14 +01:00
|
|
|
// LoadMode controls the amount of details to return when loading the packages
const LoadMode = packages.NeedName |
	packages.NeedFiles |
	packages.NeedCompiledGoFiles |
	packages.NeedImports |
	packages.NeedTypes |
	packages.NeedTypesSizes |
	packages.NeedTypesInfo |
	packages.NeedSyntax | // parsed ASTs are required for rule checks (ast.Walk)
	packages.NeedModule |
	packages.NeedEmbedFiles |
	packages.NeedEmbedPatterns
|
2019-10-02 13:05:14 +01:00
|
|
|
|
2021-12-09 10:53:36 +00:00
|
|
|
const (
	// externalSuppressionJustification is recorded on issues suppressed
	// through external (global) configuration rather than in-source tags.
	externalSuppressionJustification = "Globally suppressed."

	// aliasOfAllRules is the wildcard used when a suppression applies to
	// every rule instead of a specific rule ID.
	aliasOfAllRules = "*"
)
|
|
|
|
|
2023-10-18 10:31:54 +01:00
|
|
|
// ignore records a suppressed range of lines within a single file together
// with the suppression details, keyed by rule ID.
type ignore struct {
	start        int // first suppressed line (inclusive)
	end          int // last suppressed line (inclusive)
	suppressions map[string][]issue.SuppressionInfo
}
|
|
|
|
|
|
|
|
// ignores maps a file path to the list of suppressed line ranges in that file.
type ignores map[string][]ignore
|
|
|
|
|
|
|
|
func newIgnores() ignores {
|
|
|
|
return make(map[string][]ignore)
|
|
|
|
}
|
|
|
|
|
|
|
|
func (i ignores) parseLine(line string) (int, int) {
|
|
|
|
parts := strings.Split(line, "-")
|
|
|
|
start, err := strconv.Atoi(parts[0])
|
|
|
|
if err != nil {
|
|
|
|
start = 0
|
|
|
|
}
|
|
|
|
end := start
|
|
|
|
if len(parts) > 1 {
|
|
|
|
if e, err := strconv.Atoi(parts[1]); err == nil {
|
|
|
|
end = e
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return start, end
|
|
|
|
}
|
|
|
|
|
|
|
|
// add records the given per-rule suppressions for a line range in file.
// If an existing range already encloses the new one, the suppressions are
// merged into it; otherwise a new ignore entry is appended.
func (i ignores) add(file string, line string, suppressions map[string]issue.SuppressionInfo) {
	is := []ignore{}
	if _, ok := i[file]; ok {
		is = i[file]
	}
	found := false
	start, end := i.parseLine(line)
	for _, ig := range is {
		// Merge into the first existing range that fully contains [start, end].
		if ig.start <= start && ig.end >= end {
			found = true
			for r, s := range suppressions {
				ss, ok := ig.suppressions[r]
				if !ok {
					ss = []issue.SuppressionInfo{}
				}
				ss = append(ss, s)
				// ig is a copy, but ig.suppressions aliases the stored map,
				// so this mutation is visible in i[file].
				ig.suppressions[r] = ss
			}
			break
		}
	}
	if !found {
		// No enclosing range: create a fresh entry with one SuppressionInfo
		// per rule ID.
		ig := ignore{
			start:        start,
			end:          end,
			suppressions: map[string][]issue.SuppressionInfo{},
		}
		for r, s := range suppressions {
			ig.suppressions[r] = []issue.SuppressionInfo{s}
		}
		is = append(is, ig)
	}
	i[file] = is
}
|
|
|
|
|
|
|
|
// get returns the suppressions recorded for a line range in file. A stored
// range matches when it contains the queried range or is contained by it.
// An empty map is returned when nothing matches.
func (i ignores) get(file string, line string) map[string][]issue.SuppressionInfo {
	start, end := i.parseLine(line)
	if is, ok := i[file]; ok {
		for _, i := range is {
			// Match on containment in either direction.
			if i.start <= start && i.end >= end || start <= i.start && end >= i.end {
				return i.suppressions
			}
		}
	}
	return map[string][]issue.SuppressionInfo{}
}
|
|
|
|
|
2016-08-12 14:17:28 +01:00
|
|
|
// The Context is populated with data parsed from the source code as it is scanned.
|
|
|
|
// It is passed through to all rule functions as they are called. Rules may use
|
2023-05-26 16:03:54 +01:00
|
|
|
// this data in conjunction with the encountered AST node.
|
2016-07-20 11:02:01 +01:00
|
|
|
type Context struct {
	FileSet      *token.FileSet  // positions for the package being scanned
	Comments     ast.CommentMap  // comments of the current file, for #nosec lookup
	Info         *types.Info     // type-checker results for the package
	Pkg          *types.Package  // the package under analysis
	PkgFiles     []*ast.File     // all parsed files of the package
	Root         *ast.File       // the file currently being walked
	Imports      *ImportTracker  // import names/aliases seen in the current file
	Config       Config          // active gosec configuration
	Ignores      ignores         // accumulated #nosec suppressions per file
	PassedValues map[string]interface{} // scratch space rules can share during a walk
}
|
|
|
|
|
2023-02-15 19:47:07 +00:00
|
|
|
// GetFileAtNodePos returns the file at the node position in the file set available in the context.
|
2023-02-15 19:44:13 +00:00
|
|
|
func (ctx *Context) GetFileAtNodePos(node ast.Node) *token.File {
	// May return nil if the node position is not in the file set.
	return ctx.FileSet.File(node.Pos())
}
|
|
|
|
|
|
|
|
// NewIssue creates a new issue
|
|
|
|
func (ctx *Context) NewIssue(node ast.Node, ruleID, desc string,
	severity, confidence issue.Score,
) *issue.Issue {
	// Delegates to issue.New, resolving the node's file from the context.
	return issue.New(ctx.GetFileAtNodePos(node), node, ruleID, desc, severity, confidence)
}
|
|
|
|
|
2016-08-12 14:17:28 +01:00
|
|
|
// Metrics used when reporting information about a scanning run.
|
2016-07-20 11:02:01 +01:00
|
|
|
type Metrics struct {
	NumFiles int `json:"files"` // files scanned
	NumLines int `json:"lines"` // total lines across scanned files
	NumNosec int `json:"nosec"` // #nosec (or alternative tag) occurrences honored
	NumFound int `json:"found"` // issues reported
}
|
|
|
|
|
2018-07-19 17:42:25 +01:00
|
|
|
// Analyzer object is the main object of gosec. It has methods traverse an AST
|
2016-08-12 14:17:28 +01:00
|
|
|
// and invoke the correct checking rules as on each node as required.
|
2016-07-20 11:02:01 +01:00
|
|
|
type Analyzer struct {
	ignoreNosec       bool      // when true, #nosec comments are not honored
	ruleset           RuleSet   // AST rules registered via LoadRules
	context           *Context  // per-file scanning state shared with rules
	config            Config    // active configuration
	logger            *log.Logger
	issues            []*issue.Issue
	stats             *Metrics
	errors            map[string][]Error // keys are file paths; values are the golang errors in those files
	tests             bool // include *_test.go files in the scan
	excludeGenerated  bool // skip files marked as generated
	showIgnored       bool // report issues even when suppressed by #nosec
	trackSuppressions bool // record suppressed issues instead of dropping them
	concurrency       int  // number of package-loading workers in Process
	analyzerSet       *analyzers.AnalyzerSet // SSA-based analyzers registered via LoadAnalyzers
	mu                sync.Mutex // guards build-flag mutation in load (shared packages.Config)
}
|
|
|
|
|
2018-10-11 13:45:31 +01:00
|
|
|
// NewAnalyzer builds a new analyzer.
|
2022-02-16 17:23:37 +00:00
|
|
|
// NewAnalyzer builds a new analyzer.
func NewAnalyzer(conf Config, tests bool, excludeGenerated bool, trackSuppressions bool, concurrency int, logger *log.Logger) *Analyzer {
	// Global config toggles; errors here mean the global is unset, so the
	// zero-value default is kept.
	ignoreNoSec := false
	if enabled, err := conf.IsGlobalEnabled(Nosec); err == nil {
		ignoreNoSec = enabled
	}
	showIgnored := false
	if enabled, err := conf.IsGlobalEnabled(ShowIgnored); err == nil {
		showIgnored = enabled
	}
	// Fall back to a stderr logger when the caller supplies none.
	if logger == nil {
		logger = log.New(os.Stderr, "[gosec]", log.LstdFlags)
	}
	return &Analyzer{
		ignoreNosec:       ignoreNoSec,
		showIgnored:       showIgnored,
		ruleset:           NewRuleSet(),
		context:           &Context{},
		config:            conf,
		logger:            logger,
		issues:            make([]*issue.Issue, 0, 16),
		stats:             &Metrics{},
		errors:            make(map[string][]Error),
		tests:             tests,
		concurrency:       concurrency,
		excludeGenerated:  excludeGenerated,
		trackSuppressions: trackSuppressions,
		analyzerSet:       analyzers.NewAnalyzerSet(),
	}
}
|
|
|
|
|
2021-11-09 20:02:24 +00:00
|
|
|
// SetConfig updates the analyzer configuration
|
2019-06-25 09:29:19 +01:00
|
|
|
func (gosec *Analyzer) SetConfig(conf Config) {
	gosec.config = conf
}
|
|
|
|
|
2019-06-25 10:56:26 +01:00
|
|
|
// Config returns the current configuration
|
|
|
|
func (gosec *Analyzer) Config() Config {
	return gosec.config
}
|
|
|
|
|
2017-12-13 12:35:47 +00:00
|
|
|
// LoadRules instantiates all the rules to be used when analyzing source
|
|
|
|
// packages
|
2021-12-09 10:53:36 +00:00
|
|
|
func (gosec *Analyzer) LoadRules(ruleDefinitions map[string]RuleBuilder, ruleSuppressed map[string]bool) {
	for id, def := range ruleDefinitions {
		// Each builder returns the rule plus the AST node types it fires on.
		r, nodes := def(id, gosec.config)
		gosec.ruleset.Register(r, ruleSuppressed[id], nodes...)
	}
}
|
|
|
|
|
2024-08-20 09:43:40 +01:00
|
|
|
// LoadAnalyzers instantiates all the analyzers to be used when analyzing source
|
|
|
|
// packages
|
|
|
|
// LoadAnalyzers instantiates all the analyzers to be used when analyzing source
// packages
func (gosec *Analyzer) LoadAnalyzers(analyzerDefinitions map[string]analyzers.AnalyzerDefinition, analyzerSuppressed map[string]bool) {
	for id, def := range analyzerDefinitions {
		r := def.Create(def.ID, def.Description)
		gosec.analyzerSet.Register(r, analyzerSuppressed[id])
	}
}
|
|
|
|
|
2017-12-13 12:35:47 +00:00
|
|
|
// Process kicks off the analysis process for a given package
|
2018-07-19 17:42:25 +01:00
|
|
|
// Process kicks off the analysis process for a given package
func (gosec *Analyzer) Process(buildTags []string, packagePaths ...string) error {
	config := &packages.Config{
		Mode:       LoadMode,
		BuildFlags: buildTags,
		Tests:      gosec.tests,
	}

	// result carries one package path's load outcome back to the collector.
	type result struct {
		pkgPath string
		pkgs    []*packages.Package
		err     error
	}

	results := make(chan result)
	// jobs is pre-sized so every path can be enqueued without blocking
	// before any worker starts; workers exit via the select default once
	// the buffer is drained.
	jobs := make(chan string, len(packagePaths))
	quit := make(chan struct{})

	var wg sync.WaitGroup

	worker := func(j chan string, r chan result, quit chan struct{}) {
		for {
			select {
			case s := <-j:
				pkgs, err := gosec.load(s, config)
				select {
				case r <- result{pkgPath: s, pkgs: pkgs, err: err}:
				case <-quit:
					// we've been told to stop, probably an error while
					// processing a previous result.
					wg.Done()
					return
				}
			default:
				// j is empty and there are no jobs left
				wg.Done()
				return
			}
		}
	}

	// fill the buffer
	for _, pkgPath := range packagePaths {
		jobs <- pkgPath
	}

	for i := 0; i < gosec.concurrency; i++ {
		wg.Add(1)
		go worker(jobs, results, quit)
	}

	// Close results once all workers are finished so the collector loop
	// below terminates.
	go func() {
		wg.Wait()
		close(results)
	}()

	for r := range results {
		if r.err != nil {
			gosec.AppendError(r.pkgPath, r.err)
		}
		for _, pkg := range r.pkgs {
			if pkg.Name != "" {
				err := gosec.ParseErrors(pkg)
				if err != nil {
					// Abort: signal workers blocked on sending results,
					// then wait for them before returning.
					close(quit)
					wg.Wait() // wait for the goroutines to stop
					return fmt.Errorf("parsing errors in pkg %q: %w", pkg.Name, err)
				}
				gosec.CheckRules(pkg)
				gosec.CheckAnalyzers(pkg)
			}
		}
	}
	sortErrors(gosec.errors)
	return nil
}
|
2018-01-07 23:02:33 +00:00
|
|
|
|
2019-04-27 09:01:27 +01:00
|
|
|
// load resolves the Go (and Cgo/test) files of the package at pkgPath and
// loads them via go/packages. A nonexistent path is skipped (nil error,
// empty result). The shared conf.BuildFlags is mutated under gosec.mu
// because load runs concurrently from Process workers.
func (gosec *Analyzer) load(pkgPath string, conf *packages.Config) ([]*packages.Package, error) {
	abspath, err := GetPkgAbsPath(pkgPath)
	if err != nil {
		gosec.logger.Printf("Skipping: %s. Path doesn't exist.", abspath)
		return []*packages.Package{}, nil
	}

	gosec.logger.Println("Import directory:", abspath)
	// step 1/3 create build context.
	buildD := build.Default
	// step 2/3: add build tags to get env dependent files into basePackage.
	gosec.mu.Lock()
	buildD.BuildTags = conf.BuildFlags
	gosec.mu.Unlock()
	basePackage, err := buildD.ImportDir(pkgPath, build.ImportComment)
	if err != nil {
		return []*packages.Package{}, fmt.Errorf("importing dir %q: %w", pkgPath, err)
	}

	var packageFiles []string
	for _, filename := range basePackage.GoFiles {
		packageFiles = append(packageFiles, path.Join(pkgPath, filename))
	}
	// Cgo files are included so their expanded sources get scanned too.
	for _, filename := range basePackage.CgoFiles {
		packageFiles = append(packageFiles, path.Join(pkgPath, filename))
	}

	if gosec.tests {
		testsFiles := make([]string, 0)
		testsFiles = append(testsFiles, basePackage.TestGoFiles...)
		testsFiles = append(testsFiles, basePackage.XTestGoFiles...)
		for _, filename := range testsFiles {
			packageFiles = append(packageFiles, path.Join(pkgPath, filename))
		}
	}

	// step 3/3 remove build tags from conf to proceed build correctly.
	gosec.mu.Lock()
	conf.BuildFlags = nil
	// Held until return so packages.Load sees the cleared flags without
	// interference from a concurrent load call.
	defer gosec.mu.Unlock()
	pkgs, err := packages.Load(conf, packageFiles...)
	if err != nil {
		return []*packages.Package{}, fmt.Errorf("loading files from package %q: %w", pkgPath, err)
	}
	return pkgs, nil
}
|
|
|
|
|
2023-10-12 09:30:22 +01:00
|
|
|
// CheckRules runs analysis on the given package.
|
2023-02-09 11:28:53 +00:00
|
|
|
func (gosec *Analyzer) CheckRules(pkg *packages.Package) {
|
2019-04-27 09:01:27 +01:00
|
|
|
gosec.logger.Println("Checking package:", pkg.Name)
|
|
|
|
for _, file := range pkg.Syntax {
|
2021-12-22 16:50:46 +00:00
|
|
|
fp := pkg.Fset.File(file.Pos())
|
|
|
|
if fp == nil {
|
|
|
|
// skip files which cannot be located
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
checkedFile := fp.Name()
|
2020-01-15 15:27:56 +00:00
|
|
|
// Skip the no-Go file from analysis (e.g. a Cgo files is expanded in 3 different files
|
|
|
|
// stored in the cache which do not need to by analyzed)
|
|
|
|
if filepath.Ext(checkedFile) != ".go" {
|
|
|
|
continue
|
|
|
|
}
|
2024-03-07 13:27:03 +00:00
|
|
|
if gosec.excludeGenerated && ast.IsGenerated(file) {
|
2021-08-04 16:33:20 +01:00
|
|
|
gosec.logger.Println("Ignoring generated file:", checkedFile)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
|
2020-01-15 15:27:56 +00:00
|
|
|
gosec.logger.Println("Checking file:", checkedFile)
|
2019-04-27 09:01:27 +01:00
|
|
|
gosec.context.FileSet = pkg.Fset
|
|
|
|
gosec.context.Config = gosec.config
|
|
|
|
gosec.context.Comments = ast.NewCommentMap(gosec.context.FileSet, file, file.Comments)
|
|
|
|
gosec.context.Root = file
|
|
|
|
gosec.context.Info = pkg.TypesInfo
|
|
|
|
gosec.context.Pkg = pkg.Types
|
|
|
|
gosec.context.PkgFiles = pkg.Syntax
|
|
|
|
gosec.context.Imports = NewImportTracker()
|
2020-01-06 08:55:52 +00:00
|
|
|
gosec.context.PassedValues = make(map[string]interface{})
|
2023-10-18 10:31:54 +01:00
|
|
|
gosec.updateIgnores()
|
2019-04-27 09:01:27 +01:00
|
|
|
ast.Walk(gosec, file)
|
|
|
|
gosec.stats.NumFiles++
|
|
|
|
gosec.stats.NumLines += pkg.Fset.File(file.Pos()).LineCount()
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-10-12 09:30:22 +01:00
|
|
|
// CheckAnalyzers runs analyzers on a given package.
|
2023-02-09 11:28:53 +00:00
|
|
|
// CheckAnalyzers runs analyzers on a given package.
func (gosec *Analyzer) CheckAnalyzers(pkg *packages.Package) {
	// The SSA form is a prerequisite for every registered analyzer; bail out
	// (with a log line) if it cannot be built.
	ssaResult, err := gosec.buildSSA(pkg)
	if err != nil || ssaResult == nil {
		gosec.logger.Printf("Error building the SSA representation of the package %q: %s", pkg.Name, err)
		return
	}

	// Pre-computed pass results handed to each analyzer via ResultOf.
	resultMap := map[*analysis.Analyzer]interface{}{
		buildssa.Analyzer: &analyzers.SSAAnalyzerResult{
			Config: gosec.Config(),
			Logger: gosec.logger,
			SSA:    ssaResult.(*buildssa.SSA),
		},
	}

	generatedFiles := gosec.generatedFiles(pkg)

	for _, analyzer := range gosec.analyzerSet.Analyzers {
		// A fresh pass per analyzer; fact import/export is unused (nil) and
		// diagnostics are discarded — analyzers return issues directly.
		pass := &analysis.Pass{
			Analyzer:          analyzer,
			Fset:              pkg.Fset,
			Files:             pkg.Syntax,
			OtherFiles:        pkg.OtherFiles,
			IgnoredFiles:      pkg.IgnoredFiles,
			Pkg:               pkg.Types,
			TypesInfo:         pkg.TypesInfo,
			TypesSizes:        pkg.TypesSizes,
			ResultOf:          resultMap,
			Report:            func(d analysis.Diagnostic) {},
			ImportObjectFact:  nil,
			ExportObjectFact:  nil,
			ImportPackageFact: nil,
			ExportPackageFact: nil,
			AllObjectFacts:    nil,
			AllPackageFacts:   nil,
		}
		result, err := pass.Analyzer.Run(pass)
		if err != nil {
			// One failing analyzer should not abort the others.
			gosec.logger.Printf("Error running analyzer %s: %s\n", analyzer.Name, err)
			continue
		}
		if result != nil {
			if passIssues, ok := result.([]*issue.Issue); ok {
				for _, iss := range passIssues {
					// Drop issues located in generated files when requested.
					if gosec.excludeGenerated {
						if _, ok := generatedFiles[iss.File]; ok {
							continue
						}
					}
					gosec.updateIssues(iss)
				}
			}
		}
	}
}
|
|
|
|
|
2023-11-30 16:42:44 +00:00
|
|
|
func (gosec *Analyzer) generatedFiles(pkg *packages.Package) map[string]bool {
|
|
|
|
generatedFiles := map[string]bool{}
|
|
|
|
for _, file := range pkg.Syntax {
|
2024-03-07 13:27:03 +00:00
|
|
|
if ast.IsGenerated(file) {
|
2023-11-30 16:42:44 +00:00
|
|
|
fp := pkg.Fset.File(file.Pos())
|
|
|
|
if fp == nil {
|
|
|
|
// skip files which cannot be located
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
generatedFiles[fp.Name()] = true
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return generatedFiles
|
|
|
|
}
|
|
|
|
|
2023-10-12 09:30:22 +01:00
|
|
|
// buildSSA runs the SSA pass which builds the SSA representation of the package. It handles gracefully any panic.
|
|
|
|
// buildSSA runs the SSA pass which builds the SSA representation of the package. It handles gracefully any panic.
func (gosec *Analyzer) buildSSA(pkg *packages.Package) (interface{}, error) {
	// buildssa can panic on malformed packages; recover so one bad package
	// does not kill the whole scan. NOTE: after a recover the function
	// returns zero values (nil, nil), so callers must also check for a nil
	// result, not only the error.
	defer func() {
		if r := recover(); r != nil {
			gosec.logger.Printf("Panic when running SSA analyser on package: %s", pkg.Name)
		}
	}()
	ssaPass := &analysis.Pass{
		Analyzer:          buildssa.Analyzer,
		Fset:              pkg.Fset,
		Files:             pkg.Syntax,
		OtherFiles:        pkg.OtherFiles,
		IgnoredFiles:      pkg.IgnoredFiles,
		Pkg:               pkg.Types,
		TypesInfo:         pkg.TypesInfo,
		TypesSizes:        pkg.TypesSizes,
		ResultOf:          nil,
		Report:            nil,
		ImportObjectFact:  nil,
		ExportObjectFact:  nil,
		ImportPackageFact: nil,
		ExportPackageFact: nil,
		AllObjectFacts:    nil,
		AllPackageFacts:   nil,
	}

	return ssaPass.Analyzer.Run(ssaPass)
}
|
|
|
|
|
2019-04-30 12:53:22 +01:00
|
|
|
// ParseErrors parses the errors from given package
|
|
|
|
func (gosec *Analyzer) ParseErrors(pkg *packages.Package) error {
|
2019-04-27 09:01:27 +01:00
|
|
|
if len(pkg.Errors) == 0 {
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
for _, pkgErr := range pkg.Errors {
|
2019-04-30 12:53:22 +01:00
|
|
|
parts := strings.Split(pkgErr.Pos, ":")
|
|
|
|
file := parts[0]
|
2019-04-30 07:13:10 +01:00
|
|
|
var err error
|
2019-04-30 12:53:22 +01:00
|
|
|
var line int
|
|
|
|
if len(parts) > 1 {
|
|
|
|
if line, err = strconv.Atoi(parts[1]); err != nil {
|
2021-09-13 08:40:10 +01:00
|
|
|
return fmt.Errorf("parsing line: %w", err)
|
2019-04-30 07:13:10 +01:00
|
|
|
}
|
2019-04-30 12:53:22 +01:00
|
|
|
}
|
|
|
|
var column int
|
|
|
|
if len(parts) > 2 {
|
|
|
|
if column, err = strconv.Atoi(parts[2]); err != nil {
|
2021-09-13 08:40:10 +01:00
|
|
|
return fmt.Errorf("parsing column: %w", err)
|
2019-04-30 07:13:10 +01:00
|
|
|
}
|
2019-04-27 09:01:27 +01:00
|
|
|
}
|
2019-04-30 12:53:22 +01:00
|
|
|
msg := strings.TrimSpace(pkgErr.Msg)
|
|
|
|
newErr := NewError(line, column, msg)
|
|
|
|
if errSlice, ok := gosec.errors[file]; ok {
|
|
|
|
gosec.errors[file] = append(errSlice, *newErr)
|
2019-04-27 09:01:27 +01:00
|
|
|
} else {
|
2019-04-30 07:13:10 +01:00
|
|
|
errSlice = []Error{}
|
2019-04-30 12:53:22 +01:00
|
|
|
gosec.errors[file] = append(errSlice, *newErr)
|
2019-04-27 09:01:27 +01:00
|
|
|
}
|
|
|
|
}
|
2017-05-10 05:26:53 +01:00
|
|
|
return nil
|
2016-07-20 11:02:01 +01:00
|
|
|
}
|
|
|
|
|
2019-04-30 15:57:32 +01:00
|
|
|
// AppendError appends an error to the file errors
|
|
|
|
func (gosec *Analyzer) AppendError(file string, err error) {
|
2019-04-30 16:14:26 +01:00
|
|
|
// Do not report the error for empty packages (e.g. files excluded from build with a tag)
|
2019-04-30 15:57:32 +01:00
|
|
|
r := regexp.MustCompile(`no buildable Go source files in`)
|
|
|
|
if r.MatchString(err.Error()) {
|
|
|
|
return
|
|
|
|
}
|
2021-08-18 12:00:38 +01:00
|
|
|
errors := make([]Error, 0)
|
2019-04-30 15:57:32 +01:00
|
|
|
if ferrs, ok := gosec.errors[file]; ok {
|
|
|
|
errors = ferrs
|
|
|
|
}
|
|
|
|
ferr := NewError(0, 0, err.Error())
|
|
|
|
errors = append(errors, *ferr)
|
|
|
|
gosec.errors[file] = errors
|
|
|
|
}
|
|
|
|
|
2020-01-06 08:47:28 +00:00
|
|
|
// ignore a node (and sub-tree) if it is tagged with a nosec tag comment
|
2023-02-15 19:44:13 +00:00
|
|
|
// ignore a node (and sub-tree) if it is tagged with a nosec tag comment.
// Returns the per-rule suppressions declared on the node (aliasOfAllRules
// when no specific G-rule IDs are listed), or nil when the node carries no
// nosec tag or nosec handling is disabled.
func (gosec *Analyzer) ignore(n ast.Node) map[string]issue.SuppressionInfo {
	if groups, ok := gosec.context.Comments[n]; ok && !gosec.ignoreNosec {

		// Checks if an alternative for #nosec is set and, if not, uses the default.
		noSecDefaultTag, err := gosec.config.GetGlobal(Nosec)
		if err != nil {
			noSecDefaultTag = NoSecTag(string(Nosec))
		} else {
			noSecDefaultTag = NoSecTag(noSecDefaultTag)
		}
		noSecAlternativeTag, err := gosec.config.GetGlobal(NoSecAlternative)
		if err != nil {
			noSecAlternativeTag = noSecDefaultTag
		} else {
			noSecAlternativeTag = NoSecTag(noSecAlternativeTag)
		}

		for _, group := range groups {
			comment := strings.TrimSpace(group.Text())
			// The tag may open the comment or start any later line of the group.
			foundDefaultTag := strings.HasPrefix(comment, noSecDefaultTag) || regexp.MustCompile("\n *"+noSecDefaultTag).MatchString(comment)
			foundAlternativeTag := strings.HasPrefix(comment, noSecAlternativeTag) || regexp.MustCompile("\n *"+noSecAlternativeTag).MatchString(comment)

			if foundDefaultTag || foundAlternativeTag {
				gosec.stats.NumNosec++

				// Discard what's in front of the nosec tag.
				if foundDefaultTag {
					comment = strings.SplitN(comment, noSecDefaultTag, 2)[1]
				} else {
					comment = strings.SplitN(comment, noSecAlternativeTag, 2)[1]
				}

				// Extract the directive and the justification.
				// The justification follows a "--" (two or more dashes) separator.
				justification := ""
				commentParts := regexp.MustCompile(`-{2,}`).Split(comment, 2)
				directive := commentParts[0]
				if len(commentParts) > 1 {
					justification = strings.TrimSpace(strings.TrimRight(commentParts[1], "\n"))
				}

				// Pull out the specific rules that are listed to be ignored.
				re := regexp.MustCompile(`(G\d{3})`)
				matches := re.FindAllStringSubmatch(directive, -1)

				suppression := issue.SuppressionInfo{
					Kind:          "inSource",
					Justification: justification,
				}

				// Find the rule IDs to ignore.
				ignores := make(map[string]issue.SuppressionInfo)
				for _, v := range matches {
					ignores[v[1]] = suppression
				}

				// If no specific rules were given, ignore everything.
				if len(matches) == 0 {
					ignores[aliasOfAllRules] = suppression
				}
				return ignores
			}
		}
	}
	return nil
}
|
|
|
|
|
2018-07-19 17:42:25 +01:00
|
|
|
// Visit runs the gosec visitor logic over an AST created by parsing go code.
|
2016-08-12 14:17:28 +01:00
|
|
|
// Rule methods added with AddRule will be invoked as necessary.
|
2018-07-19 17:42:25 +01:00
|
|
|
func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor {
|
2023-02-09 11:28:53 +00:00
|
|
|
// Using ast.File instead of ast.ImportSpec, so that we can track all imports at once.
|
Refactor to support duplicate imports with different aliases (#865)
The existing code assumed imports to be either imported, or imported with an
alias. Badly formatted files may have duplicate imports for a package, using
different aliases.
This patch refactors the code, and;
Introduces a new `GetImportedNames` function, which returns all name(s) and
aliase(s) for a package, which effectively combines `GetAliasedName` and
`GetImportedName`, but adding support for duplicate imports.
The old `GetAliasedName` and `GetImportedName` functions have been rewritten to
use the new function and marked deprecated, but could be removed if there are no
external consumers.
With this patch, the linter is able to detect issues in files such as;
package main
import (
crand "crypto/rand"
"math/big"
"math/rand"
rand2 "math/rand"
rand3 "math/rand"
)
func main() {
_, _ = crand.Int(crand.Reader, big.NewInt(int64(2))) // good
_ = rand.Intn(2) // bad
_ = rand2.Intn(2) // bad
_ = rand3.Intn(2) // bad
}
Before this patch, only a single issue would be detected:
gosec --quiet .
[main.go:14] - G404 (CWE-338): Use of weak random number generator (math/rand instead of crypto/rand) (Confidence: MEDIUM, Severity: HIGH)
13:
> 14: _ = rand.Intn(2) // bad
15: _ = rand2.Intn(2) // bad
With this patch, all issues are identified:
gosec --quiet .
[main.go:16] - G404 (CWE-338): Use of weak random number generator (math/rand instead of crypto/rand) (Confidence: MEDIUM, Severity: HIGH)
15: _ = rand2.Intn(2) // bad
> 16: _ = rand3.Intn(2) // bad
17: }
[main.go:15] - G404 (CWE-338): Use of weak random number generator (math/rand instead of crypto/rand) (Confidence: MEDIUM, Severity: HIGH)
14: _ = rand.Intn(2) // bad
> 15: _ = rand2.Intn(2) // bad
16: _ = rand3.Intn(2) // bad
[main.go:14] - G404 (CWE-338): Use of weak random number generator (math/rand instead of crypto/rand) (Confidence: MEDIUM, Severity: HIGH)
13:
> 14: _ = rand.Intn(2) // bad
15: _ = rand2.Intn(2) // bad
While working on this change, I noticed that ImportTracker.TrackFile() was not able
to find import aliases; Analyser.Check() called both ImportTracker.TrackFile() and
ast.Walk(), which (with the updated ImportTracker) resulted in importes to be in-
correctly included multiple times (once with the correct alias, once with the default).
I updated ImportTracker.TrackFile() to fix this, but with the updated ImportTracker,
Analyser.Check() no longer has to call ImportTracker.TrackFile() separately, as ast.Walk()
already handles the file, and will find all imports.
Signed-off-by: Sebastiaan van Stijn <github@gone.nl>
Signed-off-by: Sebastiaan van Stijn <github@gone.nl>
2022-10-17 09:59:18 +01:00
|
|
|
switch i := n.(type) {
|
|
|
|
case *ast.File:
|
|
|
|
gosec.context.Imports.TrackFile(i)
|
|
|
|
}
|
2016-11-07 17:13:20 +00:00
|
|
|
|
2023-02-09 11:28:53 +00:00
|
|
|
for _, rule := range gosec.ruleset.RegisteredFor(n) {
|
|
|
|
issue, err := rule.Match(n, gosec.context)
|
|
|
|
if err != nil {
|
|
|
|
file, line := GetLocation(n, gosec.context)
|
|
|
|
file = path.Base(file)
|
|
|
|
gosec.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line)
|
|
|
|
}
|
2023-10-13 13:04:21 +01:00
|
|
|
gosec.updateIssues(issue)
|
2023-02-09 11:28:53 +00:00
|
|
|
}
|
|
|
|
return gosec
|
|
|
|
}
|
|
|
|
|
2023-10-18 10:31:54 +01:00
|
|
|
func (gosec *Analyzer) updateIgnores() {
|
|
|
|
for n := range gosec.context.Comments {
|
|
|
|
gosec.updateIgnoredRulesForNode(n)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func (gosec *Analyzer) updateIgnoredRulesForNode(n ast.Node) {
|
2021-12-09 10:53:36 +00:00
|
|
|
ignoredRules := gosec.ignore(n)
|
2023-10-13 13:04:21 +01:00
|
|
|
if len(ignoredRules) > 0 {
|
|
|
|
if gosec.context.Ignores == nil {
|
2023-10-18 10:31:54 +01:00
|
|
|
gosec.context.Ignores = newIgnores()
|
2023-10-13 13:04:21 +01:00
|
|
|
}
|
|
|
|
line := issue.GetLine(gosec.context.FileSet.File(n.Pos()), n)
|
2023-10-18 10:31:54 +01:00
|
|
|
gosec.context.Ignores.add(
|
|
|
|
gosec.context.FileSet.File(n.Pos()).Name(),
|
|
|
|
line,
|
|
|
|
ignoredRules,
|
|
|
|
)
|
2016-07-20 11:02:01 +01:00
|
|
|
}
|
2023-10-13 13:04:21 +01:00
|
|
|
}
|
2017-10-05 22:32:03 +01:00
|
|
|
|
2023-10-13 13:04:21 +01:00
|
|
|
func (gosec *Analyzer) getSuppressionsAtLineInFile(file string, line string, id string) ([]issue.SuppressionInfo, bool) {
|
2023-10-18 10:31:54 +01:00
|
|
|
ignoredRules := gosec.context.Ignores.get(file, line)
|
2017-10-05 22:32:03 +01:00
|
|
|
|
2023-10-13 13:04:21 +01:00
|
|
|
// Check if the rule was specifically suppressed at this location.
|
2023-10-18 10:31:54 +01:00
|
|
|
generalSuppressions, generalIgnored := ignoredRules[aliasOfAllRules]
|
|
|
|
ruleSuppressions, ruleIgnored := ignoredRules[id]
|
2023-02-09 11:28:53 +00:00
|
|
|
ignored := generalIgnored || ruleIgnored
|
|
|
|
suppressions := append(generalSuppressions, ruleSuppressions...)
|
|
|
|
|
2023-10-13 13:04:21 +01:00
|
|
|
// Track external suppressions of this rule.
|
2024-08-20 09:43:40 +01:00
|
|
|
if gosec.ruleset.IsRuleSuppressed(id) || gosec.analyzerSet.IsSuppressed(id) {
|
2023-02-09 11:28:53 +00:00
|
|
|
ignored = true
|
2023-02-15 19:44:13 +00:00
|
|
|
suppressions = append(suppressions, issue.SuppressionInfo{
|
2023-02-09 11:28:53 +00:00
|
|
|
Kind: "external",
|
|
|
|
Justification: externalSuppressionJustification,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
return suppressions, ignored
|
|
|
|
}
|
|
|
|
|
2023-10-13 13:04:21 +01:00
|
|
|
func (gosec *Analyzer) updateIssues(issue *issue.Issue) {
|
2023-02-09 11:28:53 +00:00
|
|
|
if issue != nil {
|
2023-10-13 13:04:21 +01:00
|
|
|
suppressions, ignored := gosec.getSuppressionsAtLineInFile(issue.File, issue.Line, issue.RuleID)
|
2023-02-09 11:28:53 +00:00
|
|
|
if gosec.showIgnored {
|
|
|
|
issue.NoSec = ignored
|
2017-10-05 22:32:03 +01:00
|
|
|
}
|
2023-02-09 11:28:53 +00:00
|
|
|
if !ignored || !gosec.showIgnored {
|
|
|
|
gosec.stats.NumFound++
|
|
|
|
}
|
|
|
|
if ignored && gosec.trackSuppressions {
|
|
|
|
issue.WithSuppressions(suppressions)
|
|
|
|
gosec.issues = append(gosec.issues, issue)
|
|
|
|
} else if !ignored || gosec.showIgnored || gosec.ignoreNosec {
|
|
|
|
gosec.issues = append(gosec.issues, issue)
|
2017-10-05 22:32:03 +01:00
|
|
|
}
|
|
|
|
}
|
2023-02-09 11:28:53 +00:00
|
|
|
}
|
|
|
|
|
2017-05-10 05:26:53 +01:00
|
|
|
// Report returns the current issues discovered and the metrics about the scan,
// together with the collected scan errors (keyed by file name, per the map's
// construction elsewhere in this file — confirm against the loader).
func (gosec *Analyzer) Report() ([]*issue.Issue, *Metrics, map[string][]Error) {
	return gosec.issues, gosec.stats, gosec.errors
}
|
2017-07-19 22:17:00 +01:00
|
|
|
|
|
|
|
// Reset clears state such as context, issues and metrics from the configured analyzer
|
2018-07-19 17:42:25 +01:00
|
|
|
func (gosec *Analyzer) Reset() {
|
|
|
|
gosec.context = &Context{}
|
2023-02-15 19:44:13 +00:00
|
|
|
gosec.issues = make([]*issue.Issue, 0, 16)
|
2018-07-19 17:42:25 +01:00
|
|
|
gosec.stats = &Metrics{}
|
2019-06-25 10:14:27 +01:00
|
|
|
gosec.ruleset = NewRuleSet()
|
2024-08-20 09:43:40 +01:00
|
|
|
gosec.analyzerSet = analyzers.NewAnalyzerSet()
|
2017-07-19 22:17:00 +01:00
|
|
|
}
|