Add support for Go analysis framework and SSA code representation

This commit is contained in:
Cosmin Cojocar 2023-02-09 12:28:53 +01:00 committed by Cosmin Cojocar
parent e795d75a46
commit 31e63276f1
4 changed files with 302 additions and 48 deletions

View file

@ -31,6 +31,9 @@ import (
"strings" "strings"
"sync" "sync"
"github.com/securego/gosec/v2/analyzers"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/buildssa"
"golang.org/x/tools/go/packages" "golang.org/x/tools/go/packages"
) )
@ -42,7 +45,10 @@ const LoadMode = packages.NeedName |
packages.NeedTypes | packages.NeedTypes |
packages.NeedTypesSizes | packages.NeedTypesSizes |
packages.NeedTypesInfo | packages.NeedTypesInfo |
packages.NeedSyntax packages.NeedSyntax |
packages.NeedModule |
packages.NeedEmbedFiles |
packages.NeedEmbedPatterns
const externalSuppressionJustification = "Globally suppressed." const externalSuppressionJustification = "Globally suppressed."
@ -60,8 +66,8 @@ type Context struct {
Pkg *types.Package Pkg *types.Package
PkgFiles []*ast.File PkgFiles []*ast.File
Root *ast.File Root *ast.File
Config Config
Imports *ImportTracker Imports *ImportTracker
Config Config
Ignores []map[string][]SuppressionInfo Ignores []map[string][]SuppressionInfo
PassedValues map[string]interface{} PassedValues map[string]interface{}
} }
@ -90,6 +96,7 @@ type Analyzer struct {
showIgnored bool showIgnored bool
trackSuppressions bool trackSuppressions bool
concurrency int concurrency int
analyzerList []*analysis.Analyzer
} }
// SuppressionInfo object is to record the kind and the justification that used // SuppressionInfo object is to record the kind and the justification that used
@ -126,6 +133,7 @@ func NewAnalyzer(conf Config, tests bool, excludeGenerated bool, trackSuppressio
concurrency: concurrency, concurrency: concurrency,
excludeGenerated: excludeGenerated, excludeGenerated: excludeGenerated,
trackSuppressions: trackSuppressions, trackSuppressions: trackSuppressions,
analyzerList: analyzers.BuildDefaultAnalyzers(),
} }
} }
@ -216,7 +224,10 @@ func (gosec *Analyzer) Process(buildTags []string, packagePaths ...string) error
wg.Wait() // wait for the goroutines to stop wg.Wait() // wait for the goroutines to stop
return fmt.Errorf("parsing errors in pkg %q: %w", pkg.Name, err) return fmt.Errorf("parsing errors in pkg %q: %w", pkg.Name, err)
} }
gosec.Check(pkg) gosec.CheckRules(pkg)
if on, err := gosec.config.IsGlobalEnabled(SSA); err == nil && on {
gosec.CheckAnalyzers(pkg)
}
} }
} }
} }
@ -267,8 +278,8 @@ func (gosec *Analyzer) load(pkgPath string, conf *packages.Config) ([]*packages.
return pkgs, nil return pkgs, nil
} }
// Check runs analysis on the given package // CheckRules runs analysis on the given package
func (gosec *Analyzer) Check(pkg *packages.Package) { func (gosec *Analyzer) CheckRules(pkg *packages.Package) {
gosec.logger.Println("Checking package:", pkg.Name) gosec.logger.Println("Checking package:", pkg.Name)
for _, file := range pkg.Syntax { for _, file := range pkg.Syntax {
fp := pkg.Fset.File(file.Pos()) fp := pkg.Fset.File(file.Pos())
@ -303,6 +314,70 @@ func (gosec *Analyzer) Check(pkg *packages.Package) {
} }
} }
// CheckAnalyzers runs analyzers on a given package.
//
// It first runs the buildssa analyzer to obtain the SSA representation of
// the package, then makes that result available (together with the gosec
// configuration and logger) to every analyzer in gosec.analyzerList.
// Analyzer failures are logged and skipped; any *analyzers.Issue returned
// by an analyzer is converted to a gosec issue and recorded.
func (gosec *Analyzer) CheckAnalyzers(pkg *packages.Package) {
	ssaPass := gosec.newAnalysisPass(pkg, buildssa.Analyzer, nil)
	ssaResult, err := ssaPass.Analyzer.Run(ssaPass)
	if err != nil {
		gosec.logger.Printf("Error running SSA analyzer on package %q: %s", pkg.Name, err)
		return
	}
	// Expose the SSA build result to every subsequent analyzer pass.
	resultMap := map[*analysis.Analyzer]interface{}{
		buildssa.Analyzer: &analyzers.SSAAnalyzerResult{
			Config: gosec.Config(),
			Logger: gosec.logger,
			SSA:    ssaResult.(*buildssa.SSA),
		},
	}
	for _, analyzer := range gosec.analyzerList {
		pass := gosec.newAnalysisPass(pkg, analyzer, resultMap)
		result, err := pass.Analyzer.Run(pass)
		if err != nil {
			gosec.logger.Printf("Error running analyzer %s: %s\n", analyzer.Name, err)
			continue
		}
		if result != nil {
			if issue, ok := result.(*analyzers.Issue); ok {
				gosec.updateIssues(toGosecIssue(issue), false, []SuppressionInfo{})
			}
		}
	}
}

// newAnalysisPass assembles an analysis.Pass running analyzer over pkg.
// resultOf carries the results of prerequisite analyzers (nil for a root
// pass such as buildssa). Report is a no-op rather than nil so that an
// analyzer calling pass.Report cannot panic; diagnostics are returned as
// values instead.
func (gosec *Analyzer) newAnalysisPass(pkg *packages.Package, analyzer *analysis.Analyzer, resultOf map[*analysis.Analyzer]interface{}) *analysis.Pass {
	return &analysis.Pass{
		Analyzer:     analyzer,
		Fset:         pkg.Fset,
		Files:        pkg.Syntax,
		OtherFiles:   pkg.OtherFiles,
		IgnoredFiles: pkg.IgnoredFiles,
		Pkg:          pkg.Types,
		TypesInfo:    pkg.TypesInfo,
		TypesSizes:   pkg.TypesSizes,
		ResultOf:     resultOf,
		Report:       func(d analysis.Diagnostic) {},
	}
}
func isGeneratedFile(file *ast.File) bool { func isGeneratedFile(file *ast.File) bool {
for _, comment := range file.Comments { for _, comment := range file.Comments {
for _, row := range comment.List { for _, row := range comment.List {
@ -426,20 +501,37 @@ func (gosec *Analyzer) ignore(n ast.Node) map[string]SuppressionInfo {
// Visit runs the gosec visitor logic over an AST created by parsing go code. // Visit runs the gosec visitor logic over an AST created by parsing go code.
// Rule methods added with AddRule will be invoked as necessary. // Rule methods added with AddRule will be invoked as necessary.
func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor { func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor {
// If we've reached the end of this branch, pop off the ignores stack. ignores, ok := gosec.updateIgnoredRules(n)
if !ok {
return gosec
}
// Using ast.File instead of ast.ImportSpec, so that we can track all imports at once.
switch i := n.(type) {
case *ast.File:
gosec.context.Imports.TrackFile(i)
}
for _, rule := range gosec.ruleset.RegisteredFor(n) {
suppressions, ignored := gosec.updateSuppressions(rule.ID(), ignores)
issue, err := rule.Match(n, gosec.context)
if err != nil {
file, line := GetLocation(n, gosec.context)
file = path.Base(file)
gosec.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line)
}
gosec.updateIssues(issue, ignored, suppressions)
}
return gosec
}
func (gosec *Analyzer) updateIgnoredRules(n ast.Node) (map[string][]SuppressionInfo, bool) {
if n == nil { if n == nil {
if len(gosec.context.Ignores) > 0 { if len(gosec.context.Ignores) > 0 {
gosec.context.Ignores = gosec.context.Ignores[1:] gosec.context.Ignores = gosec.context.Ignores[1:]
} }
return gosec return nil, false
} }
switch i := n.(type) {
case *ast.File:
// Using ast.File instead of ast.ImportSpec, so that we can track
// all imports at once.
gosec.context.Imports.TrackFile(i)
}
// Get any new rule exclusions. // Get any new rule exclusions.
ignoredRules := gosec.ignore(n) ignoredRules := gosec.ignore(n)
@ -458,30 +550,30 @@ func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor {
// Push the new set onto the stack. // Push the new set onto the stack.
gosec.context.Ignores = append([]map[string][]SuppressionInfo{ignores}, gosec.context.Ignores...) gosec.context.Ignores = append([]map[string][]SuppressionInfo{ignores}, gosec.context.Ignores...)
for _, rule := range gosec.ruleset.RegisteredFor(n) { return ignores, true
}
func (gosec *Analyzer) updateSuppressions(id string, ignores map[string][]SuppressionInfo) ([]SuppressionInfo, bool) {
// Check if all rules are ignored. // Check if all rules are ignored.
generalSuppressions, generalIgnored := ignores[aliasOfAllRules] generalSuppressions, generalIgnored := ignores[aliasOfAllRules]
// Check if the specific rule is ignored // Check if the specific rule is ignored
ruleSuppressions, ruleIgnored := ignores[rule.ID()] ruleSuppressions, ruleIgnored := ignores[id]
ignored := generalIgnored || ruleIgnored ignored := generalIgnored || ruleIgnored
suppressions := append(generalSuppressions, ruleSuppressions...) suppressions := append(generalSuppressions, ruleSuppressions...)
// Track external suppressions. // Track external suppressions.
if gosec.ruleset.IsRuleSuppressed(rule.ID()) { if gosec.ruleset.IsRuleSuppressed(id) {
ignored = true ignored = true
suppressions = append(suppressions, SuppressionInfo{ suppressions = append(suppressions, SuppressionInfo{
Kind: "external", Kind: "external",
Justification: externalSuppressionJustification, Justification: externalSuppressionJustification,
}) })
} }
return suppressions, ignored
}
issue, err := rule.Match(n, gosec.context) func (gosec *Analyzer) updateIssues(issue *Issue, ignored bool, suppressions []SuppressionInfo) {
if err != nil {
file, line := GetLocation(n, gosec.context)
file = path.Base(file)
gosec.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line)
}
if issue != nil { if issue != nil {
if gosec.showIgnored { if gosec.showIgnored {
issue.NoSec = ignored issue.NoSec = ignored
@ -496,8 +588,18 @@ func (gosec *Analyzer) Visit(n ast.Node) ast.Visitor {
gosec.issues = append(gosec.issues, issue) gosec.issues = append(gosec.issues, issue)
} }
} }
}
func toGosecIssue(issue *analyzers.Issue) *Issue {
return &Issue{
File: issue.File,
Line: issue.Line,
Col: issue.Col,
RuleID: issue.AnalyzerID,
What: issue.What,
Confidence: Score(issue.Confidence),
Severity: Score(issue.Severity),
} }
return gosec
} }
// Report returns the current issues discovered and the metrics about the scan // Report returns the current issues discovered and the metrics about the scan

55
analyzers/ssrf.go Normal file
View file

@ -0,0 +1,55 @@
// (c) Copyright gosec's authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package analyzers
import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/buildssa"
"golang.org/x/tools/go/ssa"
)
// newSSRFAnalyzer constructs the analyzer which looks for server-side
// request forgery issues. It depends on the SSA representation produced
// by the buildssa analyzer.
func newSSRFAnalyzer(id string, description string) *analysis.Analyzer {
	analyzer := analysis.Analyzer{
		Name:     id,
		Doc:      description,
		Run:      runSSRF,
		Requires: []*analysis.Analyzer{buildssa.Analyzer},
	}
	return &analyzer
}
// runSSRF is the entry point of the SSRF analyzer.
//
// The analysis itself is a placeholder: it walks the SSA form of every
// source function in dominator preorder and reports the first static call
// it encounters, then stops. Returns (nil, nil) when nothing is found.
func runSSRF(pass *analysis.Pass) (interface{}, error) {
	ssaResult, err := getSSAResult(pass)
	if err != nil {
		return nil, err
	}
	// TODO: implement the analysis
	for _, fn := range ssaResult.SSA.SrcFuncs {
		for _, block := range fn.DomPreorder() {
			for _, instr := range block.Instrs {
				switch instr := instr.(type) {
				case *ssa.Call:
					callee := instr.Call.StaticCallee()
					if callee != nil {
						ssaResult.Logger.Printf("callee: %s\n", callee)
						// Fix: message previously read "not implemeted".
						return newIssue(pass.Analyzer.Name,
							"not implemented",
							pass.Fset, instr.Call.Pos(), Low, High), nil
					}
				}
			}
		}
	}
	return nil, nil
}

95
analyzers/util.go Normal file
View file

@ -0,0 +1,95 @@
// (c) Copyright gosec's authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package analyzers
import (
"fmt"
"go/token"
"log"
"strconv"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/buildssa"
)
// SSAAnalyzerResult contains various information returned by the
// SSA analysis along with some configuration
type SSAAnalyzerResult struct {
	// Config holds the gosec configuration values made available to analyzers
	Config map[string]interface{}
	// Logger is used by analyzers to emit diagnostics while they run
	Logger *log.Logger
	// SSA is the SSA representation of the package, built by the buildssa analyzer
	SSA *buildssa.SSA
}
// Score type used by severity and confidence values.
// It mirrors gosec's own Score type so that analyzers can be built
// without importing the main gosec package.
// TODO: remove this duplicated type
type Score int

const (
	// Low severity or confidence
	Low Score = iota
	// Medium severity or confidence
	Medium
	// High severity or confidence
	High
)
// Issue is returned by a gosec rule if it discovers an issue with the scanned code.
// It mirrors gosec's own Issue type so that analyzers can be built
// without importing the main gosec package.
// TODO: remove this duplicated type
type Issue struct {
	Severity   Score  `json:"severity"`    // issue severity (how problematic it is)
	Confidence Score  `json:"confidence"`  // issue confidence (how sure we are we found it)
	AnalyzerID string `json:"analyzer_id"` // ID of the analyzer that reported the issue (e.g. "G107")
	What       string `json:"details"`     // Human readable explanation
	File       string `json:"file"`        // File name we found it in
	Code       string `json:"code"`        // Impacted code line
	Line       string `json:"line"`        // Line number in file
	Col        string `json:"column"`      // Column number in line
}
// BuildDefaultAnalyzers returns the default list of analyzers
func BuildDefaultAnalyzers() []*analysis.Analyzer {
	defaultAnalyzers := []*analysis.Analyzer{
		newSSRFAnalyzer("G107", "URL provided to HTTP request as taint input"),
	}
	return defaultAnalyzers
}
// getSSAResult retrieves the SSA result from the analysis pass.
// It fails if the buildssa result is absent or has an unexpected type.
func getSSAResult(pass *analysis.Pass) (*SSAAnalyzerResult, error) {
	rawResult, ok := pass.ResultOf[buildssa.Analyzer]
	if !ok {
		return nil, fmt.Errorf("no SSA result found in the analysis pass")
	}
	ssaResult, ok := rawResult.(*SSAAnalyzerResult)
	if !ok {
		return nil, fmt.Errorf("the analysis pass result is not of type SSA")
	}
	return ssaResult, nil
}
// newIssue builds an Issue for the finding reported by an analyzer.
//
// analyzerID is the identifier of the reporting analyzer (e.g. "G107"),
// desc is the human readable description, fileSet and pos locate the
// offending code, and severity/confidence score the finding.
func newIssue(analyzerID string, desc string, fileSet *token.FileSet, pos token.Pos, severity Score, confidence Score) *Issue {
	// One Position lookup yields filename, line and column together,
	// keeping the three fields mutually consistent (the original code
	// mixed file.Name() with separate line/column lookups).
	position := fileSet.Position(pos)
	// TODO: extract the code snippet and map the CWE
	return &Issue{
		File:       position.Filename,
		Line:       strconv.Itoa(position.Line),
		Col:        strconv.Itoa(position.Column),
		Severity:   severity,
		Confidence: confidence,
		AnalyzerID: analyzerID,
		What:       desc,
	}
}

View file

@ -29,6 +29,8 @@ const (
ExcludeRules GlobalOption = "exclude" ExcludeRules GlobalOption = "exclude"
// IncludeRules global option for should be load // IncludeRules global option for should be load
IncludeRules GlobalOption = "include" IncludeRules GlobalOption = "include"
// SSA global option to enable go analysis framework with SSA support
SSA GlobalOption = "ssa"
) )
// Config is used to provide configuration and customization to each of the rules. // Config is used to provide configuration and customization to each of the rules.