Simplify analyzer and command line interface

The analyzer now handles whole packages rather than one-off files. This
simplifies the CLI significantly.
Grant Murphy 2017-05-09 21:26:53 -07:00
parent 65b18da711
commit 026fe4c534
2 changed files with 171 additions and 342 deletions
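
For orientation, a minimal sketch of how the reworked, package-oriented API fits together, using only calls that appear in the diff below. This is illustrative rather than code from the commit; the package path passed to Process is a placeholder and error handling is trimmed.

    package main

    import (
        "log"
        "os"

        "github.com/GoASTScanner/gas"
        "github.com/GoASTScanner/gas/rules"
    )

    func main() {
        logger := log.New(os.Stderr, "[gas] ", log.LstdFlags)

        // Build an analyzer and register every generated rule with it.
        analyzer := gas.NewAnalyzer(gas.NewConfig(), logger)
        analyzer.LoadRules(rules.Generate().Builders()...)

        // The analyzer now scans a package directory rather than single files.
        // "./some/package" is an illustrative path, not one from the commit.
        if err := analyzer.Process("./some/package"); err != nil {
            logger.Fatal(err)
        }

        // Issues and metrics are collected through Report instead of exported fields.
        issues, metrics := analyzer.Report()
        logger.Printf("%d issues in %d files", len(issues), metrics.NumFiles)
    }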

File 1 of 2: the analyzer (package gas)

@@ -12,17 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-// Package core holds the central scanning logic used by GAS
+// Package gas holds the central scanning logic used by GAS
 package gas

 import (
     "go/ast"
-    "go/importer"
-    "go/parser"
+    "go/build"
     "go/token"
     "go/types"
     "log"
-    "os"
     "path"
     "reflect"
     "strings"
@ -30,21 +28,6 @@ import (
"golang.org/x/tools/go/loader" "golang.org/x/tools/go/loader"
) )
// ImportInfo is used to track aliased and initialization only imports.
type ImportInfo struct {
Imported map[string]string
Aliased map[string]string
InitOnly map[string]bool
}
func NewImportInfo() *ImportInfo {
return &ImportInfo{
make(map[string]string),
make(map[string]string),
make(map[string]bool),
}
}
// The Context is populated with data parsed from the source code as it is scanned. // The Context is populated with data parsed from the source code as it is scanned.
// It is passed through to all rule functions as they are called. Rules may use // It is passed through to all rule functions as they are called. Rules may use
// this data in conjunction withe the encoutered AST node. // this data in conjunction withe the encoutered AST node.
@@ -55,19 +38,9 @@ type Context struct {
     Pkg      *types.Package
     Root     *ast.File
     Config   map[string]interface{}
-    Imports  *ImportInfo
+    Imports  *ImportTracker
 }

-// The Rule interface used by all rules supported by GAS.
-type Rule interface {
-    Match(ast.Node, *Context) (*Issue, error)
-}
-
-// A RuleSet maps lists of rules to the type of AST node they should be run on.
-// The anaylzer will only invoke rules contained in the list associated with the
-// type of AST node it is currently visiting.
-type RuleSet map[reflect.Type][]Rule
-
 // Metrics used when reporting information about a scanning run.
 type Metrics struct {
     NumFiles int `json:"files"`
@@ -84,134 +57,81 @@ type Analyzer struct {
     context     *Context
     config      Config
     logger      *log.Logger
-    Issues      []*Issue `json:"issues"`
-    Stats       *Metrics `json:"metrics"`
+    issues      []*Issue
+    stats       *Metrics
 }

 // NewAnalyzer builds a new anaylzer.
-func NewAnalyzer(conf Config, logger *log.Logger) Analyzer {
-    if logger == nil {
-        logger = log.New(os.Stdout, "[gas]", 0)
-    }
+func NewAnalyzer(conf Config, logger *log.Logger) *Analyzer {
     ignoreNoSec := false
     if val, err := conf.Get("ignoreNoSec"); err == nil {
         if override, ok := val.(bool); ok {
             ignoreNoSec = override
         }
     }
-    a := Analyzer{
+    return &Analyzer{
         ignoreNosec: ignoreNoSec,
         ruleset:     make(RuleSet),
         context:     &Context{},
         config:      conf,
         logger:      logger,
-        Issues:      make([]*Issue, 0, 16),
-        Stats:       &Metrics{0, 0, 0, 0},
+        issues:      make([]*Issue, 0, 16),
+        stats:       &Metrics{},
     }
-    return a
 }

-func (gas *Analyzer) process(filename string, source interface{}) error {
-    mode := parser.ParseComments
-    gas.context.FileSet = token.NewFileSet()
-    root, err := parser.ParseFile(gas.context.FileSet, filename, source, mode)
-    if err == nil {
-        gas.context.Config = gas.config
-        gas.context.Comments = ast.NewCommentMap(gas.context.FileSet, root, root.Comments)
-        gas.context.Root = root
-        // here we get type info
-        gas.context.Info = &types.Info{
-            Types:      make(map[ast.Expr]types.TypeAndValue),
-            Defs:       make(map[*ast.Ident]types.Object),
-            Uses:       make(map[*ast.Ident]types.Object),
-            Selections: make(map[*ast.SelectorExpr]*types.Selection),
-            Scopes:     make(map[ast.Node]*types.Scope),
-            Implicits:  make(map[ast.Node]types.Object),
-        }
-        conf := types.Config{Importer: importer.Default()}
-        gas.context.Pkg, err = conf.Check("pkg", gas.context.FileSet, []*ast.File{root}, gas.context.Info)
-        if err != nil {
-            // TODO(gm) Type checker not currently considering all files within a package
-            // see: issue #113
-            gas.logger.Printf(`Error during type checking: "%s"`, err)
-            err = nil
-        }
-        gas.context.Imports = NewImportInfo()
-        for _, pkg := range gas.context.Pkg.Imports() {
-            gas.context.Imports.Imported[pkg.Path()] = pkg.Name()
-        }
-        ast.Walk(gas, root)
-        gas.Stats.NumFiles++
-    }
-    return err
-}
-
-// AddRule adds a rule into a rule set list mapped to the given AST node's type.
-// The node is only needed for its type and is not otherwise used.
-func (gas *Analyzer) AddRule(r Rule, nodes []ast.Node) {
-    for _, n := range nodes {
-        t := reflect.TypeOf(n)
-        if val, ok := gas.ruleset[t]; ok {
-            gas.ruleset[t] = append(val, r)
-        } else {
-            gas.ruleset[t] = []Rule{r}
-        }
-    }
-}
-
-// Process reads in a source file, convert it to an AST and traverse it.
-// Rule methods added with AddRule will be invoked as necessary.
-func (gas *Analyzer) Process(filename string) error {
-    err := gas.process(filename, nil)
-    fun := func(f *token.File) bool {
-        gas.Stats.NumLines += f.LineCount()
-        return true
-    }
-    gas.context.FileSet.Iterate(fun)
-    return err
-}
-
-func (gas *Analyzer) ProcessPackage(prog *loader.Program, pkg *loader.PackageInfo, file *ast.File) error {
-    gas.context.FileSet = prog.Fset
-    gas.context.Config = gas.config
-    gas.context.Comments = ast.NewCommentMap(gas.context.FileSet, file, file.Comments)
-    gas.context.Root = file
-    gas.context.Info = &pkg.Info
-    gas.context.Pkg = pkg.Pkg
-    gas.context.Imports = NewImportInfo()
-    for _, imported := range gas.context.Pkg.Imports() {
-        gas.context.Imports.Imported[imported.Path()] = imported.Name()
-    }
-    ast.Walk(gas, file)
-    gas.Stats.NumFiles++
-    gas.Stats.NumLines += prog.Fset.File(file.Pos()).LineCount()
-    return nil
-}
-
-// ProcessSource will convert a source code string into an AST and traverse it.
-// Rule methods added with AddRule will be invoked as necessary. The string is
-// identified by the filename given but no file IO will be done.
-func (gas *Analyzer) ProcessSource(filename string, source string) error {
-    err := gas.process(filename, source)
-    fun := func(f *token.File) bool {
-        gas.Stats.NumLines += f.LineCount()
-        return true
-    }
-    gas.context.FileSet.Iterate(fun)
-    return err
-}
+func (gas *Analyzer) LoadRules(ruleDefinitions ...RuleBuilder) {
+    for _, builder := range ruleDefinitions {
+        r, nodes := builder(gas.config)
+        gas.ruleset.Register(r, nodes...)
+    }
+}
+
+func (gas *Analyzer) Process(packagePath string) error {
+    basePackage, err := build.Default.ImportDir(packagePath, build.ImportComment)
+    if err != nil {
+        return err
+    }
+
+    packageConfig := loader.Config{Build: &build.Default}
+    packageFiles := make([]string, 0)
+    for _, filename := range basePackage.GoFiles {
+        packageFiles = append(packageFiles, path.Join(packagePath, filename))
+    }
+
+    packageConfig.CreateFromFilenames(basePackage.Name, packageFiles...)
+    builtPackage, err := packageConfig.Load()
+    if err != nil {
+        return err
+    }
+
+    for _, pkg := range builtPackage.Created {
+        gas.logger.Println("Checking package:", pkg.String())
+        for _, file := range pkg.Files {
+            gas.logger.Println("Checking file:", builtPackage.Fset.File(file.Pos()).Name())
+            gas.context.FileSet = builtPackage.Fset
+            gas.context.Config = gas.config
+            gas.context.Comments = ast.NewCommentMap(gas.context.FileSet, file, file.Comments)
+            gas.context.Root = file
+            gas.context.Info = &pkg.Info
+            gas.context.Pkg = pkg.Pkg
+            gas.context.Imports = NewImportTracker()
+            gas.context.Imports.TrackPackages(gas.context.Pkg.Imports()...)
+            ast.Walk(gas, file)
+            gas.stats.NumFiles++
+            gas.stats.NumLines += builtPackage.Fset.File(file.Pos()).LineCount()
+        }
+    }
+    return nil
+}

 // ignore a node (and sub-tree) if it is tagged with a "#nosec" comment
 func (gas *Analyzer) ignore(n ast.Node) bool {
     if groups, ok := gas.context.Comments[n]; ok && !gas.ignoreNosec {
         for _, group := range groups {
             if strings.Contains(group.Text(), "#nosec") {
-                gas.Stats.NumNosec++
+                gas.stats.NumNosec++
                 return true
             }
         }
@@ -225,38 +145,26 @@ func (gas *Analyzer) Visit(n ast.Node) ast.Visitor {
     if !gas.ignore(n) {

         // Track aliased and initialization imports
-        if imported, ok := n.(*ast.ImportSpec); ok {
-            path := strings.Trim(imported.Path.Value, `"`)
-            if imported.Name != nil {
-                if imported.Name.Name == "_" {
-                    // Initialization import
-                    gas.context.Imports.InitOnly[path] = true
-                } else {
-                    // Aliased import
-                    gas.context.Imports.Aliased[path] = imported.Name.Name
-                }
-            }
-            // unsafe is not included in Package.Imports()
-            if path == "unsafe" {
-                gas.context.Imports.Imported[path] = path
-            }
-        }
+        gas.context.Imports.TrackImport(n)

-        if val, ok := gas.ruleset[reflect.TypeOf(n)]; ok {
-            for _, rule := range val {
-                ret, err := rule.Match(n, gas.context)
-                if err != nil {
-                    file, line := GetLocation(n, gas.context)
-                    file = path.Base(file)
-                    gas.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line)
-                }
-                if ret != nil {
-                    gas.Issues = append(gas.Issues, ret)
-                    gas.Stats.NumFound++
-                }
-            }
-        }
+        for _, rule := range gas.ruleset.RegisteredFor(n) {
+            issue, err := rule.Match(n, gas.context)
+            if err != nil {
+                file, line := GetLocation(n, gas.context)
+                file = path.Base(file)
+                gas.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line)
+            }
+            if issue != nil {
+                gas.issues = append(gas.issues, issue)
+                gas.stats.NumFound++
+            }
+        }
         return gas
     }
     return nil
 }
+
+// Report returns the current issues discovered and the metrics about the scan
+func (gas *Analyzer) Report() ([]*Issue, *Metrics) {
+    return gas.issues, gas.stats
+}

File 2 of 2: the gas command-line entry point (package main)

@@ -15,47 +15,22 @@
 package main

 import (
-    "encoding/json"
     "flag"
     "fmt"
-    "go/build"
-    "io/ioutil"
     "log"
     "os"
-    "path"
     "path/filepath"
+    "regexp"
     "sort"
     "strings"

     "github.com/GoASTScanner/gas"
     "github.com/GoASTScanner/gas/output"
     "github.com/GoASTScanner/gas/rules"
-    "golang.org/x/tools/go/loader"
+    "github.com/kisielk/gotool"
 )

-type recursion bool
-
 const (
-    recurse   recursion = true
-    noRecurse recursion = false
-)
-
-var (
-    // #nosec flag
-    flagIgnoreNoSec = flag.Bool("nosec", false, "Ignores #nosec comments when set")
-    // format output
-    flagFormat = flag.String("fmt", "text", "Set output format. Valid options are: json, csv, html, or text")
-    // output file
-    flagOutput = flag.String("out", "", "Set output file for results")
-    // config file
-    flagConfig = flag.String("conf", "", "Path to optional config file")
-    // quiet
-    flagQuiet = flag.Bool("quiet", false, "Only show output when errors are found")
     usageText = `
 GAS - Go AST Scanner
@@ -78,58 +53,36 @@ USAGE:
     $ gas -exclude=G101 ./...

 `
+)
+
+var (
+    // #nosec flag
+    flagIgnoreNoSec = flag.Bool("nosec", false, "Ignores #nosec comments when set")
+    // format output
+    flagFormat = flag.String("fmt", "text", "Set output format. Valid options are: json, csv, html, or text")
+    // output file
+    flagOutput = flag.String("out", "", "Set output file for results")
+    // config file
+    flagConfig = flag.String("conf", "", "Path to optional config file")
+    // quiet
+    flagQuiet = flag.Bool("quiet", false, "Only show output when errors are found")
+    // rules to explicitly include
+    flagRulesInclude = flag.String("include", "", "Comma separated list of rules IDs to include. (see rule list)")
+    // rules to explicitly exclude
+    flagRulesExclude = flag.String("exclude", "", "Comma separated list of rules IDs to exclude. (see rule list)")
+    // log to file or stderr
+    flagLogfile = flag.String("log", "", "Log messages to file rather than stderr")
     logger *log.Logger
 )

-func extendConfList(conf map[string]interface{}, name string, inputStr string) {
-    if inputStr == "" {
-        conf[name] = []string{}
-    } else {
-        input := strings.Split(inputStr, ",")
-        if val, ok := conf[name]; ok {
-            if data, ok := val.(*[]string); ok {
-                conf[name] = append(*data, input...)
-            } else {
-                logger.Fatal("Config item must be a string list: ", name)
-            }
-        } else {
-            conf[name] = input
-        }
-    }
-}
-
-func buildConfig(incRules string, excRules string) map[string]interface{} {
-    config := make(map[string]interface{})
-    if flagConfig != nil && *flagConfig != "" { // parse config if we have one
-        if data, err := ioutil.ReadFile(*flagConfig); err == nil {
-            if err := json.Unmarshal(data, &(config)); err != nil {
-                logger.Fatal("Could not parse JSON config: ", *flagConfig, ": ", err)
-            }
-        } else {
-            logger.Fatal("Could not read config file: ", *flagConfig)
-        }
-    }
-
-    // add in CLI include and exclude data
-    extendConfList(config, "include", incRules)
-    extendConfList(config, "exclude", excRules)
-
-    // override ignoreNosec if given on CLI
-    if flagIgnoreNoSec != nil {
-        config["ignoreNosec"] = *flagIgnoreNoSec
-    } else {
-        val, ok := config["ignoreNosec"]
-        if !ok {
-            config["ignoreNosec"] = false
-        } else if _, ok := val.(bool); !ok {
-            logger.Fatal("Config value must be a bool: 'ignoreNosec'")
-        }
-    }
-    return config
-}
-
 // #nosec
 func usage() {
@@ -152,43 +105,47 @@ func usage() {
     fmt.Fprint(os.Stderr, "\n")
 }

-// TODO(gm) This needs to be refactored (potentially included in Analyzer)
-func analyzePackage(packageDirectory string, metrics *gas.Metrics, config gas.Config, logger *log.Logger, ruleDefs rules.RuleList) ([]*gas.Issue, error) {
-
-    basePackage, err := build.Default.ImportDir(packageDirectory, build.ImportComment)
-    if err != nil {
-        return nil, err
-    }
-
-    packageConfig := loader.Config{Build: &build.Default}
-    packageFiles := make([]string, 0)
-    for _, filename := range basePackage.GoFiles {
-        packageFiles = append(packageFiles, path.Join(packageDirectory, filename))
-    }
-
-    packageConfig.CreateFromFilenames(basePackage.Name, packageFiles...)
-    builtPackage, err := packageConfig.Load()
-    if err != nil {
-        return nil, err
-    }
-
-    issues := make([]*gas.Issue, 0)
-    for _, pkg := range builtPackage.Created {
-        analyzer := gas.NewAnalyzer(config, logger)
-        for _, rule := range ruleDefs {
-            analyzer.AddRule(rule.Create(config))
-        }
-        for _, file := range pkg.Files {
-            analyzer.ProcessPackage(builtPackage, pkg, file)
-        }
-        issues = append(issues, analyzer.Issues...)
-        metrics.NumFiles += analyzer.Stats.NumFiles
-        metrics.NumFound += analyzer.Stats.NumFound
-        metrics.NumLines += analyzer.Stats.NumLines
-        metrics.NumNosec += analyzer.Stats.NumNosec
-    }
-    return issues, nil
+func loadConfig(configFile string) (gas.Config, error) {
+    config := gas.NewConfig()
+    if configFile != "" {
+        file, err := os.Open(configFile)
+        if err != nil {
+            return nil, err
+        }
+        defer file.Close()
+        if _, err := config.ReadFrom(file); err != nil {
+            return nil, err
+        }
+    }
+    return config, nil
+}
+
+func loadRules(include, exclude string) rules.RuleList {
+    filters := make([]rules.RuleFilter, 0)
+    if include != "" {
+        including := strings.Split(include, ",")
+        filters = append(filters, rules.NewRuleFilter(false, including...))
+    }
+    if exclude != "" {
+        excluding := strings.Split(exclude, ",")
+        filters = append(filters, rules.NewRuleFilter(true, excluding...))
+    }
+    return rules.Generate(filters...)
+}
+
+func saveOutput(filename, format string, issues []*gas.Issue, metrics *gas.Metrics) error {
+    if filename != "" {
+        outfile, err := os.Create(filename)
+        if err != nil {
+            return err
+        }
+        defer outfile.Close()
+        output.CreateReport(outfile, format, issues, metrics)
+    } else {
+        output.CreateReport(os.Stdout, format, issues, metrics)
+    }
+    return nil
 }

 func main() {
@@ -200,93 +157,61 @@ func main() {
     excluded := newFileList("*_test.go")
     flag.Var(excluded, "skip", "File pattern to exclude from scan. Uses simple * globs and requires full or partial match")

-    incRules := ""
-    flag.StringVar(&incRules, "include", "", "Comma separated list of rules IDs to include. (see rule list)")
-    excRules := ""
-    flag.StringVar(&excRules, "exclude", "", "Comma separated list of rules IDs to exclude. (see rule list)")
-
     // Custom commands / utilities to run instead of default analyzer
     tools := newUtils()
     flag.Var(tools, "tool", "GAS utilities to assist with rule development")

-    // Setup logging
-    logger = log.New(os.Stderr, "[gas] ", log.LstdFlags)
-
     // Parse command line arguments
     flag.Parse()

     // Ensure at least one file was specified
     if flag.NArg() == 0 {
         fmt.Fprintf(os.Stderr, "\nError: FILE [FILE...] or './...' expected\n")
         flag.Usage()
         os.Exit(1)
     }

-    // Run utils instead of analysis
-    if len(tools.call) > 0 {
-        tools.run(flag.Args()...)
-        os.Exit(0)
+    // Setup logging
+    logWriter := os.Stderr
+    if *flagLogfile != "" {
+        var e error
+        logWriter, e = os.Create(*flagLogfile)
+        if e != nil {
+            flag.Usage()
+            log.Fatal(e)
+        }
     }
+    logger = log.New(logWriter, "[gas] ", log.LstdFlags)

     // Load config
-    config := gas.NewConfig()
-    if flagConfig != nil && *flagConfig != "" {
-        file, err := os.Open(*flagConfig)
-        if err != nil {
-            logger.Fatal(err)
-        }
-        defer file.Close()
-        if _, err := config.ReadFrom(file); err != nil {
-            logger.Fatal(err)
-        }
-    }
+    config, err := loadConfig(*flagConfig)
+    if err != nil {
+        logger.Fatal(err)
+    }

-    filters := make([]rules.RuleFilter, 0)
-    if incRules != "" {
-        including := strings.Split(incRules, ",")
-        filters = append(filters, rules.NewRuleFilter(false, including...))
-    }
-    if excRules != "" {
-        excluding := strings.Split(excRules, ",")
-        filters = append(filters, rules.NewRuleFilter(true, excluding...))
-    }
-    ruleDefinitions := rules.Generate(filters...)
+    // Load enabled rule definitions
+    ruleDefinitions := loadRules(*flagRulesInclude, *flagRulesExclude)

-    issues := make([]*gas.Issue, 0)
-    metrics := &gas.Metrics{}
-    for _, arg := range flag.Args() {
-        if arg == "./..." {
-            baseDirectory, err := os.Getwd()
-            if err != nil {
-                log.Fatal(err)
-            }
-            filepath.Walk(baseDirectory, func(path string, finfo os.FileInfo, e error) error {
-                dir := filepath.Base(path)
-                if finfo.IsDir() {
-                    // TODO(gm) - This...
-                    if strings.HasPrefix(dir, ".") || dir == "vendor" || dir == "GoDeps" {
-                        log.Printf("Skipping %s\n", path)
-                        return filepath.SkipDir
-                    }
-                    newIssues, err := analyzePackage(path, metrics, config, logger, ruleDefinitions)
-                    if err != nil {
-                        log.Println(err)
-                    } else {
-                        issues = append(issues, newIssues...)
-                    }
-                }
-                return nil
-            })
-        } else {
-            newIssues, err := analyzePackage(arg, metrics, config, logger, ruleDefinitions)
-            if err != nil {
-                log.Fatal(err)
-            }
-            issues = newIssues
-        }
-    }
+    // Create the analyzer
+    analyzer := gas.NewAnalyzer(config, logger)
+    analyzer.LoadRules(ruleDefinitions.Builders()...)
+
+    vendor := regexp.MustCompile(`[\\/]vendor([\\/]|$)`)
+
+    // Iterate over packages on the import paths
+    for _, pkg := range gotool.ImportPaths(flag.Args()) {
+        // Skip vendor directory
+        if vendor.MatchString(pkg) {
+            continue
+        }
+
+        abspath, _ := filepath.Abs(pkg)
+        logger.Println("Searching directory:", abspath)
+        if err := analyzer.Process(pkg); err != nil {
+            logger.Fatal(err)
+        }
+    }
+
+    // Collect the results
+    issues, metrics := analyzer.Report()

     issuesFound := len(issues) > 0
     // Exit quietly if nothing was found
     if !issuesFound && *flagQuiet {
@@ -294,17 +219,13 @@ func main() {
     }

     // Create output report
-    if *flagOutput != "" {
-        outfile, err := os.Create(*flagOutput)
-        if err != nil {
-            logger.Fatalf("Couldn't open: %s for writing. Reason - %s", *flagOutput, err)
-        }
-        defer outfile.Close()
-        output.CreateReport(outfile, *flagFormat, issues, metrics)
-    } else {
-        output.CreateReport(os.Stdout, *flagFormat, issues, metrics)
+    if err := saveOutput(*flagOutput, *flagFormat, issues, metrics); err != nil {
+        logger.Fatal(err)
     }

+    // Finialize logging
+    logWriter.Close()
+
     // Do we have an issue? If so exit 1
     if issuesFound {
         os.Exit(1)
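
As a quick aside, the vendor-skipping pattern introduced in main() above can be checked in isolation. The snippet below is illustrative only; apart from the gas import path, the sample package paths are made up.

    package main

    import (
        "fmt"
        "regexp"
    )

    func main() {
        // Same pattern as in main() above: a "vendor" path element,
        // with either Unix or Windows separators.
        vendor := regexp.MustCompile(`[\\/]vendor([\\/]|$)`)

        for _, pkg := range []string{
            "github.com/GoASTScanner/gas",       // scanned
            "github.com/example/app/vendor/foo", // skipped: hypothetical path
            `C:\work\app\vendor`,                // skipped: hypothetical Windows path
        } {
            fmt.Printf("%-40s skip=%v\n", pkg, vendor.MatchString(pkg))
        }
    }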