diff --git a/analyzer.go b/analyzer.go
index e6329bd..711cc48 100644
--- a/analyzer.go
+++ b/analyzer.go
@@ -12,17 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-// Package core holds the central scanning logic used by GAS
+// Package gas holds the central scanning logic used by GAS
 package gas
 
 import (
     "go/ast"
-    "go/importer"
-    "go/parser"
+    "go/build"
     "go/token"
     "go/types"
     "log"
-    "os"
     "path"
     "reflect"
     "strings"
@@ -30,21 +28,6 @@ import (
     "golang.org/x/tools/go/loader"
 )
 
-// ImportInfo is used to track aliased and initialization only imports.
-type ImportInfo struct {
-    Imported map[string]string
-    Aliased  map[string]string
-    InitOnly map[string]bool
-}
-
-func NewImportInfo() *ImportInfo {
-    return &ImportInfo{
-        make(map[string]string),
-        make(map[string]string),
-        make(map[string]bool),
-    }
-}
-
 // The Context is populated with data parsed from the source code as it is scanned.
 // It is passed through to all rule functions as they are called. Rules may use
 // this data in conjunction withe the encoutered AST node.
@@ -55,19 +38,9 @@ type Context struct {
     Pkg      *types.Package
     Root     *ast.File
     Config   map[string]interface{}
-    Imports  *ImportInfo
+    Imports  *ImportTracker
 }
 
-// The Rule interface used by all rules supported by GAS.
-type Rule interface {
-    Match(ast.Node, *Context) (*Issue, error)
-}
-
-// A RuleSet maps lists of rules to the type of AST node they should be run on.
-// The anaylzer will only invoke rules contained in the list associated with the
-// type of AST node it is currently visiting.
-type RuleSet map[reflect.Type][]Rule
-
 // Metrics used when reporting information about a scanning run.
 type Metrics struct {
     NumFiles int `json:"files"`
@@ -84,134 +57,81 @@ type Analyzer struct {
     context     *Context
     config      Config
     logger      *log.Logger
-    Issues      []*Issue `json:"issues"`
-    Stats       *Metrics `json:"metrics"`
+    issues      []*Issue
+    stats       *Metrics
 }
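
ImportInfo is replaced by an ImportTracker, which is defined outside this file and is not included in this patch. The sketch below is a minimal, assumed shape, inferred only from the calls made elsewhere in this diff (NewImportTracker, TrackPackages, TrackImport) and from the removed ImportInfo fields; anything beyond those calls is an assumption.

package gas

import (
	"go/ast"
	"go/types"
	"strings"
)

// ImportTracker (sketch) is assumed to carry the same three maps the removed
// ImportInfo held: plain, aliased, and initialization-only imports.
type ImportTracker struct {
	Imported map[string]string
	Aliased  map[string]string
	InitOnly map[string]bool
}

func NewImportTracker() *ImportTracker {
	return &ImportTracker{
		Imported: make(map[string]string),
		Aliased:  make(map[string]string),
		InitOnly: make(map[string]bool),
	}
}

// TrackPackages records the packages resolved by the type checker.
func (t *ImportTracker) TrackPackages(pkgs ...*types.Package) {
	for _, pkg := range pkgs {
		t.Imported[pkg.Path()] = pkg.Name()
	}
}

// TrackImport mirrors the ImportSpec handling removed from Visit below:
// it records aliased and initialization-only ("_") imports.
func (t *ImportTracker) TrackImport(n ast.Node) {
	if spec, ok := n.(*ast.ImportSpec); ok {
		importPath := strings.Trim(spec.Path.Value, `"`)
		if spec.Name != nil {
			if spec.Name.Name == "_" {
				t.InitOnly[importPath] = true
			} else {
				t.Aliased[importPath] = spec.Name.Name
			}
		}
	}
}
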
 
 // NewAnalyzer builds a new anaylzer.
-func NewAnalyzer(conf Config, logger *log.Logger) Analyzer {
-    if logger == nil {
-        logger = log.New(os.Stdout, "[gas]", 0)
-    }
+func NewAnalyzer(conf Config, logger *log.Logger) *Analyzer {
     ignoreNoSec := false
     if val, err := conf.Get("ignoreNoSec"); err == nil {
         if override, ok := val.(bool); ok {
             ignoreNoSec = override
         }
     }
 
-    a := Analyzer{
+    return &Analyzer{
         ignoreNosec: ignoreNoSec,
         ruleset:     make(RuleSet),
         context:     &Context{},
         config:      conf,
         logger:      logger,
-        Issues:      make([]*Issue, 0, 16),
-        Stats:       &Metrics{0, 0, 0, 0},
-    }
-
-    return a
-}
-
-func (gas *Analyzer) process(filename string, source interface{}) error {
-    mode := parser.ParseComments
-    gas.context.FileSet = token.NewFileSet()
-    root, err := parser.ParseFile(gas.context.FileSet, filename, source, mode)
-    if err == nil {
-        gas.context.Config = gas.config
-        gas.context.Comments = ast.NewCommentMap(gas.context.FileSet, root, root.Comments)
-        gas.context.Root = root
-
-        // here we get type info
-        gas.context.Info = &types.Info{
-            Types:      make(map[ast.Expr]types.TypeAndValue),
-            Defs:       make(map[*ast.Ident]types.Object),
-            Uses:       make(map[*ast.Ident]types.Object),
-            Selections: make(map[*ast.SelectorExpr]*types.Selection),
-            Scopes:     make(map[ast.Node]*types.Scope),
-            Implicits:  make(map[ast.Node]types.Object),
-        }
-
-        conf := types.Config{Importer: importer.Default()}
-        gas.context.Pkg, err = conf.Check("pkg", gas.context.FileSet, []*ast.File{root}, gas.context.Info)
-        if err != nil {
-            // TODO(gm) Type checker not currently considering all files within a package
-            // see: issue #113
-            gas.logger.Printf(`Error during type checking: "%s"`, err)
-            err = nil
-        }
-
-        gas.context.Imports = NewImportInfo()
-        for _, pkg := range gas.context.Pkg.Imports() {
-            gas.context.Imports.Imported[pkg.Path()] = pkg.Name()
-        }
-        ast.Walk(gas, root)
-        gas.Stats.NumFiles++
-    }
-    return err
-}
-
-// AddRule adds a rule into a rule set list mapped to the given AST node's type.
-// The node is only needed for its type and is not otherwise used.
-func (gas *Analyzer) AddRule(r Rule, nodes []ast.Node) {
-    for _, n := range nodes {
-        t := reflect.TypeOf(n)
-        if val, ok := gas.ruleset[t]; ok {
-            gas.ruleset[t] = append(val, r)
-        } else {
-            gas.ruleset[t] = []Rule{r}
-        }
+        issues:      make([]*Issue, 0, 16),
+        stats:       &Metrics{},
     }
 }
 
-// Process reads in a source file, convert it to an AST and traverse it.
-// Rule methods added with AddRule will be invoked as necessary.
-func (gas *Analyzer) Process(filename string) error {
-    err := gas.process(filename, nil)
-    fun := func(f *token.File) bool {
-        gas.Stats.NumLines += f.LineCount()
-        return true
+func (gas *Analyzer) LoadRules(ruleDefinitions ...RuleBuilder) {
+    for _, builder := range ruleDefinitions {
+        r, nodes := builder(gas.config)
+        gas.ruleset.Register(r, nodes...)
     }
-    gas.context.FileSet.Iterate(fun)
-    return err
 }
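
LoadRules relies on RuleBuilder, RuleSet.Register, and (further down) RuleSet.RegisteredFor, none of which appear in this patch; presumably they now live next to the Rule and RuleSet declarations that were removed from this file above. The following is a rough, assumed sketch based only on how they are called here.

package gas

import (
	"go/ast"
	"go/reflect"
)

// RuleBuilder (assumed signature): takes the scanner configuration and returns
// a rule plus the AST node types it should be invoked for.
type RuleBuilder func(c Config) (Rule, []ast.Node)

// Register (sketch) replaces the removed AddRule method: it maps the rule to
// the concrete type of each node it is interested in.
func (r RuleSet) Register(rule Rule, nodes ...ast.Node) {
	for _, n := range nodes {
		t := reflect.TypeOf(n)
		r[t] = append(r[t], rule)
	}
}

// RegisteredFor (sketch) returns the rules registered for the concrete type of n.
func (r RuleSet) RegisteredFor(n ast.Node) []Rule {
	return r[reflect.TypeOf(n)]
}
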
 
-func (gas *Analyzer) ProcessPackage(prog *loader.Program, pkg *loader.PackageInfo, file *ast.File) error {
+func (gas *Analyzer) Process(packagePath string) error {
 
-    gas.context.FileSet = prog.Fset
-    gas.context.Config = gas.config
-    gas.context.Comments = ast.NewCommentMap(gas.context.FileSet, file, file.Comments)
-    gas.context.Root = file
-    gas.context.Info = &pkg.Info
-    gas.context.Pkg = pkg.Pkg
-    gas.context.Imports = NewImportInfo()
-    for _, imported := range gas.context.Pkg.Imports() {
-        gas.context.Imports.Imported[imported.Path()] = imported.Name()
+    basePackage, err := build.Default.ImportDir(packagePath, build.ImportComment)
+    if err != nil {
+        return err
+    }
+
+    packageConfig := loader.Config{Build: &build.Default}
+    packageFiles := make([]string, 0)
+    for _, filename := range basePackage.GoFiles {
+        packageFiles = append(packageFiles, path.Join(packagePath, filename))
+    }
+
+    packageConfig.CreateFromFilenames(basePackage.Name, packageFiles...)
+    builtPackage, err := packageConfig.Load()
+    if err != nil {
+        return err
+    }
+
+    for _, pkg := range builtPackage.Created {
+        gas.logger.Println("Checking package:", pkg.String())
+        for _, file := range pkg.Files {
+            gas.logger.Println("Checking file:", builtPackage.Fset.File(file.Pos()).Name())
+            gas.context.FileSet = builtPackage.Fset
+            gas.context.Config = gas.config
+            gas.context.Comments = ast.NewCommentMap(gas.context.FileSet, file, file.Comments)
+            gas.context.Root = file
+            gas.context.Info = &pkg.Info
+            gas.context.Pkg = pkg.Pkg
+            gas.context.Imports = NewImportTracker()
+            gas.context.Imports.TrackPackages(gas.context.Pkg.Imports()...)
+            ast.Walk(gas, file)
+            gas.stats.NumFiles++
+            gas.stats.NumLines += builtPackage.Fset.File(file.Pos()).LineCount()
+        }
     }
-    ast.Walk(gas, file)
-    gas.Stats.NumFiles++
-    gas.Stats.NumLines += prog.Fset.File(file.Pos()).LineCount()
     return nil
 }
 
-// ProcessSource will convert a source code string into an AST and traverse it.
-// Rule methods added with AddRule will be invoked as necessary. The string is
-// identified by the filename given but no file IO will be done.
-func (gas *Analyzer) ProcessSource(filename string, source string) error {
-    err := gas.process(filename, source)
-    fun := func(f *token.File) bool {
-        gas.Stats.NumLines += f.LineCount()
-        return true
-    }
-    gas.context.FileSet.Iterate(fun)
-    return err
-}
-
 // ignore a node (and sub-tree) if it is tagged with a "#nosec" comment
 func (gas *Analyzer) ignore(n ast.Node) bool {
     if groups, ok := gas.context.Comments[n]; ok && !gas.ignoreNosec {
         for _, group := range groups {
             if strings.Contains(group.Text(), "#nosec") {
-                gas.Stats.NumNosec++
+                gas.stats.NumNosec++
                 return true
             }
         }
@@ -225,38 +145,26 @@ func (gas *Analyzer) Visit(n ast.Node) ast.Visitor {
     if !gas.ignore(n) {
 
         // Track aliased and initialization imports
-        if imported, ok := n.(*ast.ImportSpec); ok {
-            path := strings.Trim(imported.Path.Value, `"`)
-            if imported.Name != nil {
-                if imported.Name.Name == "_" {
-                    // Initialization import
-                    gas.context.Imports.InitOnly[path] = true
-                } else {
-                    // Aliased import
-                    gas.context.Imports.Aliased[path] = imported.Name.Name
-                }
-            }
-            // unsafe is not included in Package.Imports()
-            if path == "unsafe" {
-                gas.context.Imports.Imported[path] = path
-            }
-        }
+        gas.context.Imports.TrackImport(n)
 
-        if val, ok := gas.ruleset[reflect.TypeOf(n)]; ok {
-            for _, rule := range val {
-                ret, err := rule.Match(n, gas.context)
-                if err != nil {
-                    file, line := GetLocation(n, gas.context)
-                    file = path.Base(file)
-                    gas.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line)
-                }
-                if ret != nil {
-                    gas.Issues = append(gas.Issues, ret)
-                    gas.Stats.NumFound++
-                }
+        for _, rule := range gas.ruleset.RegisteredFor(n) {
+            issue, err := rule.Match(n, gas.context)
+            if err != nil {
+                file, line := GetLocation(n, gas.context)
+                file = path.Base(file)
+                gas.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line)
+            }
+            if issue != nil {
+                gas.issues = append(gas.issues, issue)
+                gas.stats.NumFound++
             }
         }
         return gas
     }
     return nil
 }
+
+// Report returns the current issues discovered and the metrics about the scan
+func (gas *Analyzer) Report() ([]*Issue, *Metrics) {
+    return gas.issues, gas.stats
+}
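
With this file's changes, the analyzer is driven through NewAnalyzer, LoadRules, Process, and Report rather than AddRule/Process/ProcessSource and the exported Issues/Stats fields. Below is a minimal usage sketch built only from calls that appear in this patch; the package path is a placeholder, and a logger must now be supplied since the os.Stdout fallback was removed from NewAnalyzer.

package main

import (
	"log"
	"os"

	"github.com/GoASTScanner/gas"
	"github.com/GoASTScanner/gas/rules"
)

func main() {
	logger := log.New(os.Stderr, "[gas] ", log.LstdFlags)

	// Build the analyzer and register every generated rule definition.
	analyzer := gas.NewAnalyzer(gas.NewConfig(), logger)
	analyzer.LoadRules(rules.Generate().Builders()...)

	// Process type-checks and walks every Go file in the given directory.
	if err := analyzer.Process("./path/to/package"); err != nil { // placeholder path
		logger.Fatal(err)
	}

	// Issues and metrics are no longer exported fields; Report returns both.
	issues, metrics := analyzer.Report()
	logger.Printf("found %d issues in %d files", len(issues), metrics.NumFiles)
}
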
diff --git a/cmd/gas/main.go b/cmd/gas/main.go
index 9844291..7a80c41 100644
--- a/cmd/gas/main.go
+++ b/cmd/gas/main.go
@@ -15,47 +15,22 @@
 package main
 
 import (
-    "encoding/json"
     "flag"
     "fmt"
-    "go/build"
-    "io/ioutil"
     "log"
     "os"
-    "path"
     "path/filepath"
+    "regexp"
     "sort"
     "strings"
 
     "github.com/GoASTScanner/gas"
     "github.com/GoASTScanner/gas/output"
     "github.com/GoASTScanner/gas/rules"
-    "golang.org/x/tools/go/loader"
+    "github.com/kisielk/gotool"
 )
 
-type recursion bool
-
 const (
-    recurse   recursion = true
-    noRecurse recursion = false
-)
-
-var (
-    // #nosec flag
-    flagIgnoreNoSec = flag.Bool("nosec", false, "Ignores #nosec comments when set")
-
-    // format output
-    flagFormat = flag.String("fmt", "text", "Set output format. Valid options are: json, csv, html, or text")
-
-    // output file
-    flagOutput = flag.String("out", "", "Set output file for results")
-
-    // config file
-    flagConfig = flag.String("conf", "", "Path to optional config file")
-
-    // quiet
-    flagQuiet = flag.Bool("quiet", false, "Only show output when errors are found")
-
     usageText = `
 
 GAS - Go AST Scanner
@@ -78,58 +53,36 @@ USAGE:
     $ gas -exclude=G101 ./...
 
 `
+)
+
+var (
+    // #nosec flag
+    flagIgnoreNoSec = flag.Bool("nosec", false, "Ignores #nosec comments when set")
+
+    // format output
+    flagFormat = flag.String("fmt", "text", "Set output format. Valid options are: json, csv, html, or text")
+
+    // output file
+    flagOutput = flag.String("out", "", "Set output file for results")
+
+    // config file
+    flagConfig = flag.String("conf", "", "Path to optional config file")
+
+    // quiet
+    flagQuiet = flag.Bool("quiet", false, "Only show output when errors are found")
+
+    // rules to explicitly include
+    flagRulesInclude = flag.String("include", "", "Comma separated list of rules IDs to include. (see rule list)")
+
+    // rules to explicitly exclude
+    flagRulesExclude = flag.String("exclude", "", "Comma separated list of rules IDs to exclude. (see rule list)")
+
+    // log to file or stderr
+    flagLogfile = flag.String("log", "", "Log messages to file rather than stderr")
 
     logger *log.Logger
 )
 
-func extendConfList(conf map[string]interface{}, name string, inputStr string) {
-    if inputStr == "" {
-        conf[name] = []string{}
-    } else {
-        input := strings.Split(inputStr, ",")
-        if val, ok := conf[name]; ok {
-            if data, ok := val.(*[]string); ok {
-                conf[name] = append(*data, input...)
-            } else {
-                logger.Fatal("Config item must be a string list: ", name)
-            }
-        } else {
-            conf[name] = input
-        }
-    }
-}
-
-func buildConfig(incRules string, excRules string) map[string]interface{} {
-    config := make(map[string]interface{})
-    if flagConfig != nil && *flagConfig != "" { // parse config if we have one
-        if data, err := ioutil.ReadFile(*flagConfig); err == nil {
-            if err := json.Unmarshal(data, &(config)); err != nil {
-                logger.Fatal("Could not parse JSON config: ", *flagConfig, ": ", err)
-            }
-        } else {
-            logger.Fatal("Could not read config file: ", *flagConfig)
-        }
-    }
-
-    // add in CLI include and exclude data
-    extendConfList(config, "include", incRules)
-    extendConfList(config, "exclude", excRules)
-
-    // override ignoreNosec if given on CLI
-    if flagIgnoreNoSec != nil {
-        config["ignoreNosec"] = *flagIgnoreNoSec
-    } else {
-        val, ok := config["ignoreNosec"]
-        if !ok {
-            config["ignoreNosec"] = false
-        } else if _, ok := val.(bool); !ok {
-            logger.Fatal("Config value must be a bool: 'ignoreNosec'")
-        }
-    }
-
-    return config
-}
-
 // #nosec
 func usage() {
 
@@ -152,43 +105,47 @@
 
     fmt.Fprint(os.Stderr, "\n")
 }
 
-// TODO(gm) This needs to be refactored (potentially included in Analyzer)
-func analyzePackage(packageDirectory string, metrics *gas.Metrics, config gas.Config, logger *log.Logger, ruleDefs rules.RuleList) ([]*gas.Issue, error) {
-
-    basePackage, err := build.Default.ImportDir(packageDirectory, build.ImportComment)
-    if err != nil {
-        return nil, err
-    }
-
-    packageConfig := loader.Config{Build: &build.Default}
-    packageFiles := make([]string, 0)
-    for _, filename := range basePackage.GoFiles {
-        packageFiles = append(packageFiles, path.Join(packageDirectory, filename))
-    }
-
-    packageConfig.CreateFromFilenames(basePackage.Name, packageFiles...)
-    builtPackage, err := packageConfig.Load()
-    if err != nil {
-        return nil, err
-    }
-    issues := make([]*gas.Issue, 0)
-
-    for _, pkg := range builtPackage.Created {
-        analyzer := gas.NewAnalyzer(config, logger)
-        for _, rule := range ruleDefs {
-            analyzer.AddRule(rule.Create(config))
+func loadConfig(configFile string) (gas.Config, error) {
+    config := gas.NewConfig()
+    if configFile != "" {
+        file, err := os.Open(configFile)
+        if err != nil {
+            return nil, err
         }
-        for _, file := range pkg.Files {
-            analyzer.ProcessPackage(builtPackage, pkg, file)
+        defer file.Close()
+        if _, err := config.ReadFrom(file); err != nil {
+            return nil, err
         }
-        issues = append(issues, analyzer.Issues...)
-        metrics.NumFiles += analyzer.Stats.NumFiles
-        metrics.NumFound += analyzer.Stats.NumFound
-        metrics.NumLines += analyzer.Stats.NumLines
-        metrics.NumNosec += analyzer.Stats.NumNosec
+    }
+    return config, nil
+}
+
+func loadRules(include, exclude string) rules.RuleList {
+    filters := make([]rules.RuleFilter, 0)
+    if include != "" {
+        including := strings.Split(include, ",")
+        filters = append(filters, rules.NewRuleFilter(false, including...))
     }
-    return issues, nil
+    if exclude != "" {
+        excluding := strings.Split(exclude, ",")
+        filters = append(filters, rules.NewRuleFilter(true, excluding...))
+    }
+    return rules.Generate(filters...)
+}
+
+func saveOutput(filename, format string, issues []*gas.Issue, metrics *gas.Metrics) error {
+    if filename != "" {
+        outfile, err := os.Create(filename)
+        if err != nil {
+            return err
+        }
+        defer outfile.Close()
+        output.CreateReport(outfile, format, issues, metrics)
+    } else {
+        output.CreateReport(os.Stdout, format, issues, metrics)
+    }
+    return nil
 }
 
 func main() {
@@ -200,93 +157,61 @@
     excluded := newFileList("*_test.go")
     flag.Var(excluded, "skip", "File pattern to exclude from scan. Uses simple * globs and requires full or partial match")
 
-    incRules := ""
-    flag.StringVar(&incRules, "include", "", "Comma separated list of rules IDs to include. (see rule list)")
-
-    excRules := ""
-    flag.StringVar(&excRules, "exclude", "", "Comma separated list of rules IDs to exclude. (see rule list)")
-
-    // Custom commands / utilities to run instead of default analyzer
-    tools := newUtils()
-    flag.Var(tools, "tool", "GAS utilities to assist with rule development")
-
-    // Setup logging
-    logger = log.New(os.Stderr, "[gas] ", log.LstdFlags)
-
     // Parse command line arguments
     flag.Parse()
 
     // Ensure at least one file was specified
     if flag.NArg() == 0 {
-
         fmt.Fprintf(os.Stderr, "\nError: FILE [FILE...] or './...' expected\n")
         flag.Usage()
         os.Exit(1)
     }
 
-    // Run utils instead of analysis
-    if len(tools.call) > 0 {
-        tools.run(flag.Args()...)
-        os.Exit(0)
+    // Setup logging
+    logWriter := os.Stderr
+    if *flagLogfile != "" {
+        var e error
+        logWriter, e = os.Create(*flagLogfile)
+        if e != nil {
+            flag.Usage()
+            log.Fatal(e)
+        }
     }
+    logger = log.New(logWriter, "[gas] ", log.LstdFlags)
 
     // Load config
-    config := gas.NewConfig()
-    if flagConfig != nil && *flagConfig != "" {
-        file, err := os.Open(*flagConfig)
-        if err != nil {
-            logger.Fatal(err)
-        }
-        defer file.Close()
-        if _, err := config.ReadFrom(file); err != nil {
-            logger.Fatal(err)
-        }
+    config, err := loadConfig(*flagConfig)
+    if err != nil {
+        logger.Fatal(err)
     }
 
-    filters := make([]rules.RuleFilter, 0)
-    if incRules != "" {
-        including := strings.Split(incRules, ",")
-        filters = append(filters, rules.NewRuleFilter(false, including...))
-    }
-    if excRules != "" {
-        excluding := strings.Split(excRules, ",")
-        filters = append(filters, rules.NewRuleFilter(true, excluding...))
-    }
-    ruleDefinitions := rules.Generate(filters...)
-    issues := make([]*gas.Issue, 0)
-    metrics := &gas.Metrics{}
-    for _, arg := range flag.Args() {
-        if arg == "./..." {
-            baseDirectory, err := os.Getwd()
-            if err != nil {
-                log.Fatal(err)
-            }
-            filepath.Walk(baseDirectory, func(path string, finfo os.FileInfo, e error) error {
-                dir := filepath.Base(path)
-                if finfo.IsDir() {
-                    // TODO(gm) - This...
-                    if strings.HasPrefix(dir, ".") || dir == "vendor" || dir == "GoDeps" {
-                        log.Printf("Skipping %s\n", path)
-                        return filepath.SkipDir
-                    }
-                    newIssues, err := analyzePackage(path, metrics, config, logger, ruleDefinitions)
-                    if err != nil {
-                        log.Println(err)
-                    } else {
-                        issues = append(issues, newIssues...)
-                    }
-                }
-                return nil
-            })
-        } else {
-            newIssues, err := analyzePackage(arg, metrics, config, logger, ruleDefinitions)
-            if err != nil {
-                log.Fatal(err)
-            }
-            issues = newIssues
+    // Load enabled rule definitions
+    ruleDefinitions := loadRules(*flagRulesInclude, *flagRulesExclude)
+
+    // Create the analyzer
+    analyzer := gas.NewAnalyzer(config, logger)
+    analyzer.LoadRules(ruleDefinitions.Builders()...)
+
+    vendor := regexp.MustCompile(`[\\/]vendor([\\/]|$)`)
+
+    // Iterate over packages on the import paths
+    for _, pkg := range gotool.ImportPaths(flag.Args()) {
+
+        // Skip vendor directory
+        if vendor.MatchString(pkg) {
+            continue
+        }
+
+        abspath, _ := filepath.Abs(pkg)
+        logger.Println("Searching directory:", abspath)
+        if err := analyzer.Process(pkg); err != nil {
+            logger.Fatal(err)
         }
     }
 
+    // Collect the results
+    issues, metrics := analyzer.Report()
+
     issuesFound := len(issues) > 0
     // Exit quietly if nothing was found
     if !issuesFound && *flagQuiet {
@@ -294,17 +219,13 @@
     }
 
     // Create output report
-    if *flagOutput != "" {
-        outfile, err := os.Create(*flagOutput)
-        if err != nil {
-            logger.Fatalf("Couldn't open: %s for writing. Reason - %s", *flagOutput, err)
-        }
-        defer outfile.Close()
-        output.CreateReport(outfile, *flagFormat, issues, metrics)
-    } else {
-        output.CreateReport(os.Stdout, *flagFormat, issues, metrics)
+    if err := saveOutput(*flagOutput, *flagFormat, issues, metrics); err != nil {
+        logger.Fatal(err)
     }
 
+    // Finialize logging
+    logWriter.Close()
+
     // Do we have an issue? If so exit 1
     if issuesFound {
         os.Exit(1
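
The rewritten main() stops special-casing "vendor" and "GoDeps" directories during a filepath.Walk and instead filters the paths returned by gotool.ImportPaths with a regular expression. The standalone snippet below only illustrates what that expression matches; the paths are made-up examples.

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as in the patch: "vendor" must be a complete path element.
	vendor := regexp.MustCompile(`[\\/]vendor([\\/]|$)`)
	for _, pkg := range []string{
		"github.com/GoASTScanner/gas",                                   // kept
		"github.com/GoASTScanner/gas/vendor/github.com/kisielk/gotool",  // skipped
		"example.com/project/vendorlib",                                 // kept: not a full "vendor" element
	} {
		fmt.Printf("%-65s skipped=%v\n", pkg, vendor.MatchString(pkg))
	}
}
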