2016-07-20 11:02:01 +01:00
|
|
|
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
|
|
//
|
|
|
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
|
|
// you may not use this file except in compliance with the License.
|
|
|
|
// You may obtain a copy of the License at
|
|
|
|
//
|
|
|
|
// http://www.apache.org/licenses/LICENSE-2.0
|
|
|
|
//
|
|
|
|
// Unless required by applicable law or agreed to in writing, software
|
|
|
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
|
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
|
|
// See the License for the specific language governing permissions and
|
|
|
|
// limitations under the License.
|
|
|
|
|
|
|
|
package main
|
|
|
|
|
|
|
|
import (
|
2016-08-05 14:27:21 +01:00
|
|
|
"encoding/json"
|
2016-07-20 11:02:01 +01:00
|
|
|
"flag"
|
|
|
|
"fmt"
|
2016-08-05 14:27:21 +01:00
|
|
|
"io/ioutil"
|
2016-07-20 11:02:01 +01:00
|
|
|
"log"
|
|
|
|
"os"
|
|
|
|
"path/filepath"
|
2016-08-10 12:51:03 +01:00
|
|
|
"sort"
|
2016-07-20 11:02:01 +01:00
|
|
|
"strings"
|
|
|
|
|
2016-11-02 23:54:20 +00:00
|
|
|
gas "github.com/GoASTScanner/gas/core"
|
|
|
|
"github.com/GoASTScanner/gas/output"
|
2016-07-20 11:02:01 +01:00
|
|
|
)
|
|
|
|
|
2016-12-02 18:40:36 +00:00
|
|
|
// recursion controls whether listFiles descends into subdirectories.
type recursion bool

const (
	// recurse walks the full directory tree.
	recurse recursion = true
	// noRecurse considers only the directory's immediate entries.
	noRecurse recursion = false
)
|
|
|
|
|
|
|
|
var (
	// Ignore #nosec suppression comments when true (-nosec).
	flagIgnoreNoSec = flag.Bool("nosec", false, "Ignores #nosec comments when set")

	// Report format (-fmt): json, csv, html, or text.
	flagFormat = flag.String("fmt", "text", "Set output format. Valid options are: json, csv, html, or text")

	// Destination file for the report (-out); stdout when empty.
	flagOutput = flag.String("out", "", "Set output file for results")

	// Path to an optional JSON config file (-conf).
	flagConfig = flag.String("conf", "", "Path to optional config file")

	// Suppress all output when no issues are found (-quiet).
	flagQuiet = flag.Bool("quiet", false, "Only show output when errors are found")

	// Help banner printed by usage() before the flag defaults.
	usageText = `
GAS - Go AST Scanner

Gas analyzes Go source code to look for common programming mistakes that
can lead to security problems.

USAGE:

	# Check a single Go file
	$ gas example.go

	# Check all files under the current directory and save results in
	# json format.
	$ gas -fmt=json -out=results.json ./...

	# Run a specific set of rules (by default all rules will be run):
	$ gas -include=G101,G203,G401 ./...

	# Run all rules except the provided
	$ gas -exclude=G101 ./...

`

	// Shared logger, initialized in main() before any analysis runs.
	logger *log.Logger
)
|
2016-08-05 14:27:21 +01:00
|
|
|
|
2016-08-10 12:51:03 +01:00
|
|
|
func extendConfList(conf map[string]interface{}, name string, inputStr string) {
|
|
|
|
if inputStr == "" {
|
|
|
|
conf[name] = []string{}
|
|
|
|
} else {
|
|
|
|
input := strings.Split(inputStr, ",")
|
|
|
|
if val, ok := conf[name]; ok {
|
|
|
|
if data, ok := val.(*[]string); ok {
|
|
|
|
conf[name] = append(*data, input...)
|
|
|
|
} else {
|
|
|
|
logger.Fatal("Config item must be a string list: ", name)
|
|
|
|
}
|
2016-08-05 14:27:21 +01:00
|
|
|
} else {
|
2016-08-10 12:51:03 +01:00
|
|
|
conf[name] = input
|
2016-08-05 14:27:21 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
func buildConfig(incRules string, excRules string) map[string]interface{} {
|
|
|
|
config := make(map[string]interface{})
|
|
|
|
if flagConfig != nil && *flagConfig != "" { // parse config if we have one
|
|
|
|
if data, err := ioutil.ReadFile(*flagConfig); err == nil {
|
|
|
|
if err := json.Unmarshal(data, &(config)); err != nil {
|
|
|
|
logger.Fatal("Could not parse JSON config: ", *flagConfig, ": ", err)
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
logger.Fatal("Could not read config file: ", *flagConfig)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// add in CLI include and exclude data
|
2016-08-10 12:51:03 +01:00
|
|
|
extendConfList(config, "include", incRules)
|
|
|
|
extendConfList(config, "exclude", excRules)
|
2016-08-05 14:27:21 +01:00
|
|
|
|
|
|
|
// override ignoreNosec if given on CLI
|
|
|
|
if flagIgnoreNoSec != nil {
|
|
|
|
config["ignoreNosec"] = *flagIgnoreNoSec
|
|
|
|
} else {
|
|
|
|
val, ok := config["ignoreNosec"]
|
|
|
|
if !ok {
|
|
|
|
config["ignoreNosec"] = false
|
|
|
|
} else if _, ok := val.(bool); !ok {
|
|
|
|
logger.Fatal("Config value must be a bool: 'ignoreNosec'")
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
return config
|
|
|
|
}
|
|
|
|
|
2016-12-02 18:20:23 +00:00
|
|
|
// #nosec
|
2016-07-20 11:02:01 +01:00
|
|
|
func usage() {
|
2016-12-02 18:20:23 +00:00
|
|
|
|
2016-07-20 11:02:01 +01:00
|
|
|
fmt.Fprintln(os.Stderr, usageText)
|
|
|
|
fmt.Fprint(os.Stderr, "OPTIONS:\n\n")
|
|
|
|
flag.PrintDefaults()
|
2016-08-10 12:51:03 +01:00
|
|
|
fmt.Fprint(os.Stderr, "\n\nRULES:\n\n")
|
|
|
|
|
|
|
|
// sorted rule list for eas of reading
|
|
|
|
rl := GetFullRuleList()
|
|
|
|
keys := make([]string, 0, len(rl))
|
|
|
|
for key := range rl {
|
|
|
|
keys = append(keys, key)
|
|
|
|
}
|
|
|
|
sort.Strings(keys)
|
|
|
|
for _, k := range keys {
|
|
|
|
v := rl[k]
|
|
|
|
fmt.Fprintf(os.Stderr, "\t%s: %s\n", k, v.description)
|
|
|
|
}
|
|
|
|
fmt.Fprint(os.Stderr, "\n")
|
2016-07-20 11:02:01 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// main wires up the CLI: it registers flags, runs optional utility tools,
// builds the analyzer configuration, scans the requested files, and emits
// the report. Exits 1 when issues are found or arguments are missing.
func main() {

	// Setup usage description
	flag.Usage = usage

	// Test files are excluded from scanning by default; -skip adds more
	// glob patterns.
	excluded := newFileList("*_test.go")
	flag.Var(excluded, "skip", "File pattern to exclude from scan. Uses simple * globs and requires full or partial match")

	// Rule selection flags, merged into the config by buildConfig.
	incRules := ""
	flag.StringVar(&incRules, "include", "", "Comma separated list of rules IDs to include. (see rule list)")

	excRules := ""
	flag.StringVar(&excRules, "exclude", "", "Comma separated list of rules IDs to exclude. (see rule list)")

	// Custom commands / utilities to run instead of default analyzer
	tools := newUtils()
	flag.Var(tools, "tool", "GAS utilities to assist with rule development")

	// Setup logging (logger is the package-level *log.Logger).
	logger = log.New(os.Stderr, "[gas] ", log.LstdFlags)

	// Parse command line arguments
	flag.Parse()

	// Ensure at least one file was specified
	if flag.NArg() == 0 {
		fmt.Fprintf(os.Stderr, "\nError: FILE [FILE...] or './...' expected\n")
		flag.Usage()
		os.Exit(1)
	}

	// Run utils instead of analysis when -tool was given.
	if len(tools.call) > 0 {
		tools.run(flag.Args()...)
		os.Exit(0)
	}

	// Setup analyzer from the merged file + CLI configuration.
	config := buildConfig(incRules, excRules)
	analyzer := gas.NewAnalyzer(config, logger)
	AddRules(&analyzer, config)

	// Expand arguments ("./...", dirs, files) into concrete file paths.
	toAnalyze := getFilesToAnalyze(flag.Args(), excluded)

	for _, file := range toAnalyze {
		logger.Printf(`Processing "%s"...`, file)
		if err := analyzer.Process(file); err != nil {
			logger.Printf(`Failed to process: "%s"`, file)
			logger.Println(err)
			logger.Fatalf(`Halting execution.`)
		}
	}

	issuesFound := len(analyzer.Issues) > 0

	// Exit quietly if nothing was found
	if !issuesFound && *flagQuiet {
		os.Exit(0)
	}

	// Create output report, to -out file when given, stdout otherwise.
	if *flagOutput != "" {
		outfile, err := os.Create(*flagOutput)
		if err != nil {
			logger.Fatalf("Couldn't open: %s for writing. Reason - %s", *flagOutput, err)
		}
		defer outfile.Close()
		output.CreateReport(outfile, *flagFormat, &analyzer)
	} else {
		output.CreateReport(os.Stdout, *flagFormat, &analyzer)
	}

	// Do we have an issue? If so exit 1
	if issuesFound {
		os.Exit(1)
	}
}
|
2016-12-02 18:40:36 +00:00
|
|
|
|
|
|
|
// getFilesToAnalyze lists all files
|
|
|
|
func getFilesToAnalyze(paths []string, excluded *fileList) []string {
|
2016-12-02 23:34:12 +00:00
|
|
|
//log.Println("getFilesToAnalyze: start")
|
2016-12-02 18:40:36 +00:00
|
|
|
var toAnalyze []string
|
2017-01-27 17:16:36 +00:00
|
|
|
for _, relativePath := range paths {
|
2016-12-02 23:34:12 +00:00
|
|
|
//log.Printf("getFilesToAnalyze: processing \"%s\"\n", path)
|
2016-12-02 18:40:36 +00:00
|
|
|
// get the absolute path before doing anything else
|
2017-01-27 17:16:36 +00:00
|
|
|
path, err := filepath.Abs(relativePath)
|
2016-12-02 18:40:36 +00:00
|
|
|
if err != nil {
|
|
|
|
log.Fatal(err)
|
|
|
|
}
|
2017-01-27 17:16:36 +00:00
|
|
|
if filepath.Base(relativePath) == "..." {
|
2016-12-02 18:40:36 +00:00
|
|
|
toAnalyze = append(
|
|
|
|
toAnalyze,
|
|
|
|
listFiles(filepath.Dir(path), recurse, excluded)...,
|
|
|
|
)
|
|
|
|
} else {
|
|
|
|
var (
|
|
|
|
finfo os.FileInfo
|
|
|
|
err error
|
|
|
|
)
|
|
|
|
if finfo, err = os.Stat(path); err != nil {
|
|
|
|
logger.Fatal(err)
|
|
|
|
}
|
|
|
|
if !finfo.IsDir() {
|
|
|
|
if shouldInclude(path, excluded) {
|
|
|
|
toAnalyze = append(toAnalyze, path)
|
|
|
|
}
|
|
|
|
} else {
|
|
|
|
toAnalyze = listFiles(path, noRecurse, excluded)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
2016-12-02 23:34:12 +00:00
|
|
|
//log.Println("getFilesToAnalyze: end")
|
2016-12-02 18:40:36 +00:00
|
|
|
return toAnalyze
|
|
|
|
}
|
|
|
|
|
|
|
|
// listFiles returns a list of all files found that pass the shouldInclude check.
|
|
|
|
// If doRecursiveWalk it true, it will walk the tree rooted at absPath, otherwise it
|
|
|
|
// will only include files directly within the dir referenced by absPath.
|
|
|
|
func listFiles(absPath string, doRecursiveWalk recursion, excluded *fileList) []string {
|
|
|
|
var files []string
|
|
|
|
|
|
|
|
walk := func(path string, info os.FileInfo, err error) error {
|
|
|
|
if info.IsDir() && doRecursiveWalk == noRecurse {
|
|
|
|
return filepath.SkipDir
|
|
|
|
}
|
|
|
|
if shouldInclude(path, excluded) {
|
|
|
|
files = append(files, path)
|
|
|
|
}
|
|
|
|
return nil
|
|
|
|
}
|
|
|
|
|
|
|
|
if err := filepath.Walk(absPath, walk); err != nil {
|
|
|
|
log.Fatal(err)
|
|
|
|
}
|
|
|
|
return files
|
|
|
|
}
|
|
|
|
|
|
|
|
// shouldInclude checks if a specific path which is expected to reference
|
|
|
|
// a regular file should be included
|
|
|
|
func shouldInclude(path string, excluded *fileList) bool {
|
|
|
|
return filepath.Ext(path) == ".go" && !excluded.Contains(path)
|
|
|
|
}
|