Mirror of https://github.com/securego/gosec.git (synced 2024-12-26 04:25:52 +00:00)

Merge pull request #146 from GoASTScanner/experimental

Merge experimental / refactor

Commit 3520a5ae85: 106 changed files with 3043 additions and 12012 deletions

.gitignore (vendored): 1 addition, 1 deletion

@@ -3,11 +3,11 @@
 *.a
 *.so
 *.swp
-gas

 # Folders
 _obj
 _test
+vendor

 # Architecture specific extensions/prefixes
 *.[568vq]

@@ -4,3 +4,11 @@ before_script:
 go:
 - 1.5
 - tip
+install:
+- go get -v github.com/onsi/ginkgo/ginkgo
+- go get -v github.com/onsi/gomega
+- go get -v -t ./...
+- export PATH=$PATH:$HOME/gopath/bin
+
+script: ginkgo -r
+

Godeps/Godeps.json (generated, new file): 285 lines added

@@ -0,0 +1,285 @@
{
  "ImportPath": "github.com/GoASTScanner/gas",
  "GoVersion": "go1.9",
  "GodepVersion": "v79",
  "Deps": [
    {
      "ImportPath": "github.com/nbutton23/zxcvbn-go",
      "Rev": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4"
    },
    {
      "ImportPath": "github.com/nbutton23/zxcvbn-go/adjacency",
      "Rev": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4"
    },
    {
      "ImportPath": "github.com/nbutton23/zxcvbn-go/data",
      "Rev": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4"
    },
    {
      "ImportPath": "github.com/nbutton23/zxcvbn-go/entropy",
      "Rev": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4"
    },
    {
      "ImportPath": "github.com/nbutton23/zxcvbn-go/frequency",
      "Rev": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4"
    },
    {
      "ImportPath": "github.com/nbutton23/zxcvbn-go/match",
      "Rev": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4"
    },
    {
      "ImportPath": "github.com/nbutton23/zxcvbn-go/matching",
      "Rev": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4"
    },
    {
      "ImportPath": "github.com/nbutton23/zxcvbn-go/scoring",
      "Rev": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4"
    },
    {
      "ImportPath": "github.com/nbutton23/zxcvbn-go/utils/math",
      "Rev": "a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/config",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/internal/codelocation",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/internal/containernode",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/internal/failer",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/internal/leafnodes",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/internal/remote",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/internal/spec",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/internal/spec_iterator",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/internal/specrunner",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/internal/suite",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/internal/testingtproxy",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/internal/writer",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/reporters",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/reporters/stenographer",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/reporters/stenographer/support/go-colorable",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/reporters/stenographer/support/go-isatty",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/ginkgo/types",
      "Comment": "v1.4.0-4-g11459a8",
      "Rev": "11459a886d9cd66b319dac7ef1e917ee221372c9"
    },
    {
      "ImportPath": "github.com/onsi/gomega",
      "Comment": "v1.2.0-2-gdcabb60",
      "Rev": "dcabb60a477c2b6f456df65037cb6708210fbb02"
    },
    {
      "ImportPath": "github.com/onsi/gomega/format",
      "Comment": "v1.2.0-2-gdcabb60",
      "Rev": "dcabb60a477c2b6f456df65037cb6708210fbb02"
    },
    {
      "ImportPath": "github.com/onsi/gomega/internal/assertion",
      "Comment": "v1.2.0-2-gdcabb60",
      "Rev": "dcabb60a477c2b6f456df65037cb6708210fbb02"
    },
    {
      "ImportPath": "github.com/onsi/gomega/internal/asyncassertion",
      "Comment": "v1.2.0-2-gdcabb60",
      "Rev": "dcabb60a477c2b6f456df65037cb6708210fbb02"
    },
    {
      "ImportPath": "github.com/onsi/gomega/internal/oraclematcher",
      "Comment": "v1.2.0-2-gdcabb60",
      "Rev": "dcabb60a477c2b6f456df65037cb6708210fbb02"
    },
    {
      "ImportPath": "github.com/onsi/gomega/internal/testingtsupport",
      "Comment": "v1.2.0-2-gdcabb60",
      "Rev": "dcabb60a477c2b6f456df65037cb6708210fbb02"
    },
    {
      "ImportPath": "github.com/onsi/gomega/matchers",
      "Comment": "v1.2.0-2-gdcabb60",
      "Rev": "dcabb60a477c2b6f456df65037cb6708210fbb02"
    },
    {
      "ImportPath": "github.com/onsi/gomega/matchers/support/goraph/bipartitegraph",
      "Comment": "v1.2.0-2-gdcabb60",
      "Rev": "dcabb60a477c2b6f456df65037cb6708210fbb02"
    },
    {
      "ImportPath": "github.com/onsi/gomega/matchers/support/goraph/edge",
      "Comment": "v1.2.0-2-gdcabb60",
      "Rev": "dcabb60a477c2b6f456df65037cb6708210fbb02"
    },
    {
      "ImportPath": "github.com/onsi/gomega/matchers/support/goraph/node",
      "Comment": "v1.2.0-2-gdcabb60",
      "Rev": "dcabb60a477c2b6f456df65037cb6708210fbb02"
    },
    {
      "ImportPath": "github.com/onsi/gomega/matchers/support/goraph/util",
      "Comment": "v1.2.0-2-gdcabb60",
      "Rev": "dcabb60a477c2b6f456df65037cb6708210fbb02"
    },
    {
      "ImportPath": "github.com/onsi/gomega/types",
      "Comment": "v1.2.0-2-gdcabb60",
      "Rev": "dcabb60a477c2b6f456df65037cb6708210fbb02"
    },
    {
      "ImportPath": "golang.org/x/net/html",
      "Rev": "8351a756f30f1297fe94bbf4b767ec589c6ea6d0"
    },
    {
      "ImportPath": "golang.org/x/net/html/atom",
      "Rev": "8351a756f30f1297fe94bbf4b767ec589c6ea6d0"
    },
    {
      "ImportPath": "golang.org/x/net/html/charset",
      "Rev": "8351a756f30f1297fe94bbf4b767ec589c6ea6d0"
    },
    {
      "ImportPath": "golang.org/x/sys/unix",
      "Rev": "164713f0dfcec4e80be8b53e1f0811f5f0d84578"
    },
    {
      "ImportPath": "golang.org/x/text/encoding",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/text/encoding/charmap",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/text/encoding/htmlindex",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/text/encoding/internal",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/text/encoding/internal/identifier",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/text/encoding/japanese",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/text/encoding/korean",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/text/encoding/simplifiedchinese",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/text/encoding/traditionalchinese",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/text/encoding/unicode",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/text/internal/tag",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/text/internal/utf8internal",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/text/language",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/text/runes",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/text/transform",
      "Rev": "1cbadb444a806fd9430d14ad08967ed91da4fa0a"
    },
    {
      "ImportPath": "golang.org/x/tools/go/ast/astutil",
      "Rev": "e531a2a1c15f94033f6fa87666caeb19a688175f"
    },
    {
      "ImportPath": "golang.org/x/tools/go/buildutil",
      "Rev": "e531a2a1c15f94033f6fa87666caeb19a688175f"
    },
    {
      "ImportPath": "golang.org/x/tools/go/loader",
      "Rev": "e531a2a1c15f94033f6fa87666caeb19a688175f"
    },
    {
      "ImportPath": "gopkg.in/yaml.v2",
      "Rev": "eb3733d160e74a9c7e442f435eb3bea458e1d19f"
    }
  ]
}

Godeps/Readme (generated, new file): 5 lines added

@@ -0,0 +1,5 @@
This directory tree is generated automatically by godep.

Please do not edit.

See https://github.com/tools/godep for more information.

@@ -64,12 +64,8 @@ $ gas -exclude=G303 ./...

 #### Excluding files:

-Gas can be told to ignore paths that match a supplied pattern using the 'skip' command line option. This is
-accomplished via [go-glob](github.com/ryanuber/go-glob). Multiple patterns can be specified as follows:
-
-```
-$ gas -skip=tests* -skip=*_example.go ./...
-```
+Gas will ignore dependencies in your vendor directory and any files
+that are not considered build artifacts by the compiler (so test files).

 #### Annotating code

analyzer.go (new file): 183 lines added

@@ -0,0 +1,183 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package gas holds the central scanning logic used by GAS
package gas

import (
    "go/ast"
    "go/build"
    "go/parser"
    "go/token"
    "go/types"
    "log"
    "os"
    "path"
    "reflect"
    "strings"

    "golang.org/x/tools/go/loader"
)

// The Context is populated with data parsed from the source code as it is scanned.
// It is passed through to all rule functions as they are called. Rules may use
// this data in conjunction withe the encoutered AST node.
type Context struct {
    FileSet  *token.FileSet
    Comments ast.CommentMap
    Info     *types.Info
    Pkg      *types.Package
    Root     *ast.File
    Config   map[string]interface{}
    Imports  *ImportTracker
}

// Metrics used when reporting information about a scanning run.
type Metrics struct {
    NumFiles int `json:"files"`
    NumLines int `json:"lines"`
    NumNosec int `json:"nosec"`
    NumFound int `json:"found"`
}

// Analyzer object is the main object of GAS. It has methods traverse an AST
// and invoke the correct checking rules as on each node as required.
type Analyzer struct {
    ignoreNosec bool
    ruleset     RuleSet
    context     *Context
    config      Config
    logger      *log.Logger
    issues      []*Issue
    stats       *Metrics
}

// NewAnalyzer builds a new anaylzer.
func NewAnalyzer(conf Config, logger *log.Logger) *Analyzer {
    ignoreNoSec := false
    if setting, err := conf.GetGlobal("nosec"); err == nil {
        ignoreNoSec = setting == "true" || setting == "enabled"
    }
    if logger == nil {
        logger = log.New(os.Stderr, "[gas]", log.LstdFlags)
    }
    return &Analyzer{
        ignoreNosec: ignoreNoSec,
        ruleset:     make(RuleSet),
        context:     &Context{},
        config:      conf,
        logger:      logger,
        issues:      make([]*Issue, 0, 16),
        stats:       &Metrics{},
    }
}

// LoadRules instantiates all the rules to be used when analyzing source
// packages
func (gas *Analyzer) LoadRules(ruleDefinitions ...RuleBuilder) {
    for _, builder := range ruleDefinitions {
        r, nodes := builder(gas.config)
        gas.ruleset.Register(r, nodes...)
    }
}

// Process kicks off the analysis process for a given package
func (gas *Analyzer) Process(packagePath string) error {

    basePackage, err := build.Default.ImportDir(packagePath, build.ImportComment)
    if err != nil {
        return err
    }

    packageConfig := loader.Config{Build: &build.Default, ParserMode: parser.ParseComments}
    var packageFiles []string
    for _, filename := range basePackage.GoFiles {
        packageFiles = append(packageFiles, path.Join(packagePath, filename))
    }

    packageConfig.CreateFromFilenames(basePackage.Name, packageFiles...)
    builtPackage, err := packageConfig.Load()
    if err != nil {
        return err
    }

    for _, pkg := range builtPackage.Created {
        gas.logger.Println("Checking package:", pkg.String())
        for _, file := range pkg.Files {
            gas.logger.Println("Checking file:", builtPackage.Fset.File(file.Pos()).Name())
            gas.context.FileSet = builtPackage.Fset
            gas.context.Config = gas.config
            gas.context.Comments = ast.NewCommentMap(gas.context.FileSet, file, file.Comments)
            gas.context.Root = file
            gas.context.Info = &pkg.Info
            gas.context.Pkg = pkg.Pkg
            gas.context.Imports = NewImportTracker()
            gas.context.Imports.TrackPackages(gas.context.Pkg.Imports()...)
            ast.Walk(gas, file)
            gas.stats.NumFiles++
            gas.stats.NumLines += builtPackage.Fset.File(file.Pos()).LineCount()
        }
    }
    return nil
}

// ignore a node (and sub-tree) if it is tagged with a "#nosec" comment
func (gas *Analyzer) ignore(n ast.Node) bool {
    if groups, ok := gas.context.Comments[n]; ok && !gas.ignoreNosec {
        for _, group := range groups {
            if strings.Contains(group.Text(), "#nosec") {
                gas.stats.NumNosec++
                return true
            }
        }
    }
    return false
}

// Visit runs the GAS visitor logic over an AST created by parsing go code.
// Rule methods added with AddRule will be invoked as necessary.
func (gas *Analyzer) Visit(n ast.Node) ast.Visitor {
    if !gas.ignore(n) {

        // Track aliased and initialization imports
        gas.context.Imports.TrackImport(n)

        for _, rule := range gas.ruleset.RegisteredFor(n) {
            issue, err := rule.Match(n, gas.context)
            if err != nil {
                file, line := GetLocation(n, gas.context)
                file = path.Base(file)
                gas.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line)
            }
            if issue != nil {
                gas.issues = append(gas.issues, issue)
                gas.stats.NumFound++
            }
        }
        return gas
    }
    return nil
}

// Report returns the current issues discovered and the metrics about the scan
func (gas *Analyzer) Report() ([]*Issue, *Metrics) {
    return gas.issues, gas.stats
}

// Reset clears state such as context, issues and metrics from the configured analyzer
func (gas *Analyzer) Reset() {
    gas.context = &Context{}
    gas.issues = make([]*Issue, 0, 16)
    gas.stats = &Metrics{}
}
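
The new package-level API above replaces the old per-file flow from the core package. Below is a minimal sketch of driving it from Go code, pieced together from the calls introduced in this commit (analyzer.go, cmd/gas/main.go and the tests); the "./mypackage" path and log prefix are illustrative only:

```go
package main

import (
    "log"
    "os"

    "github.com/GoASTScanner/gas"
    "github.com/GoASTScanner/gas/rules"
)

func main() {
    logger := log.New(os.Stderr, "[gas] ", log.LstdFlags)

    // Empty config; #nosec handling can be overridden via config.SetGlobal("nosec", "true").
    config := gas.NewConfig()

    // Build the analyzer and register every generated rule.
    analyzer := gas.NewAnalyzer(config, logger)
    analyzer.LoadRules(rules.Generate().Builders()...)

    // Process expects a package directory, not a single file.
    if err := analyzer.Process("./mypackage"); err != nil {
        logger.Fatal(err)
    }

    issues, metrics := analyzer.Report()
    logger.Printf("scanned %d files, found %d issues", metrics.NumFiles, len(issues))
}
```

Note that the per-file ProcessSource entry point of the old core package has no direct equivalent here; whole packages are loaded through golang.org/x/tools/go/loader.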

analyzer_test.go (new file): 134 lines added

@@ -0,0 +1,134 @@
package gas_test

import (
    "bytes"
    "io/ioutil"
    "log"
    "os"
    "strings"

    "github.com/GoASTScanner/gas"
    "github.com/GoASTScanner/gas/rules"

    "github.com/GoASTScanner/gas/testutils"
    . "github.com/onsi/ginkgo"
    . "github.com/onsi/gomega"
)

var _ = Describe("Analyzer", func() {

    var (
        analyzer *gas.Analyzer
        logger   *log.Logger
        output   *bytes.Buffer
    )
    BeforeEach(func() {
        logger, output = testutils.NewLogger()
        analyzer = gas.NewAnalyzer(nil, logger)
    })

    Context("when processing a package", func() {

        It("should return an error if the package contains no Go files", func() {
            analyzer.LoadRules(rules.Generate().Builders()...)
            dir, err := ioutil.TempDir("", "empty")
            defer os.RemoveAll(dir)
            Expect(err).ShouldNot(HaveOccurred())
            err = analyzer.Process(dir)
            Expect(err).Should(HaveOccurred())
            Expect(err.Error()).Should(MatchRegexp("no buildable Go source files"))
        })

        It("should return an error if the package fails to build", func() {
            analyzer.LoadRules(rules.Generate().Builders()...)
            pkg := testutils.NewTestPackage()
            defer pkg.Close()
            pkg.AddFile("wonky.go", `func main(){ println("forgot the package")}`)
            pkg.Build()

            err := analyzer.Process(pkg.Path)
            Expect(err).Should(HaveOccurred())
            Expect(err.Error()).Should(MatchRegexp(`expected 'package'`))

        })

        It("should be able to analyze mulitple Go files", func() {
            analyzer.LoadRules(rules.Generate().Builders()...)
            pkg := testutils.NewTestPackage()
            defer pkg.Close()
            pkg.AddFile("foo.go", `
                package main
                func main(){
                    bar()
                }`)
            pkg.AddFile("bar.go", `
                package main
                func bar(){
                    println("package has two files!")
                }`)
            pkg.Build()
            err := analyzer.Process(pkg.Path)
            Expect(err).ShouldNot(HaveOccurred())
            _, metrics := analyzer.Report()
            Expect(metrics.NumFiles).To(Equal(2))
        })

        It("should find errors when nosec is not in use", func() {

            // Rule for MD5 weak crypto usage
            sample := testutils.SampleCodeG401[0]
            source := sample.Code
            analyzer.LoadRules(rules.Generate(rules.NewRuleFilter(false, "G401")).Builders()...)

            controlPackage := testutils.NewTestPackage()
            defer controlPackage.Close()
            controlPackage.AddFile("md5.go", source)
            controlPackage.Build()
            analyzer.Process(controlPackage.Path)
            controlIssues, _ := analyzer.Report()
            Expect(controlIssues).Should(HaveLen(sample.Errors))

        })

        It("should not report errors when a nosec comment is present", func() {
            // Rule for MD5 weak crypto usage
            sample := testutils.SampleCodeG401[0]
            source := sample.Code
            analyzer.LoadRules(rules.Generate(rules.NewRuleFilter(false, "G401")).Builders()...)

            nosecPackage := testutils.NewTestPackage()
            defer nosecPackage.Close()
            nosecSource := strings.Replace(source, "h := md5.New()", "h := md5.New() // #nosec", 1)
            nosecPackage.AddFile("md5.go", nosecSource)
            nosecPackage.Build()

            analyzer.Process(nosecPackage.Path)
            nosecIssues, _ := analyzer.Report()
            Expect(nosecIssues).Should(BeEmpty())
        })
    })

    It("should be possible to overwrite nosec comments, and report issues", func() {

        // Rule for MD5 weak crypto usage
        sample := testutils.SampleCodeG401[0]
        source := sample.Code

        // overwrite nosec option
        nosecIgnoreConfig := gas.NewConfig()
        nosecIgnoreConfig.SetGlobal("nosec", "true")
        customAnalyzer := gas.NewAnalyzer(nosecIgnoreConfig, logger)
        customAnalyzer.LoadRules(rules.Generate(rules.NewRuleFilter(false, "G401")).Builders()...)

        nosecPackage := testutils.NewTestPackage()
        defer nosecPackage.Close()
        nosecSource := strings.Replace(source, "h := md5.New()", "h := md5.New() // #nosec", 1)
        nosecPackage.AddFile("md5.go", nosecSource)
        nosecPackage.Build()

        customAnalyzer.Process(nosecPackage.Path)
        nosecIssues, _ := customAnalyzer.Report()
        Expect(nosecIssues).Should(HaveLen(sample.Errors))

    })
})

@@ -11,7 +11,7 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.

-package core
+package gas

 import (
 	"go/ast"

@@ -19,23 +19,23 @@ import (

 type set map[string]bool

-/// CallList is used to check for usage of specific packages
-/// and functions.
+// CallList is used to check for usage of specific packages
+// and functions.
 type CallList map[string]set

-/// NewCallList creates a new empty CallList
+// NewCallList creates a new empty CallList
 func NewCallList() CallList {
 	return make(CallList)
 }

-/// AddAll will add several calls to the call list at once
+// AddAll will add several calls to the call list at once
 func (c CallList) AddAll(selector string, idents ...string) {
 	for _, ident := range idents {
 		c.Add(selector, ident)
 	}
 }

-/// Add a selector and call to the call list
+// Add a selector and call to the call list
 func (c CallList) Add(selector, ident string) {
 	if _, ok := c[selector]; !ok {
 		c[selector] = make(set)

@@ -43,7 +43,7 @@ func (c CallList) Add(selector, ident string) {
 	c[selector][ident] = true
 }

-/// Contains returns true if the package and function are
+// Contains returns true if the package and function are
 /// members of this call list.
 func (c CallList) Contains(selector, ident string) bool {
 	if idents, ok := c[selector]; ok {

@@ -53,21 +53,22 @@ func (c CallList) Contains(selector, ident string) bool {
 	return false
 }

-/// ContainsCallExpr resolves the call expression name and type
+// ContainsCallExpr resolves the call expression name and type
 /// or package and determines if it exists within the CallList
-func (c CallList) ContainsCallExpr(n ast.Node, ctx *Context) bool {
+func (c CallList) ContainsCallExpr(n ast.Node, ctx *Context) *ast.CallExpr {
 	selector, ident, err := GetCallInfo(n, ctx)
 	if err != nil {
-		return false
+		return nil
 	}

 	// Try direct resolution
 	if c.Contains(selector, ident) {
-		return true
+		return n.(*ast.CallExpr)
 	}

 	// Also support explicit path
-	if path, ok := GetImportPath(selector, ctx); ok {
-		return c.Contains(path, ident)
+	if path, ok := GetImportPath(selector, ctx); ok && c.Contains(path, ident) {
+		return n.(*ast.CallExpr)
 	}
-	return false
+	return nil
 }
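
The behavioural change worth noting in the last hunk is the return type of ContainsCallExpr: callers now receive the matched *ast.CallExpr (or nil) rather than a bool. A small sketch of a caller under the new contract follows; the package name and the usesCall wrapper are made up for illustration:

```go
package example

import (
    "go/ast"

    "github.com/GoASTScanner/gas"
)

// usesCall reports whether n is one of the listed calls. Under the old core
// API this was simply `if calls.ContainsCallExpr(n, ctx) { ... }`; the new
// version hands back the matched expression so its arguments can be inspected.
func usesCall(calls gas.CallList, n ast.Node, ctx *gas.Context) bool {
    if ce := calls.ContainsCallExpr(n, ctx); ce != nil {
        _ = ce.Args // ce is the matched *ast.CallExpr
        return true
    }
    return false
}
```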

call_list_test.go (new file): 86 lines added

@@ -0,0 +1,86 @@
package gas_test

import (
    "go/ast"

    "github.com/GoASTScanner/gas"
    "github.com/GoASTScanner/gas/testutils"
    . "github.com/onsi/ginkgo"
    . "github.com/onsi/gomega"
)

var _ = Describe("call list", func() {
    var (
        calls gas.CallList
    )
    BeforeEach(func() {
        calls = gas.NewCallList()
    })

    It("should not return any matches when empty", func() {
        Expect(calls.Contains("foo", "bar")).Should(BeFalse())
    })

    It("should be possible to add a single call", func() {
        Expect(calls).Should(HaveLen(0))
        calls.Add("foo", "bar")
        Expect(calls).Should(HaveLen(1))

        expected := make(map[string]bool)
        expected["bar"] = true
        actual := map[string]bool(calls["foo"])
        Expect(actual).Should(Equal(expected))
    })

    It("should be possible to add multiple calls at once", func() {
        Expect(calls).Should(HaveLen(0))
        calls.AddAll("fmt", "Sprint", "Sprintf", "Printf", "Println")

        expected := map[string]bool{
            "Sprint":  true,
            "Sprintf": true,
            "Printf":  true,
            "Println": true,
        }
        actual := map[string]bool(calls["fmt"])
        Expect(actual).Should(Equal(expected))
    })

    It("should not return a match if none are present", func() {
        calls.Add("ioutil", "Copy")
        Expect(calls.Contains("fmt", "Println")).Should(BeFalse())
    })

    It("should match a call based on selector and ident", func() {
        calls.Add("ioutil", "Copy")
        Expect(calls.Contains("ioutil", "Copy")).Should(BeTrue())
    })

    It("should match a call expression", func() {

        // Create file to be scanned
        pkg := testutils.NewTestPackage()
        defer pkg.Close()
        pkg.AddFile("md5.go", testutils.SampleCodeG401[0].Code)

        ctx := pkg.CreateContext("md5.go")

        // Search for md5.New()
        calls.Add("md5", "New")

        // Stub out visitor and count number of matched call expr
        matched := 0
        v := testutils.NewMockVisitor()
        v.Context = ctx
        v.Callback = func(n ast.Node, ctx *gas.Context) bool {
            if _, ok := n.(*ast.CallExpr); ok && calls.ContainsCallExpr(n, ctx) != nil {
                matched++
            }
            return true
        }
        ast.Walk(v, ctx.Root)
        Expect(matched).Should(Equal(1))

    })

})

cmd/gas/main.go (new file): 241 lines added

@@ -0,0 +1,241 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package main

import (
    "flag"
    "fmt"
    "log"
    "os"
    "path/filepath"
    "regexp"
    "sort"
    "strings"

    "github.com/GoASTScanner/gas"
    "github.com/GoASTScanner/gas/output"
    "github.com/GoASTScanner/gas/rules"
    "github.com/kisielk/gotool"
)

const (
    usageText = `
GAS - Go AST Scanner

Gas analyzes Go source code to look for common programming mistakes that
can lead to security problems.

USAGE:

    # Check a single package
    $ gas $GOPATH/src/github.com/example/project

    # Check all packages under the current directory and save results in
    # json format.
    $ gas -fmt=json -out=results.json ./...

    # Run a specific set of rules (by default all rules will be run):
    $ gas -include=G101,G203,G401 ./...

    # Run all rules except the provided
    $ gas -exclude=G101 $GOPATH/src/github.com/example/project/...

`
)

var (
    // #nosec flag
    flagIgnoreNoSec = flag.Bool("nosec", false, "Ignores #nosec comments when set")

    // format output
    flagFormat = flag.String("fmt", "text", "Set output format. Valid options are: json, csv, html, or text")

    // output file
    flagOutput = flag.String("out", "", "Set output file for results")

    // config file
    flagConfig = flag.String("conf", "", "Path to optional config file")

    // quiet
    flagQuiet = flag.Bool("quiet", false, "Only show output when errors are found")

    // rules to explicitly include
    flagRulesInclude = flag.String("include", "", "Comma separated list of rules IDs to include. (see rule list)")

    // rules to explicitly exclude
    flagRulesExclude = flag.String("exclude", "", "Comma separated list of rules IDs to exclude. (see rule list)")

    // log to file or stderr
    flagLogfile = flag.String("log", "", "Log messages to file rather than stderr")

    logger *log.Logger
)

// #nosec
func usage() {

    fmt.Fprintln(os.Stderr, usageText)
    fmt.Fprint(os.Stderr, "OPTIONS:\n\n")
    flag.PrintDefaults()
    fmt.Fprint(os.Stderr, "\n\nRULES:\n\n")

    // sorted rule list for ease of reading
    rl := rules.Generate()
    keys := make([]string, 0, len(rl))
    for key := range rl {
        keys = append(keys, key)
    }
    sort.Strings(keys)
    for _, k := range keys {
        v := rl[k]
        fmt.Fprintf(os.Stderr, "\t%s: %s\n", k, v.Description)
    }
    fmt.Fprint(os.Stderr, "\n")
}

func loadConfig(configFile string) (gas.Config, error) {
    config := gas.NewConfig()
    if configFile != "" {
        file, err := os.Open(configFile)
        if err != nil {
            return nil, err
        }
        defer file.Close()
        if _, err := config.ReadFrom(file); err != nil {
            return nil, err
        }
    }
    if *flagIgnoreNoSec {
        config.SetGlobal("nosec", "true")
    }
    return config, nil
}

func loadRules(include, exclude string) rules.RuleList {
    var filters []rules.RuleFilter
    if include != "" {
        log.Printf("including rules: %s", include)
        including := strings.Split(include, ",")
        filters = append(filters, rules.NewRuleFilter(false, including...))
    } else {
        log.Println("including rules: default")
    }

    if exclude != "" {
        log.Printf("excluding rules: %s", exclude)
        excluding := strings.Split(exclude, ",")
        filters = append(filters, rules.NewRuleFilter(true, excluding...))
    } else {
        log.Println("excluding rules: default")
    }
    return rules.Generate(filters...)
}

func saveOutput(filename, format string, issues []*gas.Issue, metrics *gas.Metrics) error {
    if filename != "" {
        outfile, err := os.Create(filename)
        if err != nil {
            return err
        }
        defer outfile.Close()
        output.CreateReport(outfile, format, issues, metrics)
    } else {
        output.CreateReport(os.Stdout, format, issues, metrics)
    }
    return nil
}

func main() {

    // Setup usage description
    flag.Usage = usage

    // Parse command line arguments
    flag.Parse()

    // Ensure at least one file was specified
    if flag.NArg() == 0 {
        fmt.Fprintf(os.Stderr, "\nError: FILE [FILE...] or './...' expected\n")
        flag.Usage()
        os.Exit(1)
    }

    // Setup logging
    logWriter := os.Stderr
    if *flagLogfile != "" {
        var e error
        logWriter, e = os.Create(*flagLogfile)
        if e != nil {
            flag.Usage()
            log.Fatal(e)
        }
    }
    logger = log.New(logWriter, "[gas] ", log.LstdFlags)

    // Load config
    config, err := loadConfig(*flagConfig)
    if err != nil {
        logger.Fatal(err)
    }

    // Load enabled rule definitions
    ruleDefinitions := loadRules(*flagRulesInclude, *flagRulesExclude)
    if len(ruleDefinitions) <= 0 {
        log.Fatal("cannot continue: no rules are configured.")
    }

    // Create the analyzer
    analyzer := gas.NewAnalyzer(config, logger)
    analyzer.LoadRules(ruleDefinitions.Builders()...)

    vendor := regexp.MustCompile(`[\\/]vendor([\\/]|$)`)

    // Iterate over packages on the import paths
    for _, pkg := range gotool.ImportPaths(flag.Args()) {

        // Skip vendor directory
        if vendor.MatchString(pkg) {
            continue
        }

        abspath, _ := filepath.Abs(pkg)
        logger.Println("Searching directory:", abspath)
        if err := analyzer.Process(pkg); err != nil {
            logger.Fatal(err)
        }
    }

    // Collect the results
    issues, metrics := analyzer.Report()

    issuesFound := len(issues) > 0
    // Exit quietly if nothing was found
    if !issuesFound && *flagQuiet {
        os.Exit(0)
    }

    // Create output report
    if err := saveOutput(*flagOutput, *flagFormat, issues, metrics); err != nil {
        logger.Fatal(err)
    }

    // Finialize logging
    logWriter.Close()

    // Do we have an issue? If so exit 1
    if issuesFound {
        os.Exit(1)
    }
}

@@ -15,6 +15,7 @@
 package main

 import (
+	"flag"
 	"fmt"
 	"go/ast"
 	"go/importer"

@@ -274,3 +275,14 @@ func dumpImports(files ...string) {
 		}
 	}
 }
+
+func main() {
+	tools := newUtils()
+	flag.Var(tools, "tool", "Utils to assist with rule development")
+	flag.Parse()
+
+	if len(tools.call) > 0 {
+		tools.run(flag.Args()...)
+		os.Exit(0)
+	}
+}

config.go (new file): 88 lines added

@@ -0,0 +1,88 @@
package gas

import (
    "bytes"
    "encoding/json"
    "fmt"
    "io"
    "io/ioutil"
)

const (
    // Globals are applicable to all rules and used for general
    // configuration settings for gas.
    Globals = "global"
)

// Config is used to provide configuration and customization to each of the rules.
type Config map[string]interface{}

// NewConfig initializes a new configuration instance. The configuration data then
// needs to be loaded via c.ReadFrom(strings.NewReader("config data"))
// or from a *os.File.
func NewConfig() Config {
    cfg := make(Config)
    cfg[Globals] = make(map[string]string)
    return cfg
}

// ReadFrom implements the io.ReaderFrom interface. This
// should be used with io.Reader to load configuration from
//file or from string etc.
func (c Config) ReadFrom(r io.Reader) (int64, error) {
    data, err := ioutil.ReadAll(r)
    if err != nil {
        return int64(len(data)), err
    }
    if err = json.Unmarshal(data, &c); err != nil {
        return int64(len(data)), err
    }
    return int64(len(data)), nil
}

// WriteTo implements the io.WriteTo interface. This should
// be used to save or print out the configuration information.
func (c Config) WriteTo(w io.Writer) (int64, error) {
    data, err := json.Marshal(c)
    if err != nil {
        return int64(len(data)), err
    }
    return io.Copy(w, bytes.NewReader(data))
}

// Get returns the configuration section for the supplied key
func (c Config) Get(section string) (interface{}, error) {
    settings, found := c[section]
    if !found {
        return nil, fmt.Errorf("Section %s not in configuration", section)
    }
    return settings, nil
}

// Set section in the configuration to specified value
func (c Config) Set(section string, value interface{}) {
    c[section] = value
}

// GetGlobal returns value associated with global configuration option
func (c Config) GetGlobal(option string) (string, error) {
    if globals, ok := c[Globals]; ok {
        if settings, ok := globals.(map[string]string); ok {
            if value, ok := settings[option]; ok {
                return value, nil
            }
            return "", fmt.Errorf("global setting for %s not found", option)
        }
    }
    return "", fmt.Errorf("no global config options found")

}

// SetGlobal associates a value with a global configuration ooption
func (c Config) SetGlobal(option, value string) {
    if globals, ok := c[Globals]; ok {
        if settings, ok := globals.(map[string]string); ok {
            settings[option] = value
        }
    }
}
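
A short sketch of the intended round trip for this Config type, based on the methods above and the expectations in config_test.go; the G101 "mode" value is purely illustrative:

```go
package main

import (
    "fmt"
    "log"
    "strings"

    "github.com/GoASTScanner/gas"
)

func main() {
    config := gas.NewConfig()

    // Load per-rule settings from JSON; an *os.File passed to ReadFrom
    // behaves the same way (this is how the -conf flag is consumed).
    if _, err := config.ReadFrom(strings.NewReader(`{"G101": {"mode": "strict"}}`)); err != nil {
        log.Fatal(err)
    }

    // Global options live under the reserved "global" section.
    config.SetGlobal("nosec", "true")
    if v, err := config.GetGlobal("nosec"); err == nil {
        fmt.Println("nosec =", v) // prints: nosec = true
    }
}
```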

config_test.go (new file): 103 lines added

@@ -0,0 +1,103 @@
package gas_test

import (
    "bytes"

    "github.com/GoASTScanner/gas"
    . "github.com/onsi/ginkgo"
    . "github.com/onsi/gomega"
)

var _ = Describe("Configuration", func() {
    var configuration gas.Config
    BeforeEach(func() {
        configuration = gas.NewConfig()
    })

    Context("when loading from disk", func() {

        It("should be possible to load configuration from a file", func() {
            json := `{"G101": {}}`
            buffer := bytes.NewBufferString(json)
            nread, err := configuration.ReadFrom(buffer)
            Expect(nread).Should(Equal(int64(len(json))))
            Expect(err).ShouldNot(HaveOccurred())
        })

        It("should return an error if configuration file is invalid", func() {
            var err error
            invalidBuffer := bytes.NewBuffer([]byte{0xc0, 0xff, 0xee})
            _, err = configuration.ReadFrom(invalidBuffer)
            Expect(err).Should(HaveOccurred())

            emptyBuffer := bytes.NewBuffer([]byte{})
            _, err = configuration.ReadFrom(emptyBuffer)
            Expect(err).Should(HaveOccurred())
        })

    })

    Context("when saving to disk", func() {
        It("should be possible to save an empty configuration to file", func() {
            expected := `{"global":{}}`
            buffer := bytes.NewBuffer([]byte{})
            nbytes, err := configuration.WriteTo(buffer)
            Expect(int(nbytes)).Should(Equal(len(expected)))
            Expect(err).ShouldNot(HaveOccurred())
            Expect(buffer.String()).Should(Equal(expected))
        })

        It("should be possible to save configuration to file", func() {

            configuration.Set("G101", map[string]string{
                "mode": "strict",
            })

            buffer := bytes.NewBuffer([]byte{})
            nbytes, err := configuration.WriteTo(buffer)
            Expect(int(nbytes)).ShouldNot(BeZero())
            Expect(err).ShouldNot(HaveOccurred())
            Expect(buffer.String()).Should(Equal(`{"G101":{"mode":"strict"},"global":{}}`))

        })
    })

    Context("when configuring rules", func() {

        It("should be possible to get configuration for a rule", func() {
            settings := map[string]string{
                "ciphers": "AES256-GCM",
            }
            configuration.Set("G101", settings)

            retrieved, err := configuration.Get("G101")
            Expect(err).ShouldNot(HaveOccurred())
            Expect(retrieved).Should(HaveKeyWithValue("ciphers", "AES256-GCM"))
            Expect(retrieved).ShouldNot(HaveKey("foobar"))
        })
    })

    Context("when using global configuration options", func() {
        It("should have a default global section", func() {
            settings, err := configuration.Get("global")
            Expect(err).Should(BeNil())
            expectedType := make(map[string]string)
            Expect(settings).Should(BeAssignableToTypeOf(expectedType))
        })

        It("should save global settings to correct section", func() {
            configuration.SetGlobal("nosec", "enabled")
            settings, err := configuration.Get("global")
            Expect(err).Should(BeNil())
            if globals, ok := settings.(map[string]string); ok {
                Expect(globals["nosec"]).Should(MatchRegexp("enabled"))
            } else {
                Fail("globals are not defined as map")
            }

            setValue, err := configuration.GetGlobal("nosec")
            Expect(err).Should(BeNil())
            Expect(setValue).Should(MatchRegexp("enabled"))
        })
    })
})
235
core/analyzer.go
235
core/analyzer.go
|
@ -1,235 +0,0 @@
|
||||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
// Package core holds the central scanning logic used by GAS
|
|
||||||
package core
|
|
||||||
|
|
||||||
import (
|
|
||||||
"go/ast"
|
|
||||||
"go/importer"
|
|
||||||
"go/parser"
|
|
||||||
"go/token"
|
|
||||||
"go/types"
|
|
||||||
"log"
|
|
||||||
"os"
|
|
||||||
"path"
|
|
||||||
"reflect"
|
|
||||||
"strings"
|
|
||||||
)
|
|
||||||
|
|
||||||
// ImportInfo is used to track aliased and initialization only imports.
|
|
||||||
type ImportInfo struct {
|
|
||||||
Imported map[string]string
|
|
||||||
Aliased map[string]string
|
|
||||||
InitOnly map[string]bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func NewImportInfo() *ImportInfo {
|
|
||||||
return &ImportInfo{
|
|
||||||
make(map[string]string),
|
|
||||||
make(map[string]string),
|
|
||||||
make(map[string]bool),
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// The Context is populated with data parsed from the source code as it is scanned.
|
|
||||||
// It is passed through to all rule functions as they are called. Rules may use
|
|
||||||
// this data in conjunction withe the encoutered AST node.
|
|
||||||
type Context struct {
|
|
||||||
FileSet *token.FileSet
|
|
||||||
Comments ast.CommentMap
|
|
||||||
Info *types.Info
|
|
||||||
Pkg *types.Package
|
|
||||||
Root *ast.File
|
|
||||||
Config map[string]interface{}
|
|
||||||
Imports *ImportInfo
|
|
||||||
}
|
|
||||||
|
|
||||||
// The Rule interface used by all rules supported by GAS.
|
|
||||||
type Rule interface {
|
|
||||||
Match(ast.Node, *Context) (*Issue, error)
|
|
||||||
}
|
|
||||||
|
|
||||||
// A RuleSet maps lists of rules to the type of AST node they should be run on.
|
|
||||||
// The anaylzer will only invoke rules contained in the list associated with the
|
|
||||||
// type of AST node it is currently visiting.
|
|
||||||
type RuleSet map[reflect.Type][]Rule
|
|
||||||
|
|
||||||
// Metrics used when reporting information about a scanning run.
|
|
||||||
type Metrics struct {
|
|
||||||
NumFiles int `json:"files"`
|
|
||||||
NumLines int `json:"lines"`
|
|
||||||
NumNosec int `json:"nosec"`
|
|
||||||
NumFound int `json:"found"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// The Analyzer object is the main object of GAS. It has methods traverse an AST
|
|
||||||
// and invoke the correct checking rules as on each node as required.
|
|
||||||
type Analyzer struct {
|
|
||||||
ignoreNosec bool
|
|
||||||
ruleset RuleSet
|
|
||||||
context *Context
|
|
||||||
logger *log.Logger
|
|
||||||
Issues []*Issue `json:"issues"`
|
|
||||||
Stats *Metrics `json:"metrics"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewAnalyzer builds a new anaylzer.
|
|
||||||
func NewAnalyzer(conf map[string]interface{}, logger *log.Logger) Analyzer {
|
|
||||||
if logger == nil {
|
|
||||||
logger = log.New(os.Stdout, "[gas]", 0)
|
|
||||||
}
|
|
||||||
a := Analyzer{
|
|
||||||
ignoreNosec: conf["ignoreNosec"].(bool),
|
|
||||||
ruleset: make(RuleSet),
|
|
||||||
context: &Context{nil, nil, nil, nil, nil, nil, nil},
|
|
||||||
logger: logger,
|
|
||||||
Issues: make([]*Issue, 0, 16),
|
|
||||||
Stats: &Metrics{0, 0, 0, 0},
|
|
||||||
}
|
|
||||||
|
|
||||||
// TODO(tkelsey): use the inc/exc lists
|
|
||||||
|
|
||||||
return a
|
|
||||||
}
|
|
||||||
|
|
||||||
func (gas *Analyzer) process(filename string, source interface{}) error {
|
|
||||||
mode := parser.ParseComments
|
|
||||||
gas.context.FileSet = token.NewFileSet()
|
|
||||||
root, err := parser.ParseFile(gas.context.FileSet, filename, source, mode)
|
|
||||||
if err == nil {
|
|
||||||
gas.context.Comments = ast.NewCommentMap(gas.context.FileSet, root, root.Comments)
|
|
||||||
gas.context.Root = root
|
|
||||||
|
|
||||||
// here we get type info
|
|
||||||
gas.context.Info = &types.Info{
|
|
||||||
Types: make(map[ast.Expr]types.TypeAndValue),
|
|
||||||
Defs: make(map[*ast.Ident]types.Object),
|
|
||||||
Uses: make(map[*ast.Ident]types.Object),
|
|
||||||
Selections: make(map[*ast.SelectorExpr]*types.Selection),
|
|
||||||
Scopes: make(map[ast.Node]*types.Scope),
|
|
||||||
Implicits: make(map[ast.Node]types.Object),
|
|
||||||
}
|
|
||||||
|
|
||||||
conf := types.Config{Importer: importer.Default()}
|
|
||||||
gas.context.Pkg, err = conf.Check("pkg", gas.context.FileSet, []*ast.File{root}, gas.context.Info)
|
|
||||||
if err != nil {
|
|
||||||
// TODO(gm) Type checker not currently considering all files within a package
|
|
||||||
// see: issue #113
|
|
||||||
gas.logger.Printf(`Error during type checking: "%s"`, err)
|
|
||||||
err = nil
|
|
||||||
}
|
|
||||||
|
|
||||||
gas.context.Imports = NewImportInfo()
|
|
||||||
for _, pkg := range gas.context.Pkg.Imports() {
|
|
||||||
gas.context.Imports.Imported[pkg.Path()] = pkg.Name()
|
|
||||||
}
|
|
||||||
ast.Walk(gas, root)
|
|
||||||
gas.Stats.NumFiles++
|
|
||||||
}
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// AddRule adds a rule into a rule set list mapped to the given AST node's type.
|
|
||||||
// The node is only needed for its type and is not otherwise used.
|
|
||||||
func (gas *Analyzer) AddRule(r Rule, nodes []ast.Node) {
|
|
||||||
for _, n := range nodes {
|
|
||||||
t := reflect.TypeOf(n)
|
|
||||||
if val, ok := gas.ruleset[t]; ok {
|
|
||||||
gas.ruleset[t] = append(val, r)
|
|
||||||
} else {
|
|
||||||
gas.ruleset[t] = []Rule{r}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Process reads in a source file, convert it to an AST and traverse it.
|
|
||||||
// Rule methods added with AddRule will be invoked as necessary.
|
|
||||||
func (gas *Analyzer) Process(filename string) error {
|
|
||||||
err := gas.process(filename, nil)
|
|
||||||
fun := func(f *token.File) bool {
|
|
||||||
gas.Stats.NumLines += f.LineCount()
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
gas.context.FileSet.Iterate(fun)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// ProcessSource will convert a source code string into an AST and traverse it.
|
|
||||||
// Rule methods added with AddRule will be invoked as necessary. The string is
|
|
||||||
// identified by the filename given but no file IO will be done.
|
|
||||||
func (gas *Analyzer) ProcessSource(filename string, source string) error {
|
|
||||||
err := gas.process(filename, source)
|
|
||||||
fun := func(f *token.File) bool {
|
|
||||||
gas.Stats.NumLines += f.LineCount()
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
gas.context.FileSet.Iterate(fun)
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
// ignore a node (and sub-tree) if it is tagged with a "#nosec" comment
|
|
||||||
func (gas *Analyzer) ignore(n ast.Node) bool {
|
|
||||||
if groups, ok := gas.context.Comments[n]; ok && !gas.ignoreNosec {
|
|
||||||
for _, group := range groups {
|
|
||||||
if strings.Contains(group.Text(), "#nosec") {
|
|
||||||
gas.Stats.NumNosec++
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}

// Visit runs the GAS visitor logic over an AST created by parsing go code.
// Rule methods added with AddRule will be invoked as necessary.
func (gas *Analyzer) Visit(n ast.Node) ast.Visitor {
	if !gas.ignore(n) {

		// Track aliased and initialization imports
		if imported, ok := n.(*ast.ImportSpec); ok {
			path := strings.Trim(imported.Path.Value, `"`)
			if imported.Name != nil {
				if imported.Name.Name == "_" {
					// Initialization import
					gas.context.Imports.InitOnly[path] = true
				} else {
					// Aliased import
					gas.context.Imports.Aliased[path] = imported.Name.Name
				}
			}
			// unsafe is not included in Package.Imports()
			if path == "unsafe" {
				gas.context.Imports.Imported[path] = path
			}
		}

		if val, ok := gas.ruleset[reflect.TypeOf(n)]; ok {
			for _, rule := range val {
				ret, err := rule.Match(n, gas.context)
				if err != nil {
					file, line := GetLocation(n, gas.context)
					file = path.Base(file)
					gas.logger.Printf("Rule error: %v => %s (%s:%d)\n", reflect.TypeOf(rule), err, file, line)
				}
				if ret != nil {
					gas.Issues = append(gas.Issues, ret)
					gas.Stats.NumFound++
				}
			}
		}
		return gas
	}
	return nil
}
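Note the asymmetry in Visit's return values: ast.Walk only descends into a node's children when the returned Visitor is non-nil, so returning nil for an ignored node prunes the whole sub-tree in one step. A tiny sketch of that standard-library contract (independent of GAS, shown only to make the nil return explicit):

	type pruner struct{}

	func (p pruner) Visit(n ast.Node) ast.Visitor {
		if _, isFunc := n.(*ast.FuncDecl); isFunc {
			return nil // ast.Walk skips this function's body
		}
		return p
	}

	// ast.Walk(pruner{}, file) visits everything except function bodies.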
@ -1,60 +0,0 @@
package core
|
|
||||||
|
|
||||||
import (
|
|
||||||
"go/ast"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
type callListRule struct {
|
|
||||||
MetaData
|
|
||||||
callList CallList
|
|
||||||
matched int
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *callListRule) Match(n ast.Node, c *Context) (gi *Issue, err error) {
|
|
||||||
if r.callList.ContainsCallExpr(n, c) {
|
|
||||||
r.matched += 1
|
|
||||||
}
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCallListContainsCallExpr(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := NewAnalyzer(config, nil)
|
|
||||||
calls := NewCallList()
|
|
||||||
calls.AddAll("bytes.Buffer", "Write", "WriteTo")
|
|
||||||
rule := &callListRule{
|
|
||||||
MetaData: MetaData{
|
|
||||||
Severity: Low,
|
|
||||||
Confidence: Low,
|
|
||||||
What: "A dummy rule",
|
|
||||||
},
|
|
||||||
callList: calls,
|
|
||||||
matched: 0,
|
|
||||||
}
|
|
||||||
analyzer.AddRule(rule, []ast.Node{(*ast.CallExpr)(nil)})
|
|
||||||
source := `
|
|
||||||
package main
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"fmt"
|
|
||||||
)
|
|
||||||
func main() {
|
|
||||||
var b bytes.Buffer
|
|
||||||
b.Write([]byte("Hello "))
|
|
||||||
fmt.Fprintf(&b, "world!")
|
|
||||||
}`
|
|
||||||
|
|
||||||
analyzer.ProcessSource("dummy.go", source)
|
|
||||||
if rule.matched != 1 {
|
|
||||||
t.Errorf("Expected to match a bytes.Buffer.Write call")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestCallListContains(t *testing.T) {
|
|
||||||
callList := NewCallList()
|
|
||||||
callList.Add("fmt", "Printf")
|
|
||||||
if !callList.Contains("fmt", "Printf") {
|
|
||||||
t.Errorf("Expected call list to contain fmt.Printf")
|
|
||||||
}
|
|
||||||
}
@ -1,71 +0,0 @@
package core
|
|
||||||
|
|
||||||
import (
|
|
||||||
"go/ast"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
type dummyCallback func(ast.Node, *Context, string, ...string) (*ast.CallExpr, bool)
|
|
||||||
|
|
||||||
type dummyRule struct {
|
|
||||||
MetaData
|
|
||||||
pkgOrType string
|
|
||||||
funcsOrMethods []string
|
|
||||||
callback dummyCallback
|
|
||||||
callExpr []ast.Node
|
|
||||||
matched int
|
|
||||||
}
|
|
||||||
|
|
||||||
func (r *dummyRule) Match(n ast.Node, c *Context) (gi *Issue, err error) {
|
|
||||||
if callexpr, matched := r.callback(n, c, r.pkgOrType, r.funcsOrMethods...); matched {
|
|
||||||
r.matched += 1
|
|
||||||
r.callExpr = append(r.callExpr, callexpr)
|
|
||||||
}
|
|
||||||
return nil, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestMatchCallByType(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := NewAnalyzer(config, nil)
|
|
||||||
rule := &dummyRule{
|
|
||||||
MetaData: MetaData{
|
|
||||||
Severity: Low,
|
|
||||||
Confidence: Low,
|
|
||||||
What: "A dummy rule",
|
|
||||||
},
|
|
||||||
pkgOrType: "bytes.Buffer",
|
|
||||||
funcsOrMethods: []string{"Write"},
|
|
||||||
callback: MatchCallByType,
|
|
||||||
callExpr: []ast.Node{},
|
|
||||||
matched: 0,
|
|
||||||
}
|
|
||||||
analyzer.AddRule(rule, []ast.Node{(*ast.CallExpr)(nil)})
|
|
||||||
source := `
|
|
||||||
package main
|
|
||||||
import (
|
|
||||||
"bytes"
|
|
||||||
"fmt"
|
|
||||||
)
|
|
||||||
func main() {
|
|
||||||
var b bytes.Buffer
|
|
||||||
b.Write([]byte("Hello "))
|
|
||||||
fmt.Fprintf(&b, "world!")
|
|
||||||
}`
|
|
||||||
|
|
||||||
analyzer.ProcessSource("dummy.go", source)
|
|
||||||
if rule.matched != 1 || len(rule.callExpr) != 1 {
|
|
||||||
t.Errorf("Expected to match a bytes.Buffer.Write call")
|
|
||||||
}
|
|
||||||
|
|
||||||
typeName, callName, err := GetCallInfo(rule.callExpr[0], analyzer.context)
|
|
||||||
if err != nil {
|
|
||||||
t.Errorf("Unable to resolve call info: %v\n", err)
|
|
||||||
}
|
|
||||||
if typeName != "bytes.Buffer" {
|
|
||||||
t.Errorf("Expected: %s, Got: %s\n", "bytes.Buffer", typeName)
|
|
||||||
}
|
|
||||||
if callName != "Write" {
|
|
||||||
t.Errorf("Expected: %s, Got: %s\n", "Write", callName)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
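The helper exercised by this test has the signature MatchCallByType(n ast.Node, ctx *Context, requiredType string, calls ...string) (*ast.CallExpr, bool), as shown in the helpers diff later in this change. A hedged sketch of a rule using it directly rather than through a callback (the rule type itself is hypothetical):

	type bufferWriteRule struct {
		MetaData
	}

	func (r *bufferWriteRule) Match(n ast.Node, c *Context) (*Issue, error) {
		if _, matched := MatchCallByType(n, c, "bytes.Buffer", "Write", "WriteTo"); matched {
			return NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
		}
		return nil, nil
	}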
|
|
404 core/select.go
@ -1,404 +0,0 @@
|
||||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package core
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"go/ast"
|
|
||||||
"reflect"
|
|
||||||
)
|
|
||||||
|
|
||||||
// SelectFunc is like an AST visitor, but has a richer interface. It
|
|
||||||
// is called with the current ast.Node being visitied and that nodes depth in
|
|
||||||
// the tree. The function can return true to continue traversing the tree, or
|
|
||||||
// false to end traversal here.
|
|
||||||
type SelectFunc func(ast.Node, int) bool
|
|
||||||
|
|
||||||
func walkIdentList(list []*ast.Ident, depth int, fun SelectFunc) {
|
|
||||||
for _, x := range list {
|
|
||||||
depthWalk(x, depth, fun)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func walkExprList(list []ast.Expr, depth int, fun SelectFunc) {
|
|
||||||
for _, x := range list {
|
|
||||||
depthWalk(x, depth, fun)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func walkStmtList(list []ast.Stmt, depth int, fun SelectFunc) {
|
|
||||||
for _, x := range list {
|
|
||||||
depthWalk(x, depth, fun)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func walkDeclList(list []ast.Decl, depth int, fun SelectFunc) {
|
|
||||||
for _, x := range list {
|
|
||||||
depthWalk(x, depth, fun)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func depthWalk(node ast.Node, depth int, fun SelectFunc) {
|
|
||||||
if !fun(node, depth) {
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
switch n := node.(type) {
|
|
||||||
// Comments and fields
|
|
||||||
case *ast.Comment:
|
|
||||||
|
|
||||||
case *ast.CommentGroup:
|
|
||||||
for _, c := range n.List {
|
|
||||||
depthWalk(c, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.Field:
|
|
||||||
if n.Doc != nil {
|
|
||||||
depthWalk(n.Doc, depth+1, fun)
|
|
||||||
}
|
|
||||||
walkIdentList(n.Names, depth+1, fun)
|
|
||||||
depthWalk(n.Type, depth+1, fun)
|
|
||||||
if n.Tag != nil {
|
|
||||||
depthWalk(n.Tag, depth+1, fun)
|
|
||||||
}
|
|
||||||
if n.Comment != nil {
|
|
||||||
depthWalk(n.Comment, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.FieldList:
|
|
||||||
for _, f := range n.List {
|
|
||||||
depthWalk(f, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Expressions
|
|
||||||
case *ast.BadExpr, *ast.Ident, *ast.BasicLit:
|
|
||||||
|
|
||||||
case *ast.Ellipsis:
|
|
||||||
if n.Elt != nil {
|
|
||||||
depthWalk(n.Elt, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.FuncLit:
|
|
||||||
depthWalk(n.Type, depth+1, fun)
|
|
||||||
depthWalk(n.Body, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.CompositeLit:
|
|
||||||
if n.Type != nil {
|
|
||||||
depthWalk(n.Type, depth+1, fun)
|
|
||||||
}
|
|
||||||
walkExprList(n.Elts, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.ParenExpr:
|
|
||||||
depthWalk(n.X, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.SelectorExpr:
|
|
||||||
depthWalk(n.X, depth+1, fun)
|
|
||||||
depthWalk(n.Sel, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.IndexExpr:
|
|
||||||
depthWalk(n.X, depth+1, fun)
|
|
||||||
depthWalk(n.Index, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.SliceExpr:
|
|
||||||
depthWalk(n.X, depth+1, fun)
|
|
||||||
if n.Low != nil {
|
|
||||||
depthWalk(n.Low, depth+1, fun)
|
|
||||||
}
|
|
||||||
if n.High != nil {
|
|
||||||
depthWalk(n.High, depth+1, fun)
|
|
||||||
}
|
|
||||||
if n.Max != nil {
|
|
||||||
depthWalk(n.Max, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.TypeAssertExpr:
|
|
||||||
depthWalk(n.X, depth+1, fun)
|
|
||||||
if n.Type != nil {
|
|
||||||
depthWalk(n.Type, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.CallExpr:
|
|
||||||
depthWalk(n.Fun, depth+1, fun)
|
|
||||||
walkExprList(n.Args, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.StarExpr:
|
|
||||||
depthWalk(n.X, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.UnaryExpr:
|
|
||||||
depthWalk(n.X, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.BinaryExpr:
|
|
||||||
depthWalk(n.X, depth+1, fun)
|
|
||||||
depthWalk(n.Y, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.KeyValueExpr:
|
|
||||||
depthWalk(n.Key, depth+1, fun)
|
|
||||||
depthWalk(n.Value, depth+1, fun)
|
|
||||||
|
|
||||||
// Types
|
|
||||||
case *ast.ArrayType:
|
|
||||||
if n.Len != nil {
|
|
||||||
depthWalk(n.Len, depth+1, fun)
|
|
||||||
}
|
|
||||||
depthWalk(n.Elt, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.StructType:
|
|
||||||
depthWalk(n.Fields, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.FuncType:
|
|
||||||
if n.Params != nil {
|
|
||||||
depthWalk(n.Params, depth+1, fun)
|
|
||||||
}
|
|
||||||
if n.Results != nil {
|
|
||||||
depthWalk(n.Results, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.InterfaceType:
|
|
||||||
depthWalk(n.Methods, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.MapType:
|
|
||||||
depthWalk(n.Key, depth+1, fun)
|
|
||||||
depthWalk(n.Value, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.ChanType:
|
|
||||||
depthWalk(n.Value, depth+1, fun)
|
|
||||||
|
|
||||||
// Statements
|
|
||||||
case *ast.BadStmt:
|
|
||||||
|
|
||||||
case *ast.DeclStmt:
|
|
||||||
depthWalk(n.Decl, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.EmptyStmt:
|
|
||||||
|
|
||||||
case *ast.LabeledStmt:
|
|
||||||
depthWalk(n.Label, depth+1, fun)
|
|
||||||
depthWalk(n.Stmt, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.ExprStmt:
|
|
||||||
depthWalk(n.X, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.SendStmt:
|
|
||||||
depthWalk(n.Chan, depth+1, fun)
|
|
||||||
depthWalk(n.Value, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.IncDecStmt:
|
|
||||||
depthWalk(n.X, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.AssignStmt:
|
|
||||||
walkExprList(n.Lhs, depth+1, fun)
|
|
||||||
walkExprList(n.Rhs, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.GoStmt:
|
|
||||||
depthWalk(n.Call, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.DeferStmt:
|
|
||||||
depthWalk(n.Call, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.ReturnStmt:
|
|
||||||
walkExprList(n.Results, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.BranchStmt:
|
|
||||||
if n.Label != nil {
|
|
||||||
depthWalk(n.Label, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.BlockStmt:
|
|
||||||
walkStmtList(n.List, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.IfStmt:
|
|
||||||
if n.Init != nil {
|
|
||||||
depthWalk(n.Init, depth+1, fun)
|
|
||||||
}
|
|
||||||
depthWalk(n.Cond, depth+1, fun)
|
|
||||||
depthWalk(n.Body, depth+1, fun)
|
|
||||||
if n.Else != nil {
|
|
||||||
depthWalk(n.Else, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.CaseClause:
|
|
||||||
walkExprList(n.List, depth+1, fun)
|
|
||||||
walkStmtList(n.Body, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.SwitchStmt:
|
|
||||||
if n.Init != nil {
|
|
||||||
depthWalk(n.Init, depth+1, fun)
|
|
||||||
}
|
|
||||||
if n.Tag != nil {
|
|
||||||
depthWalk(n.Tag, depth+1, fun)
|
|
||||||
}
|
|
||||||
depthWalk(n.Body, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.TypeSwitchStmt:
|
|
||||||
if n.Init != nil {
|
|
||||||
depthWalk(n.Init, depth+1, fun)
|
|
||||||
}
|
|
||||||
depthWalk(n.Assign, depth+1, fun)
|
|
||||||
depthWalk(n.Body, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.CommClause:
|
|
||||||
if n.Comm != nil {
|
|
||||||
depthWalk(n.Comm, depth+1, fun)
|
|
||||||
}
|
|
||||||
walkStmtList(n.Body, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.SelectStmt:
|
|
||||||
depthWalk(n.Body, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.ForStmt:
|
|
||||||
if n.Init != nil {
|
|
||||||
depthWalk(n.Init, depth+1, fun)
|
|
||||||
}
|
|
||||||
if n.Cond != nil {
|
|
||||||
depthWalk(n.Cond, depth+1, fun)
|
|
||||||
}
|
|
||||||
if n.Post != nil {
|
|
||||||
depthWalk(n.Post, depth+1, fun)
|
|
||||||
}
|
|
||||||
depthWalk(n.Body, depth+1, fun)
|
|
||||||
|
|
||||||
case *ast.RangeStmt:
|
|
||||||
if n.Key != nil {
|
|
||||||
depthWalk(n.Key, depth+1, fun)
|
|
||||||
}
|
|
||||||
if n.Value != nil {
|
|
||||||
depthWalk(n.Value, depth+1, fun)
|
|
||||||
}
|
|
||||||
depthWalk(n.X, depth+1, fun)
|
|
||||||
depthWalk(n.Body, depth+1, fun)
|
|
||||||
|
|
||||||
// Declarations
|
|
||||||
case *ast.ImportSpec:
|
|
||||||
if n.Doc != nil {
|
|
||||||
depthWalk(n.Doc, depth+1, fun)
|
|
||||||
}
|
|
||||||
if n.Name != nil {
|
|
||||||
depthWalk(n.Name, depth+1, fun)
|
|
||||||
}
|
|
||||||
depthWalk(n.Path, depth+1, fun)
|
|
||||||
if n.Comment != nil {
|
|
||||||
depthWalk(n.Comment, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.ValueSpec:
|
|
||||||
if n.Doc != nil {
|
|
||||||
depthWalk(n.Doc, depth+1, fun)
|
|
||||||
}
|
|
||||||
walkIdentList(n.Names, depth+1, fun)
|
|
||||||
if n.Type != nil {
|
|
||||||
depthWalk(n.Type, depth+1, fun)
|
|
||||||
}
|
|
||||||
walkExprList(n.Values, depth+1, fun)
|
|
||||||
if n.Comment != nil {
|
|
||||||
depthWalk(n.Comment, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.TypeSpec:
|
|
||||||
if n.Doc != nil {
|
|
||||||
depthWalk(n.Doc, depth+1, fun)
|
|
||||||
}
|
|
||||||
depthWalk(n.Name, depth+1, fun)
|
|
||||||
depthWalk(n.Type, depth+1, fun)
|
|
||||||
if n.Comment != nil {
|
|
||||||
depthWalk(n.Comment, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.BadDecl:
|
|
||||||
|
|
||||||
case *ast.GenDecl:
|
|
||||||
if n.Doc != nil {
|
|
||||||
depthWalk(n.Doc, depth+1, fun)
|
|
||||||
}
|
|
||||||
for _, s := range n.Specs {
|
|
||||||
depthWalk(s, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
case *ast.FuncDecl:
|
|
||||||
if n.Doc != nil {
|
|
||||||
depthWalk(n.Doc, depth+1, fun)
|
|
||||||
}
|
|
||||||
if n.Recv != nil {
|
|
||||||
depthWalk(n.Recv, depth+1, fun)
|
|
||||||
}
|
|
||||||
depthWalk(n.Name, depth+1, fun)
|
|
||||||
depthWalk(n.Type, depth+1, fun)
|
|
||||||
if n.Body != nil {
|
|
||||||
depthWalk(n.Body, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Files and packages
|
|
||||||
case *ast.File:
|
|
||||||
if n.Doc != nil {
|
|
||||||
depthWalk(n.Doc, depth+1, fun)
|
|
||||||
}
|
|
||||||
depthWalk(n.Name, depth+1, fun)
|
|
||||||
walkDeclList(n.Decls, depth+1, fun)
|
|
||||||
// don't walk n.Comments - they have been
|
|
||||||
// visited already through the individual
|
|
||||||
// nodes
|
|
||||||
|
|
||||||
case *ast.Package:
|
|
||||||
for _, f := range n.Files {
|
|
||||||
depthWalk(f, depth+1, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
default:
|
|
||||||
panic(fmt.Sprintf("gas.depthWalk: unexpected node type %T", n))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
type Selector interface {
|
|
||||||
Final(ast.Node)
|
|
||||||
Partial(ast.Node) bool
|
|
||||||
}
|
|
||||||
|
|
||||||
func Select(s Selector, n ast.Node, bits ...reflect.Type) {
|
|
||||||
fun := func(n ast.Node, d int) bool {
|
|
||||||
if d < len(bits) && reflect.TypeOf(n) == bits[d] {
|
|
||||||
if d == len(bits)-1 {
|
|
||||||
s.Final(n)
|
|
||||||
return false
|
|
||||||
} else if s.Partial(n) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
depthWalk(n, 0, fun)
|
|
||||||
}
|
|
||||||
|
|
||||||
// SimpleSelect will try to match a path through a sub-tree starting at a given AST node.
|
|
||||||
// The type of each node in the path at a given depth must match its entry in list of
|
|
||||||
// node types given.
|
|
||||||
func SimpleSelect(n ast.Node, bits ...reflect.Type) ast.Node {
|
|
||||||
var found ast.Node
|
|
||||||
fun := func(n ast.Node, d int) bool {
|
|
||||||
if found != nil {
|
|
||||||
return false // short cut logic if we have found a match
|
|
||||||
}
|
|
||||||
|
|
||||||
if d < len(bits) && reflect.TypeOf(n) == bits[d] {
|
|
||||||
if d == len(bits)-1 {
|
|
||||||
found = n
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
|
|
||||||
depthWalk(n, 0, fun)
|
|
||||||
return found
|
|
||||||
}
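For context on the removed SimpleSelect helper: the node types supplied must match the walked path depth for depth, starting with the root node itself, and the node found at the final depth is returned (this is exactly how the old selectName helper in core/helpers.go used it). A hedged sketch, assuming call is the *ast.CallExpr for b.Write(data):

	sel := SimpleSelect(call,
		reflect.TypeOf(&ast.CallExpr{}),     // depth 0: the root itself
		reflect.TypeOf(&ast.SelectorExpr{}), // depth 1: the first matching child
	)
	if node, ok := sel.(*ast.SelectorExpr); ok {
		_ = node.Sel.Name // "Write"
	}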
|
|
251 filelist_test.go
@ -1,251 +0,0 @@
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"reflect"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
func Test_newFileList(t *testing.T) {
|
|
||||||
type args struct {
|
|
||||||
paths []string
|
|
||||||
}
|
|
||||||
tests := []struct {
|
|
||||||
name string
|
|
||||||
args args
|
|
||||||
want *fileList
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
name: "nil paths",
|
|
||||||
args: args{paths: nil},
|
|
||||||
want: &fileList{patterns: map[string]struct{}{}},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "empty paths",
|
|
||||||
args: args{paths: []string{}},
|
|
||||||
want: &fileList{patterns: map[string]struct{}{}},
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "have paths",
|
|
||||||
args: args{paths: []string{"*_test.go"}},
|
|
||||||
want: &fileList{patterns: map[string]struct{}{
|
|
||||||
"*_test.go": struct{}{},
|
|
||||||
}},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for _, tt := range tests {
|
|
||||||
if got := newFileList(tt.args.paths...); !reflect.DeepEqual(got, tt.want) {
|
|
||||||
t.Errorf("%q. newFileList() = %v, want %v", tt.name, got, tt.want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func Test_fileList_String(t *testing.T) {
|
|
||||||
type fields struct {
|
|
||||||
patterns []string
|
|
||||||
}
|
|
||||||
tests := []struct {
|
|
||||||
name string
|
|
||||||
fields fields
|
|
||||||
want string
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
name: "nil patterns",
|
|
||||||
fields: fields{patterns: nil},
|
|
||||||
want: "",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "empty patterns",
|
|
||||||
fields: fields{patterns: []string{}},
|
|
||||||
want: "",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "one pattern",
|
|
||||||
fields: fields{patterns: []string{"foo"}},
|
|
||||||
want: "foo",
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "two patterns",
|
|
||||||
fields: fields{patterns: []string{"bar", "foo"}},
|
|
||||||
want: "bar, foo",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for _, tt := range tests {
|
|
||||||
f := newFileList(tt.fields.patterns...)
|
|
||||||
if got := f.String(); got != tt.want {
|
|
||||||
t.Errorf("%q. fileList.String() = %v, want %v", tt.name, got, tt.want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func Test_fileList_Set(t *testing.T) {
|
|
||||||
type fields struct {
|
|
||||||
patterns []string
|
|
||||||
}
|
|
||||||
type args struct {
|
|
||||||
path string
|
|
||||||
}
|
|
||||||
tests := []struct {
|
|
||||||
name string
|
|
||||||
fields fields
|
|
||||||
args args
|
|
||||||
want map[string]struct{}
|
|
||||||
wantErr bool
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
name: "add empty path",
|
|
||||||
fields: fields{patterns: nil},
|
|
||||||
args: args{path: ""},
|
|
||||||
want: map[string]struct{}{},
|
|
||||||
wantErr: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "add path to nil patterns",
|
|
||||||
fields: fields{patterns: nil},
|
|
||||||
args: args{path: "foo"},
|
|
||||||
want: map[string]struct{}{
|
|
||||||
"foo": struct{}{},
|
|
||||||
},
|
|
||||||
wantErr: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "add path to empty patterns",
|
|
||||||
fields: fields{patterns: []string{}},
|
|
||||||
args: args{path: "foo"},
|
|
||||||
want: map[string]struct{}{
|
|
||||||
"foo": struct{}{},
|
|
||||||
},
|
|
||||||
wantErr: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "add path to populated patterns",
|
|
||||||
fields: fields{patterns: []string{"foo"}},
|
|
||||||
args: args{path: "bar"},
|
|
||||||
want: map[string]struct{}{
|
|
||||||
"foo": struct{}{},
|
|
||||||
"bar": struct{}{},
|
|
||||||
},
|
|
||||||
wantErr: false,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for _, tt := range tests {
|
|
||||||
f := newFileList(tt.fields.patterns...)
|
|
||||||
if err := f.Set(tt.args.path); (err != nil) != tt.wantErr {
|
|
||||||
t.Errorf("%q. fileList.Set() error = %v, wantErr %v", tt.name, err, tt.wantErr)
|
|
||||||
}
|
|
||||||
if !reflect.DeepEqual(f.patterns, tt.want) {
|
|
||||||
t.Errorf("%q. got state fileList.patterns = %v, want state %v", tt.name, f.patterns, tt.want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func Test_fileList_Contains(t *testing.T) {
|
|
||||||
type fields struct {
|
|
||||||
patterns []string
|
|
||||||
}
|
|
||||||
type args struct {
|
|
||||||
path string
|
|
||||||
}
|
|
||||||
tests := []struct {
|
|
||||||
name string
|
|
||||||
fields fields
|
|
||||||
args args
|
|
||||||
want bool
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
name: "nil patterns",
|
|
||||||
fields: fields{patterns: nil},
|
|
||||||
args: args{path: "foo"},
|
|
||||||
want: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "empty patterns",
|
|
||||||
fields: fields{patterns: nil},
|
|
||||||
args: args{path: "foo"},
|
|
||||||
want: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "one pattern, no wildcard, no match",
|
|
||||||
fields: fields{patterns: []string{"foo"}},
|
|
||||||
args: args{path: "bar"},
|
|
||||||
want: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "one pattern, no wildcard, match",
|
|
||||||
fields: fields{patterns: []string{"foo"}},
|
|
||||||
args: args{path: "foo"},
|
|
||||||
want: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "one pattern, wildcard prefix, match",
|
|
||||||
fields: fields{patterns: []string{"*foo"}},
|
|
||||||
args: args{path: "foo"},
|
|
||||||
want: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "one pattern, wildcard suffix, match",
|
|
||||||
fields: fields{patterns: []string{"foo*"}},
|
|
||||||
args: args{path: "foo"},
|
|
||||||
want: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "one pattern, wildcard both ends, match",
|
|
||||||
fields: fields{patterns: []string{"*foo*"}},
|
|
||||||
args: args{path: "foo"},
|
|
||||||
want: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "default test match 1",
|
|
||||||
fields: fields{patterns: []string{"*_test.go"}},
|
|
||||||
args: args{path: "foo_test.go"},
|
|
||||||
want: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "default test match 2",
|
|
||||||
fields: fields{patterns: []string{"*_test.go"}},
|
|
||||||
args: args{path: "bar/foo_test.go"},
|
|
||||||
want: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "default test match 3",
|
|
||||||
fields: fields{patterns: []string{"*_test.go"}},
|
|
||||||
args: args{path: "/bar/foo_test.go"},
|
|
||||||
want: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "default test match 4",
|
|
||||||
fields: fields{patterns: []string{"*_test.go"}},
|
|
||||||
args: args{path: "baz/bar/foo_test.go"},
|
|
||||||
want: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "default test match 5",
|
|
||||||
fields: fields{patterns: []string{"*_test.go"}},
|
|
||||||
args: args{path: "/baz/bar/foo_test.go"},
|
|
||||||
want: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "many patterns, no match",
|
|
||||||
fields: fields{patterns: []string{"*_one.go", "*_two.go"}},
|
|
||||||
args: args{path: "/baz/bar/foo_test.go"},
|
|
||||||
want: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "many patterns, match",
|
|
||||||
fields: fields{patterns: []string{"*_one.go", "*_two.go", "*_test.go"}},
|
|
||||||
args: args{path: "/baz/bar/foo_test.go"},
|
|
||||||
want: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: "sub-folder, match",
|
|
||||||
fields: fields{patterns: []string{"vendor"}},
|
|
||||||
args: args{path: "/baz/vendor/bar/foo_test.go"},
|
|
||||||
want: true,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for _, tt := range tests {
|
|
||||||
f := newFileList(tt.fields.patterns...)
|
|
||||||
if got := f.Contains(tt.args.path); got != tt.want {
|
|
||||||
t.Errorf("%q. fileList.Contains() = %v, want %v", tt.name, got, tt.want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
13 gas_suite_test.go Normal file
@ -0,0 +1,13 @@
package gas_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"testing"
)

func TestGas(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Gas Suite")
}
25 glide.lock generated Normal file
@ -0,0 +1,25 @@
hash: fb4cbcb4f806804f30683cd298d9316522f1d7678498039eccdb29f020de1c7f
updated: 2017-05-09T21:54:08.9517391-07:00
imports:
- name: github.com/kisielk/gotool
  version: 0de1eaf82fa3f583ce21fde859f1e7e0c5e9b220
- name: github.com/nbutton23/zxcvbn-go
  version: a22cb81b2ecdde8b68e9ffb8824731cbf88e1de4
  subpackages:
  - adjacency
  - data
  - entropy
  - frequency
  - match
  - matching
  - scoring
  - utils/math
- name: github.com/ryanuber/go-glob
  version: 572520ed46dbddaed19ea3d9541bdd0494163693
- name: golang.org/x/tools
  version: 1dbffd0798679c0c6b466e620725135944cfddea
  subpackages:
  - go/ast/astutil
  - go/buildutil
  - go/loader
testImports: []
@ -12,41 +12,16 @@
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
|
|
||||||
package core
|
package gas
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"go/ast"
|
"go/ast"
|
||||||
"go/token"
|
"go/token"
|
||||||
"go/types"
|
"go/types"
|
||||||
"reflect"
|
|
||||||
"regexp"
|
|
||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
// helpfull "canned" matching routines ----------------------------------------
|
|
||||||
|
|
||||||
func selectName(n ast.Node, s reflect.Type) (string, bool) {
|
|
||||||
t := reflect.TypeOf(&ast.SelectorExpr{})
|
|
||||||
if node, ok := SimpleSelect(n, s, t).(*ast.SelectorExpr); ok {
|
|
||||||
t = reflect.TypeOf(&ast.Ident{})
|
|
||||||
if ident, ok := SimpleSelect(node.X, t).(*ast.Ident); ok {
|
|
||||||
return strings.Join([]string{ident.Name, node.Sel.Name}, "."), ok
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return "", false
|
|
||||||
}
|
|
||||||
|
|
||||||
// MatchCall will match an ast.CallNode if its method name obays the given regex.
|
|
||||||
func MatchCall(n ast.Node, r *regexp.Regexp) *ast.CallExpr {
|
|
||||||
t := reflect.TypeOf(&ast.CallExpr{})
|
|
||||||
if name, ok := selectName(n, t); ok && r.MatchString(name) {
|
|
||||||
return n.(*ast.CallExpr)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// MatchCallByPackage ensures that the specified package is imported,
|
// MatchCallByPackage ensures that the specified package is imported,
|
||||||
// adjusts the name for any aliases and ignores cases that are
|
// adjusts the name for any aliases and ignores cases that are
|
||||||
// initialization only imports.
|
// initialization only imports.
|
||||||
|
@ -100,11 +75,13 @@ func MatchCallByType(n ast.Node, ctx *Context, requiredType string, calls ...str
|
||||||
return nil, false
|
return nil, false
|
||||||
}
|
}
|
||||||
|
|
||||||
// MatchCompLit will match an ast.CompositeLit if its string value obays the given regex.
|
// MatchCompLit will match an ast.CompositeLit based on the supplied type
|
||||||
func MatchCompLit(n ast.Node, r *regexp.Regexp) *ast.CompositeLit {
|
func MatchCompLit(n ast.Node, ctx *Context, required string) *ast.CompositeLit {
|
||||||
t := reflect.TypeOf(&ast.CompositeLit{})
|
if complit, ok := n.(*ast.CompositeLit); ok {
|
||||||
if name, ok := selectName(n, t); ok && r.MatchString(name) {
|
typeOf := ctx.Info.TypeOf(complit)
|
||||||
return n.(*ast.CompositeLit)
|
if typeOf.String() == required {
|
||||||
|
return complit
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
@ -117,7 +94,7 @@ func GetInt(n ast.Node) (int64, error) {
|
||||||
return 0, fmt.Errorf("Unexpected AST node type: %T", n)
|
return 0, fmt.Errorf("Unexpected AST node type: %T", n)
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetInt will read and return a float value from an ast.BasicLit
|
// GetFloat will read and return a float value from an ast.BasicLit
|
||||||
func GetFloat(n ast.Node) (float64, error) {
|
func GetFloat(n ast.Node) (float64, error) {
|
||||||
if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.FLOAT {
|
if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.FLOAT {
|
||||||
return strconv.ParseFloat(node.Value, 64)
|
return strconv.ParseFloat(node.Value, 64)
|
||||||
|
@ -125,7 +102,7 @@ func GetFloat(n ast.Node) (float64, error) {
|
||||||
return 0.0, fmt.Errorf("Unexpected AST node type: %T", n)
|
return 0.0, fmt.Errorf("Unexpected AST node type: %T", n)
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetInt will read and return a char value from an ast.BasicLit
|
// GetChar will read and return a char value from an ast.BasicLit
|
||||||
func GetChar(n ast.Node) (byte, error) {
|
func GetChar(n ast.Node) (byte, error) {
|
||||||
if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.CHAR {
|
if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.CHAR {
|
||||||
return node.Value[0], nil
|
return node.Value[0], nil
|
||||||
|
@ -133,7 +110,7 @@ func GetChar(n ast.Node) (byte, error) {
|
||||||
return 0, fmt.Errorf("Unexpected AST node type: %T", n)
|
return 0, fmt.Errorf("Unexpected AST node type: %T", n)
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetInt will read and return a string value from an ast.BasicLit
|
// GetString will read and return a string value from an ast.BasicLit
|
||||||
func GetString(n ast.Node) (string, error) {
|
func GetString(n ast.Node) (string, error) {
|
||||||
if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.STRING {
|
if node, ok := n.(*ast.BasicLit); ok && node.Kind == token.STRING {
|
||||||
return strconv.Unquote(node.Value)
|
return strconv.Unquote(node.Value)
|
||||||
|
@ -170,13 +147,11 @@ func GetCallInfo(n ast.Node, ctx *Context) (string, string, error) {
|
||||||
t := ctx.Info.TypeOf(expr)
|
t := ctx.Info.TypeOf(expr)
|
||||||
if t != nil {
|
if t != nil {
|
||||||
return t.String(), fn.Sel.Name, nil
|
return t.String(), fn.Sel.Name, nil
|
||||||
} else {
|
}
|
||||||
return "undefined", fn.Sel.Name, fmt.Errorf("missing type info")
|
return "undefined", fn.Sel.Name, fmt.Errorf("missing type info")
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
return expr.Name, fn.Sel.Name, nil
|
return expr.Name, fn.Sel.Name, nil
|
||||||
}
|
}
|
||||||
}
|
|
||||||
case *ast.Ident:
|
case *ast.Ident:
|
||||||
return ctx.Pkg.Name(), fn.Name, nil
|
return ctx.Pkg.Name(), fn.Name, nil
|
||||||
}
|
}
|
||||||
|
@ -205,7 +180,7 @@ func GetImportedName(path string, ctx *Context) (string, bool) {
|
||||||
// GetImportPath resolves the full import path of an identifer based on
|
// GetImportPath resolves the full import path of an identifier based on
|
||||||
// the imports in the current context.
|
// the imports in the current context.
|
||||||
func GetImportPath(name string, ctx *Context) (string, bool) {
|
func GetImportPath(name string, ctx *Context) (string, bool) {
|
||||||
for path, _ := range ctx.Imports.Imported {
|
for path := range ctx.Imports.Imported {
|
||||||
if imported, ok := GetImportedName(path, ctx); ok && imported == name {
|
if imported, ok := GetImportedName(path, ctx); ok && imported == name {
|
||||||
return path, true
|
return path, true
|
||||||
}
|
}
|
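A hedged usage note for the two import helpers around this hunk: GetImportedName maps an import path to the local name it has in the scanned file (honoring aliases, skipping init-only imports), while GetImportPath goes the other way. Inside a rule's Match, with ctx being the *Context the analyzer passes in, the values shown are illustrative:

	if path, found := GetImportPath("md5", ctx); found {
		_ = path // "crypto/md5" when the scanned file imports it
	}
	if name, ok := GetImportedName("crypto/md5", ctx); ok {
		_ = name // "md5" unless the import was aliased
	}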
14 helpers_test.go Normal file
@ -0,0 +1,14 @@
package gas_test

import (
	. "github.com/onsi/ginkgo"
	//. "github.com/onsi/gomega"
)

var _ = Describe("Helpers", func() {
	Context("todo", func() {
		It("should fail", func() {
			Skip("Not implemented")
		})
	})
})
67 import_tracker.go Normal file
@ -0,0 +1,67 @@
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package gas

import (
	"go/ast"
	"go/types"
	"strings"
)

// ImportTracker is used to normalize the packages that have been imported
// by a source file. It is able to differentiate between plain imports, aliased
// imports and init only imports.
type ImportTracker struct {
	Imported map[string]string
	Aliased  map[string]string
	InitOnly map[string]bool
}

// NewImportTracker creates an empty Import tracker instance
func NewImportTracker() *ImportTracker {
	return &ImportTracker{
		make(map[string]string),
		make(map[string]string),
		make(map[string]bool),
	}
}

// TrackPackages tracks all the imports used by the supplied packages
func (t *ImportTracker) TrackPackages(pkgs ...*types.Package) {
	for _, pkg := range pkgs {
		t.Imported[pkg.Path()] = pkg.Name()
		// Transient imports
		//for _, imp := range pkg.Imports() {
		//	t.Imported[imp.Path()] = imp.Name()
		//}
	}
}

// TrackImport tracks imports and handles the 'unsafe' import
func (t *ImportTracker) TrackImport(n ast.Node) {
	if imported, ok := n.(*ast.ImportSpec); ok {
		path := strings.Trim(imported.Path.Value, `"`)
		if imported.Name != nil {
			if imported.Name.Name == "_" {
				// Initialization only import
				t.InitOnly[path] = true
			} else {
				// Aliased import
				t.Aliased[path] = imported.Name.Name
			}
		}
		if path == "unsafe" {
			t.Imported[path] = path
		}
	}
}
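A short sketch of how the new ImportTracker is intended to be fed (assumed driver code; only the three methods are taken from this file). TrackImport ignores any node that is not an *ast.ImportSpec, so it can simply be called from a walk over the whole file:

	tracker := NewImportTracker()
	tracker.TrackPackages(pkg) // pkg is the *types.Package produced by the type checker
	ast.Inspect(file, func(n ast.Node) bool {
		tracker.TrackImport(n) // records plain, aliased, init-only and "unsafe" imports
		return true
	})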
|
29 import_tracker_test.go Normal file
@ -0,0 +1,29 @@
package gas_test

import (
	. "github.com/onsi/ginkgo"
	//. "github.com/onsi/gomega"
)

var _ = Describe("ImportTracker", func() {
	var (
		source string
	)

	BeforeEach(func() {
		source = `// TODO(gm)`
	})
	Context("when I have a valid go package", func() {
		It("should record all import specs", func() {
			Skip("Not implemented")
		})

		It("should correctly track aliased package imports", func() {
			Skip("Not implemented")
		})

		It("should correctly track init only packages", func() {
			Skip("Not implemented")
		})
	})
})
|
@ -11,32 +11,37 @@
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
// See the License for the specific language governing permissions and
|
// See the License for the specific language governing permissions and
|
||||||
// limitations under the License.
|
// limitations under the License.
|
||||||
package core
|
|
||||||
|
package gas
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
"fmt"
|
||||||
"go/ast"
|
"go/ast"
|
||||||
"os"
|
"os"
|
||||||
|
"strconv"
|
||||||
)
|
)
|
||||||
|
|
||||||
// Score type used by severity and confidence values
|
// Score type used by severity and confidence values
|
||||||
type Score int
|
type Score int
|
||||||
|
|
||||||
const (
|
const (
|
||||||
Low Score = iota // Low value
|
// Low severity or confidence
|
||||||
Medium // Medium value
|
Low Score = iota
|
||||||
High // High value
|
// Medium severity or confidence
|
||||||
|
Medium
|
||||||
|
// High severity or confidence
|
||||||
|
High
|
||||||
)
|
)
|
||||||
|
|
||||||
// An Issue is returnd by a GAS rule if it discovers an issue with the scanned code.
|
// Issue is returned by a GAS rule if it discovers an issue with the scanned code.
|
||||||
type Issue struct {
|
type Issue struct {
|
||||||
Severity Score `json:"severity"` // issue severity (how problematic it is)
|
Severity Score `json:"severity"` // issue severity (how problematic it is)
|
||||||
Confidence Score `json:"confidence"` // issue confidence (how sure we are we found it)
|
Confidence Score `json:"confidence"` // issue confidence (how sure we are we found it)
|
||||||
What string `json:"details"` // Human readable explanation
|
What string `json:"details"` // Human readable explanation
|
||||||
File string `json:"file"` // File name we found it in
|
File string `json:"file"` // File name we found it in
|
||||||
Code string `json:"code"` // Impacted code line
|
Code string `json:"code"` // Impacted code line
|
||||||
Line int `json:"line"` // Line number in file
|
Line string `json:"line"` // Line number in file
|
||||||
}
|
}
|
||||||
|
|
||||||
// MetaData is embedded in all GAS rules. The Severity, Confidence and What message
|
// MetaData is embedded in all GAS rules. The Severity, Confidence and What message
|
||||||
|
@ -85,7 +90,12 @@ func NewIssue(ctx *Context, node ast.Node, desc string, severity Score, confiden
|
||||||
var code string
|
var code string
|
||||||
fobj := ctx.FileSet.File(node.Pos())
|
fobj := ctx.FileSet.File(node.Pos())
|
||||||
name := fobj.Name()
|
name := fobj.Name()
|
||||||
line := fobj.Line(node.Pos())
|
|
||||||
|
start, end := fobj.Line(node.Pos()), fobj.Line(node.End())
|
||||||
|
line := strconv.Itoa(start)
|
||||||
|
if start != end {
|
||||||
|
line = fmt.Sprintf("%d-%d", start, end)
|
||||||
|
}
|
||||||
|
|
||||||
if file, err := os.Open(fobj.Name()); err == nil {
|
if file, err := os.Open(fobj.Name()); err == nil {
|
||||||
defer file.Close()
|
defer file.Close()
|
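Because Issue.Line is now a string holding either a single line ("2") or an inclusive range ("3-4"), consumers that previously treated it as an int need a small adapter. A hedged sketch of one way to parse it back (parseLineRange is not part of this change; it relies on the strings and strconv packages):

	func parseLineRange(line string) (start, end int, err error) {
		parts := strings.SplitN(line, "-", 2)
		if start, err = strconv.Atoi(parts[0]); err != nil {
			return 0, 0, err
		}
		end = start
		if len(parts) == 2 {
			end, err = strconv.Atoi(parts[1])
		}
		return start, end, err
	}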
99 issue_test.go Normal file
@ -0,0 +1,99 @@
|
||||||
|
package gas_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"go/ast"
|
||||||
|
|
||||||
|
"github.com/GoASTScanner/gas"
|
||||||
|
"github.com/GoASTScanner/gas/rules"
|
||||||
|
"github.com/GoASTScanner/gas/testutils"
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("Issue", func() {
|
||||||
|
|
||||||
|
Context("when creating a new issue", func() {
|
||||||
|
It("should create a code snippet from the specified ast.Node", func() {
|
||||||
|
var target *ast.BasicLit
|
||||||
|
source := `package main
|
||||||
|
const foo = "bar"
|
||||||
|
func main(){
|
||||||
|
println(foo)
|
||||||
|
}
|
||||||
|
`
|
||||||
|
pkg := testutils.NewTestPackage()
|
||||||
|
defer pkg.Close()
|
||||||
|
pkg.AddFile("foo.go", source)
|
||||||
|
ctx := pkg.CreateContext("foo.go")
|
||||||
|
v := testutils.NewMockVisitor()
|
||||||
|
v.Callback = func(n ast.Node, ctx *gas.Context) bool {
|
||||||
|
if node, ok := n.(*ast.BasicLit); ok {
|
||||||
|
target = node
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
v.Context = ctx
|
||||||
|
ast.Walk(v, ctx.Root)
|
||||||
|
Expect(target).ShouldNot(BeNil())
|
||||||
|
|
||||||
|
issue := gas.NewIssue(ctx, target, "", gas.High, gas.High)
|
||||||
|
Expect(issue).ShouldNot(BeNil())
|
||||||
|
Expect(issue.Code).Should(MatchRegexp(`"bar"`))
|
||||||
|
Expect(issue.Line).Should(Equal("2"))
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should return an error if specific context is not able to be obtained", func() {
|
||||||
|
Skip("Not implemented")
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should provide accurate line and file information", func() {
|
||||||
|
Skip("Not implemented")
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should provide accurate line and file information for multi-line statements", func() {
|
||||||
|
var target *ast.BinaryExpr
|
||||||
|
|
||||||
|
source := `package main
|
||||||
|
import "os"
|
||||||
|
func main(){`
|
||||||
|
source += "q := `SELECT * FROM table WHERE` + \n os.Args[1] + `= ?` // nolint: gas\n"
|
||||||
|
source += `println(q)}`
|
||||||
|
|
||||||
|
pkg := testutils.NewTestPackage()
|
||||||
|
defer pkg.Close()
|
||||||
|
pkg.AddFile("foo.go", source)
|
||||||
|
ctx := pkg.CreateContext("foo.go")
|
||||||
|
v := testutils.NewMockVisitor()
|
||||||
|
v.Callback = func(n ast.Node, ctx *gas.Context) bool {
|
||||||
|
if node, ok := n.(*ast.BinaryExpr); ok {
|
||||||
|
target = node
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
v.Context = ctx
|
||||||
|
ast.Walk(v, ctx.Root)
|
||||||
|
Expect(target).ShouldNot(BeNil())
|
||||||
|
|
||||||
|
// Use SQL rule to check binary expr
|
||||||
|
cfg := gas.NewConfig()
|
||||||
|
rule, _ := rules.NewSQLStrConcat(cfg)
|
||||||
|
issue, err := rule.Match(target, ctx)
|
||||||
|
Expect(err).ShouldNot(HaveOccurred())
|
||||||
|
Expect(issue).ShouldNot(BeNil())
|
||||||
|
Expect(issue.File).Should(MatchRegexp("foo.go"))
|
||||||
|
Expect(issue.Line).Should(MatchRegexp("3-4"))
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should maintain the provided severity score", func() {
|
||||||
|
Skip("Not implemented")
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should maintain the provided confidence score", func() {
|
||||||
|
Skip("Not implemented")
|
||||||
|
})
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
})
|
293 main.go
@ -1,293 +0,0 @@
|
||||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"flag"
|
|
||||||
"fmt"
|
|
||||||
"io/ioutil"
|
|
||||||
"log"
|
|
||||||
"os"
|
|
||||||
"path/filepath"
|
|
||||||
"sort"
|
|
||||||
"strings"
|
|
||||||
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
|
||||||
"github.com/GoASTScanner/gas/output"
|
|
||||||
)
|
|
||||||
|
|
||||||
type recursion bool
|
|
||||||
|
|
||||||
const (
|
|
||||||
recurse recursion = true
|
|
||||||
noRecurse recursion = false
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
// #nosec flag
|
|
||||||
flagIgnoreNoSec = flag.Bool("nosec", false, "Ignores #nosec comments when set")
|
|
||||||
|
|
||||||
// format output
|
|
||||||
flagFormat = flag.String("fmt", "text", "Set output format. Valid options are: json, csv, html, or text")
|
|
||||||
|
|
||||||
// output file
|
|
||||||
flagOutput = flag.String("out", "", "Set output file for results")
|
|
||||||
|
|
||||||
// config file
|
|
||||||
flagConfig = flag.String("conf", "", "Path to optional config file")
|
|
||||||
|
|
||||||
// quiet
|
|
||||||
flagQuiet = flag.Bool("quiet", false, "Only show output when errors are found")
|
|
||||||
|
|
||||||
usageText = `
|
|
||||||
GAS - Go AST Scanner
|
|
||||||
|
|
||||||
Gas analyzes Go source code to look for common programming mistakes that
|
|
||||||
can lead to security problems.
|
|
||||||
|
|
||||||
USAGE:
|
|
||||||
|
|
||||||
# Check a single Go file
|
|
||||||
$ gas example.go
|
|
||||||
|
|
||||||
# Check all files under the current directory and save results in
|
|
||||||
# json format.
|
|
||||||
$ gas -fmt=json -out=results.json ./...
|
|
||||||
|
|
||||||
# Run a specific set of rules (by default all rules will be run):
|
|
||||||
$ gas -include=G101,G203,G401 ./...
|
|
||||||
|
|
||||||
# Run all rules except the provided
|
|
||||||
$ gas -exclude=G101 ./...
|
|
||||||
|
|
||||||
`
|
|
||||||
|
|
||||||
logger *log.Logger
|
|
||||||
)
|
|
||||||
|
|
||||||
func extendConfList(conf map[string]interface{}, name string, inputStr string) {
|
|
||||||
if inputStr == "" {
|
|
||||||
conf[name] = []string{}
|
|
||||||
} else {
|
|
||||||
input := strings.Split(inputStr, ",")
|
|
||||||
if val, ok := conf[name]; ok {
|
|
||||||
if data, ok := val.(*[]string); ok {
|
|
||||||
conf[name] = append(*data, input...)
|
|
||||||
} else {
|
|
||||||
logger.Fatal("Config item must be a string list: ", name)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
conf[name] = input
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func buildConfig(incRules string, excRules string) map[string]interface{} {
|
|
||||||
config := make(map[string]interface{})
|
|
||||||
if flagConfig != nil && *flagConfig != "" { // parse config if we have one
|
|
||||||
if data, err := ioutil.ReadFile(*flagConfig); err == nil {
|
|
||||||
if err := json.Unmarshal(data, &(config)); err != nil {
|
|
||||||
logger.Fatal("Could not parse JSON config: ", *flagConfig, ": ", err)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
logger.Fatal("Could not read config file: ", *flagConfig)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// add in CLI include and exclude data
|
|
||||||
extendConfList(config, "include", incRules)
|
|
||||||
extendConfList(config, "exclude", excRules)
|
|
||||||
|
|
||||||
// override ignoreNosec if given on CLI
|
|
||||||
if flagIgnoreNoSec != nil {
|
|
||||||
config["ignoreNosec"] = *flagIgnoreNoSec
|
|
||||||
} else {
|
|
||||||
val, ok := config["ignoreNosec"]
|
|
||||||
if !ok {
|
|
||||||
config["ignoreNosec"] = false
|
|
||||||
} else if _, ok := val.(bool); !ok {
|
|
||||||
logger.Fatal("Config value must be a bool: 'ignoreNosec'")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return config
|
|
||||||
}
|
|
||||||
|
|
||||||
// #nosec
|
|
||||||
func usage() {
|
|
||||||
|
|
||||||
fmt.Fprintln(os.Stderr, usageText)
|
|
||||||
fmt.Fprint(os.Stderr, "OPTIONS:\n\n")
|
|
||||||
flag.PrintDefaults()
|
|
||||||
fmt.Fprint(os.Stderr, "\n\nRULES:\n\n")
|
|
||||||
|
|
||||||
// sorted rule list for eas of reading
|
|
||||||
rl := GetFullRuleList()
|
|
||||||
keys := make([]string, 0, len(rl))
|
|
||||||
for key := range rl {
|
|
||||||
keys = append(keys, key)
|
|
||||||
}
|
|
||||||
sort.Strings(keys)
|
|
||||||
for _, k := range keys {
|
|
||||||
v := rl[k]
|
|
||||||
fmt.Fprintf(os.Stderr, "\t%s: %s\n", k, v.description)
|
|
||||||
}
|
|
||||||
fmt.Fprint(os.Stderr, "\n")
|
|
||||||
}
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
|
|
||||||
// Setup usage description
|
|
||||||
flag.Usage = usage
|
|
||||||
|
|
||||||
// Exclude files
|
|
||||||
excluded := newFileList("*_test.go")
|
|
||||||
flag.Var(excluded, "skip", "File pattern to exclude from scan. Uses simple * globs and requires full or partial match")
|
|
||||||
|
|
||||||
incRules := ""
|
|
||||||
flag.StringVar(&incRules, "include", "", "Comma separated list of rules IDs to include. (see rule list)")
|
|
||||||
|
|
||||||
excRules := ""
|
|
||||||
flag.StringVar(&excRules, "exclude", "", "Comma separated list of rules IDs to exclude. (see rule list)")
|
|
||||||
|
|
||||||
// Custom commands / utilities to run instead of default analyzer
|
|
||||||
tools := newUtils()
|
|
||||||
flag.Var(tools, "tool", "GAS utilities to assist with rule development")
|
|
||||||
|
|
||||||
// Setup logging
|
|
||||||
logger = log.New(os.Stderr, "[gas] ", log.LstdFlags)
|
|
||||||
|
|
||||||
// Parse command line arguments
|
|
||||||
flag.Parse()
|
|
||||||
|
|
||||||
// Ensure at least one file was specified
|
|
||||||
if flag.NArg() == 0 {
|
|
||||||
|
|
||||||
fmt.Fprintf(os.Stderr, "\nError: FILE [FILE...] or './...' expected\n")
|
|
||||||
flag.Usage()
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Run utils instead of analysis
|
|
||||||
if len(tools.call) > 0 {
|
|
||||||
tools.run(flag.Args()...)
|
|
||||||
os.Exit(0)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Setup analyzer
|
|
||||||
config := buildConfig(incRules, excRules)
|
|
||||||
analyzer := gas.NewAnalyzer(config, logger)
|
|
||||||
AddRules(&analyzer, config)
|
|
||||||
|
|
||||||
toAnalyze := getFilesToAnalyze(flag.Args(), excluded)
|
|
||||||
|
|
||||||
for _, file := range toAnalyze {
|
|
||||||
logger.Printf(`Processing "%s"...`, file)
|
|
||||||
if err := analyzer.Process(file); err != nil {
|
|
||||||
logger.Printf(`Failed to process: "%s"`, file)
|
|
||||||
logger.Println(err)
|
|
||||||
logger.Fatalf(`Halting execution.`)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
issuesFound := len(analyzer.Issues) > 0
|
|
||||||
// Exit quietly if nothing was found
|
|
||||||
if !issuesFound && *flagQuiet {
|
|
||||||
os.Exit(0)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create output report
|
|
||||||
if *flagOutput != "" {
|
|
||||||
outfile, err := os.Create(*flagOutput)
|
|
||||||
if err != nil {
|
|
||||||
logger.Fatalf("Couldn't open: %s for writing. Reason - %s", *flagOutput, err)
|
|
||||||
}
|
|
||||||
defer outfile.Close()
|
|
||||||
output.CreateReport(outfile, *flagFormat, &analyzer)
|
|
||||||
} else {
|
|
||||||
output.CreateReport(os.Stdout, *flagFormat, &analyzer)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Do we have an issue? If so exit 1
|
|
||||||
if issuesFound {
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// getFilesToAnalyze lists all files
|
|
||||||
func getFilesToAnalyze(paths []string, excluded *fileList) []string {
|
|
||||||
//log.Println("getFilesToAnalyze: start")
|
|
||||||
var toAnalyze []string
|
|
||||||
for _, relativePath := range paths {
|
|
||||||
//log.Printf("getFilesToAnalyze: processing \"%s\"\n", path)
|
|
||||||
// get the absolute path before doing anything else
|
|
||||||
path, err := filepath.Abs(relativePath)
|
|
||||||
if err != nil {
|
|
||||||
log.Fatal(err)
|
|
||||||
}
|
|
||||||
if filepath.Base(relativePath) == "..." {
|
|
||||||
toAnalyze = append(
|
|
||||||
toAnalyze,
|
|
||||||
listFiles(filepath.Dir(path), recurse, excluded)...,
|
|
||||||
)
|
|
||||||
} else {
|
|
||||||
var (
|
|
||||||
finfo os.FileInfo
|
|
||||||
err error
|
|
||||||
)
|
|
||||||
if finfo, err = os.Stat(path); err != nil {
|
|
||||||
logger.Fatal(err)
|
|
||||||
}
|
|
||||||
if !finfo.IsDir() {
|
|
||||||
if shouldInclude(path, excluded) {
|
|
||||||
toAnalyze = append(toAnalyze, path)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
toAnalyze = listFiles(path, noRecurse, excluded)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
//log.Println("getFilesToAnalyze: end")
|
|
||||||
return toAnalyze
|
|
||||||
}
|
|
||||||
|
|
||||||
// listFiles returns a list of all files found that pass the shouldInclude check.
|
|
||||||
// If doRecursiveWalk it true, it will walk the tree rooted at absPath, otherwise it
|
|
||||||
// will only include files directly within the dir referenced by absPath.
|
|
||||||
func listFiles(absPath string, doRecursiveWalk recursion, excluded *fileList) []string {
|
|
||||||
var files []string
|
|
||||||
|
|
||||||
walk := func(path string, info os.FileInfo, err error) error {
|
|
||||||
if info.IsDir() && doRecursiveWalk == noRecurse {
|
|
||||||
return filepath.SkipDir
|
|
||||||
}
|
|
||||||
if shouldInclude(path, excluded) {
|
|
||||||
files = append(files, path)
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := filepath.Walk(absPath, walk); err != nil {
|
|
||||||
log.Fatal(err)
|
|
||||||
}
|
|
||||||
return files
|
|
||||||
}
|
|
||||||
|
|
||||||
// shouldInclude checks if a specific path which is expected to reference
|
|
||||||
// a regular file should be included
|
|
||||||
func shouldInclude(path string, excluded *fileList) bool {
|
|
||||||
return filepath.Ext(path) == ".go" && !excluded.Contains(path)
|
|
||||||
}
|
|
45 main_test.go
@ -1,45 +0,0 @@
|
||||||
package main
|
|
||||||
|
|
||||||
import "testing"
|
|
||||||
|
|
||||||
func Test_shouldInclude(t *testing.T) {
|
|
||||||
type args struct {
|
|
||||||
path string
|
|
||||||
excluded *fileList
|
|
||||||
}
|
|
||||||
tests := []struct {
|
|
||||||
name string
|
|
||||||
args args
|
|
||||||
want bool
|
|
||||||
}{
|
|
||||||
{
|
|
||||||
name: "non .go file",
|
|
||||||
args: args{
|
|
||||||
path: "thing.txt",
|
|
||||||
excluded: newFileList(),
|
|
||||||
},
|
|
||||||
want: false,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: ".go file, not excluded",
|
|
||||||
args: args{
|
|
||||||
path: "thing.go",
|
|
||||||
excluded: newFileList(),
|
|
||||||
},
|
|
||||||
want: true,
|
|
||||||
},
|
|
||||||
{
|
|
||||||
name: ".go file, excluded",
|
|
||||||
args: args{
|
|
||||||
path: "thing.go",
|
|
||||||
excluded: newFileList("thing.go"),
|
|
||||||
},
|
|
||||||
want: false,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
for _, tt := range tests {
|
|
||||||
if got := shouldInclude(tt.args.path, tt.args.excluded); got != tt.want {
|
|
||||||
t.Errorf("%q. shouldInclude() = %v, want %v", tt.name, got, tt.want)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -19,18 +19,22 @@ import (
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
htmlTemplate "html/template"
|
htmlTemplate "html/template"
|
||||||
"io"
|
"io"
|
||||||
"strconv"
|
|
||||||
plainTemplate "text/template"
|
plainTemplate "text/template"
|
||||||
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
"github.com/GoASTScanner/gas"
|
||||||
)
|
)
|
||||||
|
|
||||||
// The output format for reported issues
|
// ReportFormat enumerates the output format for reported issues
|
||||||
type ReportFormat int
|
type ReportFormat int
|
||||||
|
|
||||||
const (
|
const (
|
||||||
|
// ReportText is the default format that writes to stdout
|
||||||
ReportText ReportFormat = iota // Plain text format
|
ReportText ReportFormat = iota // Plain text format
|
||||||
|
|
||||||
|
// ReportJSON set the output format to json
|
||||||
ReportJSON // Json format
|
ReportJSON // Json format
|
||||||
|
|
||||||
|
// ReportCSV set the output format to csv
|
||||||
ReportCSV // CSV format
|
ReportCSV // CSV format
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -48,7 +52,18 @@ Summary:
|
||||||
|
|
||||||
`
|
`
|
||||||
|
|
||||||
func CreateReport(w io.Writer, format string, data *gas.Analyzer) error {
|
type reportInfo struct {
|
||||||
|
Issues []*gas.Issue
|
||||||
|
Stats *gas.Metrics
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateReport generates a report for the supplied issues and metrics given
|
||||||
|
// the specified format. The formats currently accepted are: json, csv, html and text.
|
||||||
|
func CreateReport(w io.Writer, format string, issues []*gas.Issue, metrics *gas.Metrics) error {
|
||||||
|
data := &reportInfo{
|
||||||
|
Issues: issues,
|
||||||
|
Stats: metrics,
|
||||||
|
}
|
||||||
var err error
|
var err error
|
||||||
switch format {
|
switch format {
|
||||||
case "json":
|
case "json":
|
||||||
|
@@ -65,7 +80,7 @@ func CreateReport(w io.Writer, format string, data *gas.Analyzer) error {
 	return err
 }
 
-func reportJSON(w io.Writer, data *gas.Analyzer) error {
+func reportJSON(w io.Writer, data *reportInfo) error {
 	raw, err := json.MarshalIndent(data, "", "\t")
 	if err != nil {
 		panic(err)
@@ -78,13 +93,13 @@ func reportJSON(w io.Writer, data *gas.Analyzer) error {
 	return err
 }
 
-func reportCSV(w io.Writer, data *gas.Analyzer) error {
+func reportCSV(w io.Writer, data *reportInfo) error {
 	out := csv.NewWriter(w)
 	defer out.Flush()
 	for _, issue := range data.Issues {
 		err := out.Write([]string{
 			issue.File,
-			strconv.Itoa(issue.Line),
+			issue.Line,
 			issue.What,
 			issue.Severity.String(),
 			issue.Confidence.String(),
@@ -97,7 +112,7 @@ func reportCSV(w io.Writer, data *gas.Analyzer) error {
 	return nil
 }
 
-func reportFromPlaintextTemplate(w io.Writer, reportTemplate string, data *gas.Analyzer) error {
+func reportFromPlaintextTemplate(w io.Writer, reportTemplate string, data *reportInfo) error {
 	t, e := plainTemplate.New("gas").Parse(reportTemplate)
 	if e != nil {
 		return e
@@ -106,7 +121,7 @@ func reportFromPlaintextTemplate(w io.Writer, reportTemplate string, data *gas.A
 	return t.Execute(w, data)
 }
 
-func reportFromHTMLTemplate(w io.Writer, reportTemplate string, data *gas.Analyzer) error {
+func reportFromHTMLTemplate(w io.Writer, reportTemplate string, data *reportInfo) error {
 	t, e := htmlTemplate.New("gas").Parse(reportTemplate)
 	if e != nil {
 		return e
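For readers tracking the API change above, a minimal caller sketch follows. Only the new CreateReport signature and the gas.Issue / gas.Metrics types are taken from this diff; the output import path, the empty placeholder values and the surrounding main function are illustrative assumptions, not part of the change.

package main

import (
	"os"

	"github.com/GoASTScanner/gas"
	"github.com/GoASTScanner/gas/output" // assumed location of the formatter package
)

func main() {
	// issues and metrics would normally come from an analyzer run; empty
	// values are used here only to keep the sketch self-contained.
	issues := []*gas.Issue{}
	metrics := &gas.Metrics{}
	// "json" could equally be "csv", "html" or "text" per the doc comment above.
	if err := output.CreateReport(os.Stdout, "json", issues, metrics); err != nil {
		os.Exit(1)
	}
}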
@@ -12,11 +12,12 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
-package core
+package gas
 
 import "go/ast"
 
 func resolveIdent(n *ast.Ident, c *Context) bool {
+
 	if n.Obj == nil || n.Obj.Kind != ast.Var {
 		return true
 	}
99
resolve_test.go
Normal file
99
resolve_test.go
Normal file
|
@ -0,0 +1,99 @@
|
||||||
|
package gas_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"go/ast"
|
||||||
|
|
||||||
|
"github.com/GoASTScanner/gas"
|
||||||
|
"github.com/GoASTScanner/gas/testutils"
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
var _ = Describe("Resolve ast node to concrete value", func() {
|
||||||
|
Context("when attempting to resolve an ast node", func() {
|
||||||
|
It("should successfully resolve basic literal", func() {
|
||||||
|
var basicLiteral *ast.BasicLit
|
||||||
|
|
||||||
|
pkg := testutils.NewTestPackage()
|
||||||
|
defer pkg.Close()
|
||||||
|
pkg.AddFile("foo.go", `package main; const foo = "bar"; func main(){}`)
|
||||||
|
ctx := pkg.CreateContext("foo.go")
|
||||||
|
v := testutils.NewMockVisitor()
|
||||||
|
v.Callback = func(n ast.Node, ctx *gas.Context) bool {
|
||||||
|
if node, ok := n.(*ast.BasicLit); ok {
|
||||||
|
basicLiteral = node
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
v.Context = ctx
|
||||||
|
ast.Walk(v, ctx.Root)
|
||||||
|
Expect(basicLiteral).ShouldNot(BeNil())
|
||||||
|
Expect(gas.TryResolve(basicLiteral, ctx)).Should(BeTrue())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should successfully resolve identifier", func() {
|
||||||
|
var ident *ast.Ident
|
||||||
|
pkg := testutils.NewTestPackage()
|
||||||
|
defer pkg.Close()
|
||||||
|
pkg.AddFile("foo.go", `package main; var foo string = "bar"; func main(){}`)
|
||||||
|
ctx := pkg.CreateContext("foo.go")
|
||||||
|
v := testutils.NewMockVisitor()
|
||||||
|
v.Callback = func(n ast.Node, ctx *gas.Context) bool {
|
||||||
|
if node, ok := n.(*ast.Ident); ok {
|
||||||
|
ident = node
|
||||||
|
return false
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
v.Context = ctx
|
||||||
|
ast.Walk(v, ctx.Root)
|
||||||
|
Expect(ident).ShouldNot(BeNil())
|
||||||
|
Expect(gas.TryResolve(ident, ctx)).Should(BeTrue())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should successfully resolve assign statement", func() {
|
||||||
|
var assign *ast.AssignStmt
|
||||||
|
pkg := testutils.NewTestPackage()
|
||||||
|
defer pkg.Close()
|
||||||
|
pkg.AddFile("foo.go", `package main; const x = "bar"; func main(){ y := x; println(y) }`)
|
||||||
|
ctx := pkg.CreateContext("foo.go")
|
||||||
|
v := testutils.NewMockVisitor()
|
||||||
|
v.Callback = func(n ast.Node, ctx *gas.Context) bool {
|
||||||
|
if node, ok := n.(*ast.AssignStmt); ok {
|
||||||
|
if id, ok := node.Lhs[0].(*ast.Ident); ok && id.Name == "y" {
|
||||||
|
assign = node
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
v.Context = ctx
|
||||||
|
ast.Walk(v, ctx.Root)
|
||||||
|
Expect(assign).ShouldNot(BeNil())
|
||||||
|
Expect(gas.TryResolve(assign, ctx)).Should(BeTrue())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should successfully resolve a binary statement", func() {
|
||||||
|
var target *ast.BinaryExpr
|
||||||
|
pkg := testutils.NewTestPackage()
|
||||||
|
defer pkg.Close()
|
||||||
|
pkg.AddFile("foo.go", `package main; const (x = "bar"; y = "baz"); func main(){ z := x + y; println(z) }`)
|
||||||
|
ctx := pkg.CreateContext("foo.go")
|
||||||
|
v := testutils.NewMockVisitor()
|
||||||
|
v.Callback = func(n ast.Node, ctx *gas.Context) bool {
|
||||||
|
if node, ok := n.(*ast.BinaryExpr); ok {
|
||||||
|
target = node
|
||||||
|
}
|
||||||
|
return true
|
||||||
|
}
|
||||||
|
v.Context = ctx
|
||||||
|
ast.Walk(v, ctx.Root)
|
||||||
|
Expect(target).ShouldNot(BeNil())
|
||||||
|
Expect(gas.TryResolve(target, ctx)).Should(BeTrue())
|
||||||
|
})
|
||||||
|
|
||||||
|
// TODO: It should resolve call expressions
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
})
|
rule.go (new file)
@@ -0,0 +1,58 @@
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package gas

import (
	"go/ast"
	"reflect"
)

// The Rule interface used by all rules supported by GAS.
type Rule interface {
	Match(ast.Node, *Context) (*Issue, error)
}

// RuleBuilder is used to register a rule definition with the analyzer
type RuleBuilder func(c Config) (Rule, []ast.Node)

// A RuleSet maps lists of rules to the type of AST node they should be run on.
// The analyzer will only invoke rules contained in the list associated with the
// type of AST node it is currently visiting.
type RuleSet map[reflect.Type][]Rule

// NewRuleSet constructs a new RuleSet
func NewRuleSet() RuleSet {
	return make(RuleSet)
}

// Register adds a trigger for the supplied rule for the
// specified ast nodes.
func (r RuleSet) Register(rule Rule, nodes ...ast.Node) {
	for _, n := range nodes {
		t := reflect.TypeOf(n)
		if rules, ok := r[t]; ok {
			r[t] = append(rules, rule)
		} else {
			r[t] = []Rule{rule}
		}
	}
}

// RegisteredFor will return all rules that are registered for a
// specified ast node.
func (r RuleSet) RegisteredFor(n ast.Node) []Rule {
	if rules, found := r[reflect.TypeOf(n)]; found {
		return rules
	}
	return []Rule{}
}
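A small sketch of how the new RuleSet is meant to be driven. NewRuleSet, Register and RegisteredFor are taken directly from rule.go above (and the rule_test.go that follows exercises the same calls); the noopRule type and the main wrapper are invented purely for illustration.

package main

import (
	"go/ast"

	"github.com/GoASTScanner/gas"
)

// noopRule is a stand-in Rule that never reports an issue.
type noopRule struct{}

func (noopRule) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) { return nil, nil }

func main() {
	rs := gas.NewRuleSet()
	// One rule can be registered for several AST node types at once.
	rs.Register(noopRule{}, (*ast.CallExpr)(nil), (*ast.AssignStmt)(nil))

	// While walking the tree, only rules registered for the node's concrete
	// type are returned; unregistered types yield an empty slice.
	_ = rs.RegisteredFor(&ast.CallExpr{})   // one rule
	_ = rs.RegisteredFor(&ast.BinaryExpr{}) // none
}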
85
rule_test.go
Normal file
85
rule_test.go
Normal file
|
@ -0,0 +1,85 @@
|
||||||
|
package gas_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/ast"
|
||||||
|
|
||||||
|
"github.com/GoASTScanner/gas"
|
||||||
|
. "github.com/onsi/ginkgo"
|
||||||
|
. "github.com/onsi/gomega"
|
||||||
|
)
|
||||||
|
|
||||||
|
type mockrule struct {
|
||||||
|
issue *gas.Issue
|
||||||
|
err error
|
||||||
|
callback func(n ast.Node, ctx *gas.Context) bool
|
||||||
|
}
|
||||||
|
|
||||||
|
func (m *mockrule) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
|
||||||
|
if m.callback(n, ctx) {
|
||||||
|
return m.issue, nil
|
||||||
|
}
|
||||||
|
return nil, m.err
|
||||||
|
}
|
||||||
|
|
||||||
|
var _ = Describe("Rule", func() {
|
||||||
|
|
||||||
|
Context("when using a ruleset", func() {
|
||||||
|
|
||||||
|
var (
|
||||||
|
ruleset gas.RuleSet
|
||||||
|
dummyErrorRule gas.Rule
|
||||||
|
dummyIssueRule gas.Rule
|
||||||
|
)
|
||||||
|
|
||||||
|
JustBeforeEach(func() {
|
||||||
|
ruleset = gas.NewRuleSet()
|
||||||
|
dummyErrorRule = &mockrule{
|
||||||
|
issue: nil,
|
||||||
|
err: fmt.Errorf("An unexpected error occurred"),
|
||||||
|
callback: func(n ast.Node, ctx *gas.Context) bool { return false },
|
||||||
|
}
|
||||||
|
dummyIssueRule = &mockrule{
|
||||||
|
issue: &gas.Issue{
|
||||||
|
Severity: gas.High,
|
||||||
|
Confidence: gas.High,
|
||||||
|
What: `Some explanation of the thing`,
|
||||||
|
File: "main.go",
|
||||||
|
Code: `#include <stdio.h> int main(){ puts("hello world"); }`,
|
||||||
|
Line: "42",
|
||||||
|
},
|
||||||
|
err: nil,
|
||||||
|
callback: func(n ast.Node, ctx *gas.Context) bool { return true },
|
||||||
|
}
|
||||||
|
})
|
||||||
|
It("should be possible to register a rule for multiple ast.Node", func() {
|
||||||
|
registeredNodeA := (*ast.CallExpr)(nil)
|
||||||
|
registeredNodeB := (*ast.AssignStmt)(nil)
|
||||||
|
unregisteredNode := (*ast.BinaryExpr)(nil)
|
||||||
|
|
||||||
|
ruleset.Register(dummyIssueRule, registeredNodeA, registeredNodeB)
|
||||||
|
Expect(ruleset.RegisteredFor(unregisteredNode)).Should(BeEmpty())
|
||||||
|
Expect(ruleset.RegisteredFor(registeredNodeA)).Should(ContainElement(dummyIssueRule))
|
||||||
|
Expect(ruleset.RegisteredFor(registeredNodeB)).Should(ContainElement(dummyIssueRule))
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should not register a rule when no ast.Nodes are specified", func() {
|
||||||
|
ruleset.Register(dummyErrorRule)
|
||||||
|
Expect(ruleset).Should(BeEmpty())
|
||||||
|
})
|
||||||
|
|
||||||
|
It("should be possible to retrieve a list of rules for a given node type", func() {
|
||||||
|
registeredNode := (*ast.CallExpr)(nil)
|
||||||
|
unregisteredNode := (*ast.AssignStmt)(nil)
|
||||||
|
ruleset.Register(dummyErrorRule, registeredNode)
|
||||||
|
ruleset.Register(dummyIssueRule, registeredNode)
|
||||||
|
Expect(ruleset.RegisteredFor(unregisteredNode)).Should(BeEmpty())
|
||||||
|
Expect(ruleset.RegisteredFor(registeredNode)).Should(HaveLen(2))
|
||||||
|
Expect(ruleset.RegisteredFor(registeredNode)).Should(ContainElement(dummyErrorRule))
|
||||||
|
Expect(ruleset.RegisteredFor(registeredNode)).Should(ContainElement(dummyIssueRule))
|
||||||
|
})
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
})
|
91
rulelist.go
91
rulelist.go
|
@ -1,91 +0,0 @@
|
||||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"go/ast"
|
|
||||||
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
|
||||||
"github.com/GoASTScanner/gas/rules"
|
|
||||||
)
|
|
||||||
|
|
||||||
type RuleInfo struct {
|
|
||||||
description string
|
|
||||||
build func(map[string]interface{}) (gas.Rule, []ast.Node)
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetFullRuleList get the full list of all rules available to GAS
|
|
||||||
func GetFullRuleList() map[string]RuleInfo {
|
|
||||||
return map[string]RuleInfo{
|
|
||||||
// misc
|
|
||||||
"G101": RuleInfo{"Look for hardcoded credentials", rules.NewHardcodedCredentials},
|
|
||||||
"G102": RuleInfo{"Bind to all interfaces", rules.NewBindsToAllNetworkInterfaces},
|
|
||||||
"G103": RuleInfo{"Audit the use of unsafe block", rules.NewUsingUnsafe},
|
|
||||||
"G104": RuleInfo{"Audit errors not checked", rules.NewNoErrorCheck},
|
|
||||||
"G105": RuleInfo{"Audit the use of big.Exp function", rules.NewUsingBigExp},
|
|
||||||
|
|
||||||
// injection
|
|
||||||
"G201": RuleInfo{"SQL query construction using format string", rules.NewSqlStrFormat},
|
|
||||||
"G202": RuleInfo{"SQL query construction using string concatenation", rules.NewSqlStrConcat},
|
|
||||||
"G203": RuleInfo{"Use of unescaped data in HTML templates", rules.NewTemplateCheck},
|
|
||||||
"G204": RuleInfo{"Audit use of command execution", rules.NewSubproc},
|
|
||||||
|
|
||||||
// filesystem
|
|
||||||
"G301": RuleInfo{"Poor file permissions used when creating a directory", rules.NewMkdirPerms},
|
|
||||||
"G302": RuleInfo{"Poor file permisions used when creation file or using chmod", rules.NewFilePerms},
|
|
||||||
"G303": RuleInfo{"Creating tempfile using a predictable path", rules.NewBadTempFile},
|
|
||||||
|
|
||||||
// crypto
|
|
||||||
"G401": RuleInfo{"Detect the usage of DES, RC4, or MD5", rules.NewUsesWeakCryptography},
|
|
||||||
"G402": RuleInfo{"Look for bad TLS connection settings", rules.NewIntermediateTlsCheck},
|
|
||||||
"G403": RuleInfo{"Ensure minimum RSA key length of 2048 bits", rules.NewWeakKeyStrength},
|
|
||||||
"G404": RuleInfo{"Insecure random number source (rand)", rules.NewWeakRandCheck},
|
|
||||||
|
|
||||||
// blacklist
|
|
||||||
"G501": RuleInfo{"Import blacklist: crypto/md5", rules.NewBlacklist_crypto_md5},
|
|
||||||
"G502": RuleInfo{"Import blacklist: crypto/des", rules.NewBlacklist_crypto_des},
|
|
||||||
"G503": RuleInfo{"Import blacklist: crypto/rc4", rules.NewBlacklist_crypto_rc4},
|
|
||||||
"G504": RuleInfo{"Import blacklist: net/http/cgi", rules.NewBlacklist_net_http_cgi},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
func AddRules(analyzer *gas.Analyzer, conf map[string]interface{}) {
|
|
||||||
var all map[string]RuleInfo
|
|
||||||
|
|
||||||
inc := conf["include"].([]string)
|
|
||||||
exc := conf["exclude"].([]string)
|
|
||||||
|
|
||||||
// add included rules
|
|
||||||
if len(inc) == 0 {
|
|
||||||
all = GetFullRuleList()
|
|
||||||
} else {
|
|
||||||
all = map[string]RuleInfo{}
|
|
||||||
tmp := GetFullRuleList()
|
|
||||||
for _, v := range inc {
|
|
||||||
if val, ok := tmp[v]; ok {
|
|
||||||
all[v] = val
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// remove excluded rules
|
|
||||||
for _, v := range exc {
|
|
||||||
delete(all, v)
|
|
||||||
}
|
|
||||||
|
|
||||||
for _, v := range all {
|
|
||||||
analyzer.AddRule(v.build(conf))
|
|
||||||
}
|
|
||||||
}
|
|
rules/big.go
@@ -15,24 +15,27 @@
 package rules
 
 import (
-	gas "github.com/GoASTScanner/gas/core"
 	"go/ast"
+
+	"github.com/GoASTScanner/gas"
 )
 
-type UsingBigExp struct {
+type usingBigExp struct {
 	gas.MetaData
 	pkg   string
 	calls []string
 }
 
-func (r *UsingBigExp) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
+func (r *usingBigExp) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
 	if _, matched := gas.MatchCallByType(n, c, r.pkg, r.calls...); matched {
 		return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
 	}
 	return nil, nil
 }
-func NewUsingBigExp(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-	return &UsingBigExp{
+
+// NewUsingBigExp detects issues with modulus == 0 for Bignum
+func NewUsingBigExp(conf gas.Config) (gas.Rule, []ast.Node) {
+	return &usingBigExp{
 		pkg:   "*math/big.Int",
 		calls: []string{"Exp"},
 		MetaData: gas.MetaData{
|
@ -1,49 +0,0 @@
|
||||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package rules
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestBigExp(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewUsingBigExp(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"math/big"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
z := new(big.Int)
|
|
||||||
x := new(big.Int)
|
|
||||||
x = x.SetUint64(2)
|
|
||||||
y := new(big.Int)
|
|
||||||
y = y.SetUint64(4)
|
|
||||||
m := new(big.Int)
|
|
||||||
m = m.SetUint64(0)
|
|
||||||
|
|
||||||
z = z.Exp(x, y, m)
|
|
||||||
}
|
|
||||||
`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "Use of math/big.Int.Exp function")
|
|
||||||
}
|
|
|
@@ -18,30 +18,37 @@ import (
 	"go/ast"
 	"regexp"
 
-	gas "github.com/GoASTScanner/gas/core"
+	"github.com/GoASTScanner/gas"
 )
 
 // Looks for net.Listen("0.0.0.0") or net.Listen(":8080")
-type BindsToAllNetworkInterfaces struct {
+type bindsToAllNetworkInterfaces struct {
 	gas.MetaData
-	call    *regexp.Regexp
+	calls   gas.CallList
 	pattern *regexp.Regexp
 }
 
-func (r *BindsToAllNetworkInterfaces) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
-	if node := gas.MatchCall(n, r.call); node != nil {
-		if arg, err := gas.GetString(node.Args[1]); err == nil {
+func (r *bindsToAllNetworkInterfaces) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
+	callExpr := r.calls.ContainsCallExpr(n, c)
+	if callExpr == nil {
+		return nil, nil
+	}
+	if arg, err := gas.GetString(callExpr.Args[1]); err == nil {
 		if r.pattern.MatchString(arg) {
 			return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
 		}
 	}
-	}
-	return
+	return nil, nil
 }
 
-func NewBindsToAllNetworkInterfaces(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-	return &BindsToAllNetworkInterfaces{
-		call:    regexp.MustCompile(`^(net|tls)\.Listen$`),
+// NewBindsToAllNetworkInterfaces detects socket connections that are setup to
+// listen on all network interfaces.
+func NewBindsToAllNetworkInterfaces(conf gas.Config) (gas.Rule, []ast.Node) {
+	calls := gas.NewCallList()
+	calls.Add("net", "Listen")
+	calls.Add("tls", "Listen")
+	return &bindsToAllNetworkInterfaces{
+		calls:   calls,
 		pattern: regexp.MustCompile(`^(0.0.0.0|:).*$`),
 		MetaData: gas.MetaData{
 			Severity: gas.Medium,
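The CallList pattern introduced above generalises to other call sites. The sketch below is an invented example (a hypothetical exec.Command watcher), not a rule in this change; it only uses the CallList methods visible in the hunk (NewCallList, Add, ContainsCallExpr), and the short "exec" selector mirrors the Add("net", "Listen") style shown there.

package rules

import (
	"go/ast"

	"github.com/GoASTScanner/gas"
)

// execWatcher is a hypothetical rule flagging calls to exec.Command.
type execWatcher struct {
	gas.MetaData
	calls gas.CallList
}

func (r *execWatcher) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
	// ContainsCallExpr returns the matched *ast.CallExpr, or nil when the
	// node is not one of the registered calls.
	if callExpr := r.calls.ContainsCallExpr(n, c); callExpr != nil {
		return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
	}
	return nil, nil
}

func newExecWatcher(conf gas.Config) (gas.Rule, []ast.Node) {
	calls := gas.NewCallList()
	calls.Add("exec", "Command")
	return &execWatcher{
		calls: calls,
		MetaData: gas.MetaData{
			Severity:   gas.Medium,
			Confidence: gas.High,
			What:       "Subprocess launched via exec.Command",
		},
	}, []ast.Node{(*ast.CallExpr)(nil)}
}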
|
@ -1,65 +0,0 @@
|
||||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package rules
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestBind0000(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewBindsToAllNetworkInterfaces(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
import (
|
|
||||||
"log"
|
|
||||||
"net"
|
|
||||||
)
|
|
||||||
func main() {
|
|
||||||
l, err := net.Listen("tcp", "0.0.0.0:2000")
|
|
||||||
if err != nil {
|
|
||||||
log.Fatal(err)
|
|
||||||
}
|
|
||||||
defer l.Close()
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "Binds to all network interfaces")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestBindEmptyHost(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewBindsToAllNetworkInterfaces(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
import (
|
|
||||||
"log"
|
|
||||||
"net"
|
|
||||||
)
|
|
||||||
func main() {
|
|
||||||
l, err := net.Listen("tcp", ":2000")
|
|
||||||
if err != nil {
|
|
||||||
log.Fatal(err)
|
|
||||||
}
|
|
||||||
defer l.Close()
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "Binds to all network interfaces")
|
|
||||||
}
|
|
|
@@ -16,64 +16,67 @@ package rules
 
 import (
 	"go/ast"
+	"strings"
 
-	gas "github.com/GoASTScanner/gas/core"
+	"github.com/GoASTScanner/gas"
 )
 
-type BlacklistImport struct {
+type blacklistedImport struct {
 	gas.MetaData
-	Path string
+	Blacklisted map[string]string
 }
 
-func (r *BlacklistImport) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
+func unquote(original string) string {
+	copy := strings.TrimSpace(original)
+	copy = strings.TrimLeft(copy, `"`)
+	return strings.TrimRight(copy, `"`)
+}
+
+func (r *blacklistedImport) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
 	if node, ok := n.(*ast.ImportSpec); ok {
-		if r.Path == node.Path.Value && node.Name.String() != "_" {
-			return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
+		if description, ok := r.Blacklisted[unquote(node.Path.Value)]; ok {
+			return gas.NewIssue(c, node, description, r.Severity, r.Confidence), nil
 		}
 	}
 	return nil, nil
 }
 
-func NewBlacklist_crypto_md5(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-	return &BlacklistImport{
+// NewBlacklistedImports reports when a blacklisted import is being used.
+// Typically when a deprecated technology is being used.
+func NewBlacklistedImports(conf gas.Config, blacklist map[string]string) (gas.Rule, []ast.Node) {
+	return &blacklistedImport{
 		MetaData: gas.MetaData{
-			Severity:   gas.High,
+			Severity:   gas.Medium,
 			Confidence: gas.High,
-			What:       "Use of weak cryptographic primitive",
 		},
-		Path: `"crypto/md5"`,
+		Blacklisted: blacklist,
 	}, []ast.Node{(*ast.ImportSpec)(nil)}
 }
 
-func NewBlacklist_crypto_des(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-	return &BlacklistImport{
-		MetaData: gas.MetaData{
-			Severity:   gas.High,
-			Confidence: gas.High,
-			What:       "Use of weak cryptographic primitive",
-		},
-		Path: `"crypto/des"`,
-	}, []ast.Node{(*ast.ImportSpec)(nil)}
+// NewBlacklistedImportMD5 fails if MD5 is imported
+func NewBlacklistedImportMD5(conf gas.Config) (gas.Rule, []ast.Node) {
+	return NewBlacklistedImports(conf, map[string]string{
+		"crypto/md5": "Blacklisted import crypto/md5: weak cryptographic primitive",
+	})
 }
 
-func NewBlacklist_crypto_rc4(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-	return &BlacklistImport{
-		MetaData: gas.MetaData{
-			Severity:   gas.High,
-			Confidence: gas.High,
-			What:       "Use of weak cryptographic primitive",
-		},
-		Path: `"crypto/rc4"`,
-	}, []ast.Node{(*ast.ImportSpec)(nil)}
+// NewBlacklistedImportDES fails if DES is imported
+func NewBlacklistedImportDES(conf gas.Config) (gas.Rule, []ast.Node) {
+	return NewBlacklistedImports(conf, map[string]string{
+		"crypto/des": "Blacklisted import crypto/des: weak cryptographic primitive",
+	})
 }
 
-func NewBlacklist_net_http_cgi(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-	return &BlacklistImport{
-		MetaData: gas.MetaData{
-			Severity:   gas.High,
-			Confidence: gas.High,
-			What:       "Go versions < 1.6.3 are vulnerable to Httpoxy attack: (CVE-2016-5386)",
-		},
-		Path: `"net/http/cgi"`,
-	}, []ast.Node{(*ast.ImportSpec)(nil)}
+// NewBlacklistedImportRC4 fails if RC4 is imported
+func NewBlacklistedImportRC4(conf gas.Config) (gas.Rule, []ast.Node) {
+	return NewBlacklistedImports(conf, map[string]string{
+		"crypto/rc4": "Blacklisted import crypto/rc4: weak cryptographic primitive",
+	})
+}
+
+// NewBlacklistedImportCGI fails if CGI is imported
+func NewBlacklistedImportCGI(conf gas.Config) (gas.Rule, []ast.Node) {
+	return NewBlacklistedImports(conf, map[string]string{
+		"net/http/cgi": "Blacklisted import net/http/cgi: Go versions < 1.6.3 are vulnerable to Httpoxy attack: (CVE-2016-5386)",
+	})
 }
 
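Because NewBlacklistedImports now takes the blacklist as data, callers can compose their own variants. The sketch below invents a crypto/sha1 entry purely as an illustration of the constructor shown above; it is not part of this change.

package rules

import (
	"go/ast"

	"github.com/GoASTScanner/gas"
)

// newProjectImportBlacklist is a hypothetical, project-specific blacklist
// built on the generic constructor introduced in this change.
func newProjectImportBlacklist(conf gas.Config) (gas.Rule, []ast.Node) {
	return NewBlacklistedImports(conf, map[string]string{
		"crypto/sha1": "Blacklisted import crypto/sha1: weak cryptographic primitive",
	})
}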
|
@ -1,39 +0,0 @@
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package rules
|
|
||||||
|
|
||||||
import (
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
const initOnlyImportSrc = `
|
|
||||||
package main
|
|
||||||
import (
|
|
||||||
_ "crypto/md5"
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
)
|
|
||||||
func main() {
|
|
||||||
for _, arg := range os.Args {
|
|
||||||
fmt.Println(arg)
|
|
||||||
}
|
|
||||||
}`
|
|
||||||
|
|
||||||
func TestInitOnlyImport(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewBlacklist_crypto_md5(config))
|
|
||||||
issues := gasTestRunner(initOnlyImportSrc, analyzer)
|
|
||||||
checkTestResults(t, issues, 0, "")
|
|
||||||
}
|
@@ -15,12 +15,13 @@
 package rules
 
 import (
-	gas "github.com/GoASTScanner/gas/core"
 	"go/ast"
 	"go/types"
+
+	"github.com/GoASTScanner/gas"
 )
 
-type NoErrorCheck struct {
+type noErrorCheck struct {
 	gas.MetaData
 	whitelist gas.CallList
 }
@@ -29,7 +30,7 @@ func returnsError(callExpr *ast.CallExpr, ctx *gas.Context) int {
 	if tv := ctx.Info.TypeOf(callExpr); tv != nil {
 		switch t := tv.(type) {
 		case *types.Tuple:
-			for pos := 0; pos < t.Len(); pos += 1 {
+			for pos := 0; pos < t.Len(); pos++ {
 				variable := t.At(pos)
 				if variable != nil && variable.Type().String() == "error" {
 					return pos
@@ -44,11 +45,11 @@ func returnsError(callExpr *ast.CallExpr, ctx *gas.Context) int {
 	return -1
 }
 
-func (r *NoErrorCheck) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
+func (r *noErrorCheck) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
 	switch stmt := n.(type) {
 	case *ast.AssignStmt:
 		for _, expr := range stmt.Rhs {
-			if callExpr, ok := expr.(*ast.CallExpr); ok && !r.whitelist.ContainsCallExpr(callExpr, ctx) {
+			if callExpr, ok := expr.(*ast.CallExpr); ok && r.whitelist.ContainsCallExpr(expr, ctx) == nil {
 				pos := returnsError(callExpr, ctx)
 				if pos < 0 || pos >= len(stmt.Lhs) {
 					return nil, nil
@@ -59,7 +60,7 @@ func (r *NoErrorCheck) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
 			}
 		}
 	case *ast.ExprStmt:
-		if callExpr, ok := stmt.X.(*ast.CallExpr); ok && !r.whitelist.ContainsCallExpr(callExpr, ctx) {
+		if callExpr, ok := stmt.X.(*ast.CallExpr); ok && r.whitelist.ContainsCallExpr(stmt.X, ctx) == nil {
 			pos := returnsError(callExpr, ctx)
 			if pos >= 0 {
 				return gas.NewIssue(ctx, n, r.What, r.Severity, r.Confidence), nil
@@ -69,7 +70,8 @@ func (r *NoErrorCheck) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
 	return nil, nil
 }
 
-func NewNoErrorCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
+// NewNoErrorCheck detects if the returned error is unchecked
+func NewNoErrorCheck(conf gas.Config) (gas.Rule, []ast.Node) {
 
 	// TODO(gm) Come up with sensible defaults here. Or flip it to use a
 	// black list instead.
@@ -85,7 +87,7 @@ func NewNoErrorCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
 			}
 		}
 	}
-	return &NoErrorCheck{
+	return &noErrorCheck{
 		MetaData: gas.MetaData{
 			Severity:   gas.Low,
 			Confidence: gas.High,
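For context on what NewNoErrorCheck consumes, here is a minimal sketch of the G104 whitelist configuration. Its shape is taken from the deleted errors test that follows (import path mapped to the call names whose unchecked errors are tolerated); treating the configuration as a plain map is an assumption carried over from those old tests, as is the example package name.

package example

// exampleErrorCheckConfig builds the configuration consumed by the G104 rule.
func exampleErrorCheckConfig() map[string]interface{} {
	return map[string]interface{}{
		"ignoreNosec": false,
		"G104": map[string][]string{
			"compress/zlib": {"NewReader"},
			"io":            {"Copy"},
		},
	}
}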
|
|
@ -1,144 +0,0 @@
|
||||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package rules
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestErrorsMulti(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewNoErrorCheck(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(
|
|
||||||
`package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
)
|
|
||||||
|
|
||||||
func test() (int,error) {
|
|
||||||
return 0, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
v, _ := test()
|
|
||||||
fmt.Println(v)
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "Errors unhandled")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestErrorsSingle(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewNoErrorCheck(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(
|
|
||||||
`package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
)
|
|
||||||
|
|
||||||
func a() error {
|
|
||||||
return fmt.Errorf("This is an error")
|
|
||||||
}
|
|
||||||
|
|
||||||
func b() {
|
|
||||||
fmt.Println("b")
|
|
||||||
}
|
|
||||||
|
|
||||||
func c() string {
|
|
||||||
return fmt.Sprintf("This isn't anything")
|
|
||||||
}
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
_ = a()
|
|
||||||
a()
|
|
||||||
b()
|
|
||||||
_ = c()
|
|
||||||
c()
|
|
||||||
}`, analyzer)
|
|
||||||
checkTestResults(t, issues, 2, "Errors unhandled")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestErrorsGood(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewNoErrorCheck(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(
|
|
||||||
`package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
)
|
|
||||||
|
|
||||||
func test() err error {
|
|
||||||
return 0, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
e := test()
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 0, "")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestErrorsWhitelisted(t *testing.T) {
|
|
||||||
config := map[string]interface{}{
|
|
||||||
"ignoreNosec": false,
|
|
||||||
"G104": map[string][]string{
|
|
||||||
"compress/zlib": []string{"NewReader"},
|
|
||||||
"io": []string{"Copy"},
|
|
||||||
},
|
|
||||||
}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewNoErrorCheck(config))
|
|
||||||
source := `package main
|
|
||||||
import (
|
|
||||||
"io"
|
|
||||||
"os"
|
|
||||||
"fmt"
|
|
||||||
"bytes"
|
|
||||||
"compress/zlib"
|
|
||||||
)
|
|
||||||
|
|
||||||
func a() error {
|
|
||||||
return fmt.Errorf("This is an error ok")
|
|
||||||
}
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
// Expect at least one failure
|
|
||||||
_ = a()
|
|
||||||
|
|
||||||
var b bytes.Buffer
|
|
||||||
// Default whitelist
|
|
||||||
nbytes, _ := b.Write([]byte("Hello "))
|
|
||||||
if nbytes <= 0 {
|
|
||||||
os.Exit(1)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Whitelisted via configuration
|
|
||||||
r, _ := zlib.NewReader(&b)
|
|
||||||
io.Copy(os.Stdout, r)
|
|
||||||
}`
|
|
||||||
issues := gasTestRunner(source, analyzer)
|
|
||||||
checkTestResults(t, issues, 1, "Errors unhandled")
|
|
||||||
}
|
@@ -19,10 +19,10 @@ import (
 	"go/ast"
 	"strconv"
 
-	gas "github.com/GoASTScanner/gas/core"
+	"github.com/GoASTScanner/gas"
 )
 
-type FilePermissions struct {
+type filePermissions struct {
 	gas.MetaData
 	mode  int64
 	pkg   string
@@ -30,7 +30,7 @@ type FilePermissions struct {
 }
 
 func getConfiguredMode(conf map[string]interface{}, configKey string, defaultMode int64) int64 {
-	var mode int64 = defaultMode
+	var mode = defaultMode
 	if value, ok := conf[configKey]; ok {
 		switch value.(type) {
 		case int64:
@@ -46,7 +46,7 @@ func getConfiguredMode(conf map[string]interface{}, configKey string, defaultMod
 	return mode
 }
 
-func (r *FilePermissions) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
+func (r *filePermissions) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
 	if callexpr, matched := gas.MatchCallByPackage(n, c, r.pkg, r.calls...); matched {
 		modeArg := callexpr.Args[len(callexpr.Args)-1]
 		if mode, err := gas.GetInt(modeArg); err == nil && mode > r.mode {
@@ -56,9 +56,11 @@ func (r *FilePermissions) Match(n ast.Node, c *gas.Context) (*gas.Issue, error)
 	return nil, nil
 }
 
-func NewFilePerms(conf map[string]interface{}) (gas.Rule, []ast.Node) {
+// NewFilePerms creates a rule to detect file creation with a more permissive than configured
+// permission mask.
+func NewFilePerms(conf gas.Config) (gas.Rule, []ast.Node) {
 	mode := getConfiguredMode(conf, "G302", 0600)
-	return &FilePermissions{
+	return &filePermissions{
 		mode:  mode,
 		pkg:   "os",
 		calls: []string{"OpenFile", "Chmod"},
@@ -70,9 +72,11 @@ func NewFilePerms(conf map[string]interface{}) (gas.Rule, []ast.Node) {
 	}, []ast.Node{(*ast.CallExpr)(nil)}
 }
 
-func NewMkdirPerms(conf map[string]interface{}) (gas.Rule, []ast.Node) {
+// NewMkdirPerms creates a rule to detect directory creation with more permissive than
+// configured permission mask.
+func NewMkdirPerms(conf gas.Config) (gas.Rule, []ast.Node) {
 	mode := getConfiguredMode(conf, "G301", 0700)
-	return &FilePermissions{
+	return &filePermissions{
 		mode:  mode,
 		pkg:   "os",
 		calls: []string{"Mkdir", "MkdirAll"},
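A short sketch of overriding the permission thresholds read by getConfiguredMode above. The defaults are 0700 for G301 and 0600 for G302, and the int64 case is the one visible in this hunk; the example values and package name are illustrative assumptions, and other accepted value types are not shown here.

package example

// examplePermConfig overrides the directory (G301) and file (G302) thresholds.
func examplePermConfig() map[string]interface{} {
	return map[string]interface{}{
		"G301": int64(0750), // os.Mkdir / os.MkdirAll
		"G302": int64(0640), // os.OpenFile / os.Chmod
	}
}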
|
|
@ -1,56 +0,0 @@
|
||||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package rules
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestChmod(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewFilePerms(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
import "os"
|
|
||||||
func main() {
|
|
||||||
os.Chmod("/tmp/somefile", 0777)
|
|
||||||
os.Chmod("/tmp/someotherfile", 0600)
|
|
||||||
os.OpenFile("/tmp/thing", os.O_CREATE|os.O_WRONLY, 0666)
|
|
||||||
os.OpenFile("/tmp/thing", os.O_CREATE|os.O_WRONLY, 0600)
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 2, "Expect file permissions")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestMkdir(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewMkdirPerms(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
import "os"
|
|
||||||
func main() {
|
|
||||||
os.Mkdir("/tmp/mydir", 0777)
|
|
||||||
os.Mkdir("/tmp/mydir", 0600)
|
|
||||||
os.MkdirAll("/tmp/mydir/mysubidr", 0775)
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 2, "Expect directory permissions")
|
|
||||||
}
|
@@ -15,16 +15,16 @@
 package rules
 
 import (
-	gas "github.com/GoASTScanner/gas/core"
 	"go/ast"
 	"go/token"
 	"regexp"
 
-	"github.com/nbutton23/zxcvbn-go"
 	"strconv"
 
+	"github.com/GoASTScanner/gas"
+	"github.com/nbutton23/zxcvbn-go"
 )
 
-type Credentials struct {
+type credentials struct {
 	gas.MetaData
 	pattern          *regexp.Regexp
 	entropyThreshold float64
@@ -40,7 +40,7 @@ func truncate(s string, n int) string {
 	return s[:n]
 }
 
-func (r *Credentials) isHighEntropyString(str string) bool {
+func (r *credentials) isHighEntropyString(str string) bool {
 	s := truncate(str, r.truncate)
 	info := zxcvbn.PasswordStrength(s, []string{})
 	entropyPerChar := info.Entropy / float64(len(s))
@@ -49,7 +49,7 @@ func (r *Credentials) isHighEntropyString(str string) bool {
 		entropyPerChar >= r.perCharThreshold))
 }
 
-func (r *Credentials) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
+func (r *credentials) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
 	switch node := n.(type) {
 	case *ast.AssignStmt:
 		return r.matchAssign(node, ctx)
@@ -59,7 +59,7 @@ func (r *Credentials) Match(n ast.Node, ctx *gas.Context) (*gas.Issue, error) {
 	return nil, nil
 }
 
-func (r *Credentials) matchAssign(assign *ast.AssignStmt, ctx *gas.Context) (*gas.Issue, error) {
+func (r *credentials) matchAssign(assign *ast.AssignStmt, ctx *gas.Context) (*gas.Issue, error) {
 	for _, i := range assign.Lhs {
 		if ident, ok := i.(*ast.Ident); ok {
 			if r.pattern.MatchString(ident.Name) {
@@ -76,7 +76,7 @@ func (r *Credentials) matchAssign(assign *ast.AssignStmt, ctx *gas.Context) (*ga
 	return nil, nil
 }
 
-func (r *Credentials) matchGenDecl(decl *ast.GenDecl, ctx *gas.Context) (*gas.Issue, error) {
+func (r *credentials) matchGenDecl(decl *ast.GenDecl, ctx *gas.Context) (*gas.Issue, error) {
 	if decl.Tok != token.CONST && decl.Tok != token.VAR {
 		return nil, nil
 	}
@@ -100,12 +100,14 @@ func (r *Credentials) matchGenDecl(decl *ast.GenDecl, ctx *gas.Context) (*gas.Is
 	return nil, nil
 }
 
-func NewHardcodedCredentials(conf map[string]interface{}) (gas.Rule, []ast.Node) {
+// NewHardcodedCredentials attempts to find high entropy string constants being
+// assigned to variables that appear to be related to credentials.
+func NewHardcodedCredentials(conf gas.Config) (gas.Rule, []ast.Node) {
 	pattern := `(?i)passwd|pass|password|pwd|secret|token`
 	entropyThreshold := 80.0
 	perCharThreshold := 3.0
 	ignoreEntropy := false
-	var truncateString int = 16
+	var truncateString = 16
 	if val, ok := conf["G101"]; ok {
 		conf := val.(map[string]string)
 		if configPattern, ok := conf["pattern"]; ok {
@@ -133,7 +135,7 @@ func NewHardcodedCredentials(conf map[string]interface{}) (gas.Rule, []ast.Node)
 		}
 	}
 
-	return &Credentials{
+	return &credentials{
 		pattern:          regexp.MustCompile(pattern),
 		entropyThreshold: entropyThreshold,
 		perCharThreshold: perCharThreshold,
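A minimal sketch of tuning the G101 rule. The "pattern" key is read in the hunk above, "ignore_entropy" appears in the deleted credentials test that follows, and the rule reads the G101 entry as a map[string]string; the pattern value and the package name are only illustrations.

package example

// exampleCredentialsConfig adjusts the identifier pattern and disables the
// entropy check for the hardcoded-credentials rule.
func exampleCredentialsConfig() map[string]interface{} {
	return map[string]interface{}{
		"G101": map[string]string{
			"pattern":        `(?i)passwd|password|secret|token`,
			"ignore_entropy": "true",
		},
	}
}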
|
|
@ -1,194 +0,0 @@
|
||||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package rules
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestHardcoded(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewHardcodedCredentials(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(
|
|
||||||
`
|
|
||||||
package samples
|
|
||||||
|
|
||||||
import "fmt"
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
username := "admin"
|
|
||||||
password := "f62e5bcda4fae4f82370da0c6f20697b8f8447ef"
|
|
||||||
fmt.Println("Doing something with: ", username, password)
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "Potential hardcoded credentials")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestHardcodedWithEntropy(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewHardcodedCredentials(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(
|
|
||||||
`
|
|
||||||
package samples
|
|
||||||
|
|
||||||
import "fmt"
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
username := "admin"
|
|
||||||
password := "secret"
|
|
||||||
fmt.Println("Doing something with: ", username, password)
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 0, "Potential hardcoded credentials")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestHardcodedIgnoreEntropy(t *testing.T) {
|
|
||||||
config := map[string]interface{}{
|
|
||||||
"ignoreNosec": false,
|
|
||||||
"G101": map[string]string{
|
|
||||||
"ignore_entropy": "true",
|
|
||||||
},
|
|
||||||
}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewHardcodedCredentials(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(
|
|
||||||
`
|
|
||||||
package samples
|
|
||||||
|
|
||||||
import "fmt"
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
username := "admin"
|
|
||||||
password := "admin"
|
|
||||||
fmt.Println("Doing something with: ", username, password)
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "Potential hardcoded credentials")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestHardcodedGlobalVar(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewHardcodedCredentials(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package samples
|
|
||||||
|
|
||||||
import "fmt"
|
|
||||||
|
|
||||||
var password = "f62e5bcda4fae4f82370da0c6f20697b8f8447ef"
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
username := "admin"
|
|
||||||
fmt.Println("Doing something with: ", username, password)
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "Potential hardcoded credentials")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestHardcodedConstant(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewHardcodedCredentials(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package samples
|
|
||||||
|
|
||||||
import "fmt"
|
|
||||||
|
|
||||||
const password = "f62e5bcda4fae4f82370da0c6f20697b8f8447ef"
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
username := "admin"
|
|
||||||
fmt.Println("Doing something with: ", username, password)
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "Potential hardcoded credentials")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestHardcodedConstantMulti(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewHardcodedCredentials(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package samples
|
|
||||||
|
|
||||||
import "fmt"
|
|
||||||
|
|
||||||
const (
|
|
||||||
username = "user"
|
|
||||||
password = "f62e5bcda4fae4f82370da0c6f20697b8f8447ef"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
fmt.Println("Doing something with: ", username, password)
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "Potential hardcoded credentials")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestHardecodedVarsNotAssigned(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewHardcodedCredentials(config))
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
var password string
|
|
||||||
func init() {
|
|
||||||
password = "f62e5bcda4fae4f82370da0c6f20697b8f8447ef"
|
|
||||||
}`, analyzer)
|
|
||||||
checkTestResults(t, issues, 1, "Potential hardcoded credentials")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestHardcodedConstInteger(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewHardcodedCredentials(config))
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
|
|
||||||
const (
|
|
||||||
ATNStateSomethingElse = 1
|
|
||||||
ATNStateTokenStart = 42
|
|
||||||
)
|
|
||||||
func main() {
|
|
||||||
println(ATNStateTokenStart)
|
|
||||||
}`, analyzer)
|
|
||||||
checkTestResults(t, issues, 0, "Potential hardcoded credentials")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestHardcodedConstString(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewHardcodedCredentials(config))
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
|
|
||||||
const (
|
|
||||||
ATNStateTokenStart = "f62e5bcda4fae4f82370da0c6f20697b8f8447ef"
|
|
||||||
)
|
|
||||||
func main() {
|
|
||||||
println(ATNStateTokenStart)
|
|
||||||
}`, analyzer)
|
|
||||||
checkTestResults(t, issues, 1, "Potential hardcoded credentials")
|
|
||||||
}
|
|
|
@@ -1,39 +0,0 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"testing"

	gas "github.com/GoASTScanner/gas/core"
)

func TestHttpoxy(t *testing.T) {
	config := map[string]interface{}{"ignoreNosec": false}
	analyzer := gas.NewAnalyzer(config, nil)
	analyzer.AddRule(NewBlacklist_net_http_cgi(config))

	issues := gasTestRunner(`
		package main
		import (
			"net/http/cgi"
			"net/http"
		)
		func main() {
			cgi.Serve(http.FileServer(http.Dir("/usr/share/doc")))
		}`, analyzer)

	checkTestResults(t, issues, 1, "Go versions < 1.6.3 are vulnerable to Httpoxy")
}
@@ -1,85 +0,0 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"testing"

	gas "github.com/GoASTScanner/gas/core"
)

func TestNosec(t *testing.T) {
	config := map[string]interface{}{"ignoreNosec": false}
	analyzer := gas.NewAnalyzer(config, nil)
	analyzer.AddRule(NewSubproc(config))

	issues := gasTestRunner(
		`package main
		import (
			"os"
			"os/exec"
		)

		func main() {
			cmd := exec.Command("sh", "-c", os.Getenv("BLAH")) // #nosec
			cmd.Run()
		}`, analyzer)

	checkTestResults(t, issues, 0, "None")
}

func TestNosecBlock(t *testing.T) {
	config := map[string]interface{}{"ignoreNosec": false}
	analyzer := gas.NewAnalyzer(config, nil)
	analyzer.AddRule(NewSubproc(config))

	issues := gasTestRunner(
		`package main
		import (
			"os"
			"os/exec"
		)

		func main() {
			// #nosec
			if true {
				cmd := exec.Command("sh", "-c", os.Getenv("BLAH"))
				cmd.Run()
			}
		}`, analyzer)

	checkTestResults(t, issues, 0, "None")
}

func TestNosecIgnore(t *testing.T) {
	config := map[string]interface{}{"ignoreNosec": true}
	analyzer := gas.NewAnalyzer(config, nil)
	analyzer.AddRule(NewSubproc(config))

	issues := gasTestRunner(
		`package main

		import (
			"os"
			"os/exec"
		)

		func main() {
			cmd := exec.Command("sh", "-c", os.Args[1]) // #nosec
			cmd.Run()
		}`, analyzer)

	checkTestResults(t, issues, 1, "Subprocess launching with variable.")
}
@@ -17,16 +17,16 @@ package rules
 import (
 	"go/ast"
 
-	gas "github.com/GoASTScanner/gas/core"
+	"github.com/GoASTScanner/gas"
 )
 
-type WeakRand struct {
+type weakRand struct {
 	gas.MetaData
 	funcNames   []string
 	packagePath string
 }
 
-func (w *WeakRand) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
+func (w *weakRand) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
 	for _, funcName := range w.funcNames {
 		if _, matched := gas.MatchCallByPackage(n, c, w.packagePath, funcName); matched {
 			return gas.NewIssue(c, n, w.What, w.Severity, w.Confidence), nil
@@ -36,8 +36,9 @@ func (w *WeakRand) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
 	return nil, nil
 }
 
-func NewWeakRandCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-	return &WeakRand{
+// NewWeakRandCheck detects the use of random number generator that isn't cryptographically secure
+func NewWeakRandCheck(conf gas.Config) (gas.Rule, []ast.Node) {
+	return &weakRand{
 		funcNames:   []string{"Read", "Int"},
 		packagePath: "math/rand",
 		MetaData: gas.MetaData{
@@ -1,85 +0,0 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"testing"

	gas "github.com/GoASTScanner/gas/core"
)

func TestRandOk(t *testing.T) {
	config := map[string]interface{}{"ignoreNosec": false}
	analyzer := gas.NewAnalyzer(config, nil)
	analyzer.AddRule(NewWeakRandCheck(config))

	issues := gasTestRunner(`
		package main

		import "crypto/rand"

		func main() {
			good, _ := rand.Read(nil)
			println(good)
		}`, analyzer)

	checkTestResults(t, issues, 0, "Not expected to match")
}

func TestRandBad(t *testing.T) {
	config := map[string]interface{}{"ignoreNosec": false}
	analyzer := gas.NewAnalyzer(config, nil)
	analyzer.AddRule(NewWeakRandCheck(config))

	issues := gasTestRunner(`
		package main

		import "math/rand"

		func main() {
			bad := rand.Int()
			println(bad)
		}`, analyzer)

	checkTestResults(t, issues, 1, "Use of weak random number generator (math/rand instead of crypto/rand)")
}

func TestRandRenamed(t *testing.T) {
	config := map[string]interface{}{"ignoreNosec": false}
	analyzer := gas.NewAnalyzer(config, nil)
	analyzer.AddRule(NewWeakRandCheck(config))

	issues := gasTestRunner(`
		package main

		import (
			"crypto/rand"
			mrand "math/rand"
		)

		func main() {
			good, _ := rand.Read(nil)
			println(good)
			i := mrand.Int31()
			println(i)
		}`, analyzer)

	checkTestResults(t, issues, 0, "Not expected to match")
}
rules/rsa.go (22 changes)
@@ -17,30 +17,32 @@ package rules
 import (
 	"fmt"
 	"go/ast"
-	"regexp"
 
-	gas "github.com/GoASTScanner/gas/core"
+	"github.com/GoASTScanner/gas"
 )
 
-type WeakKeyStrength struct {
+type weakKeyStrength struct {
 	gas.MetaData
-	pattern *regexp.Regexp
+	calls gas.CallList
 	bits  int
 }
 
-func (w *WeakKeyStrength) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
-	if node := gas.MatchCall(n, w.pattern); node != nil {
-		if bits, err := gas.GetInt(node.Args[1]); err == nil && bits < (int64)(w.bits) {
+func (w *weakKeyStrength) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
+	if callExpr := w.calls.ContainsCallExpr(n, c); callExpr != nil {
+		if bits, err := gas.GetInt(callExpr.Args[1]); err == nil && bits < (int64)(w.bits) {
 			return gas.NewIssue(c, n, w.What, w.Severity, w.Confidence), nil
 		}
 	}
 	return nil, nil
 }
 
-func NewWeakKeyStrength(conf map[string]interface{}) (gas.Rule, []ast.Node) {
+// NewWeakKeyStrength builds a rule that detects RSA keys < 2048 bits
+func NewWeakKeyStrength(conf gas.Config) (gas.Rule, []ast.Node) {
+	calls := gas.NewCallList()
+	calls.Add("rsa", "GenerateKey")
 	bits := 2048
-	return &WeakKeyStrength{
-		pattern: regexp.MustCompile(`^rsa\.GenerateKey$`),
+	return &weakKeyStrength{
+		calls: calls,
 		bits:  bits,
 		MetaData: gas.MetaData{
 			Severity: gas.Medium,
@@ -1,50 +0,0 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"testing"

	gas "github.com/GoASTScanner/gas/core"
)

func TestRSAKeys(t *testing.T) {
	config := map[string]interface{}{"ignoreNosec": false}
	analyzer := gas.NewAnalyzer(config, nil)
	analyzer.AddRule(NewWeakKeyStrength(config))

	issues := gasTestRunner(
		`package main

		import (
			"crypto/rand"
			"crypto/rsa"
			"fmt"
		)

		func main() {

			//Generate Private Key
			pvk, err := rsa.GenerateKey(rand.Reader, 1024)

			if err != nil {
				fmt.Println(err)
			}
			fmt.Println(pvk)

		}`, analyzer)

	checkTestResults(t, issues, 1, "RSA keys should")
}
rules/rulelist.go (new file, 101 lines)
@@ -0,0 +1,101 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"github.com/GoASTScanner/gas"
)

// RuleDefinition contains the description of a rule and a mechanism to
// create it.
type RuleDefinition struct {
	Description string
	Create      gas.RuleBuilder
}

// RuleList is a mapping of rule ID's to rule definitions
type RuleList map[string]RuleDefinition

// Builders returns all the create methods for a given rule list
func (rl RuleList) Builders() []gas.RuleBuilder {
	builders := make([]gas.RuleBuilder, 0, len(rl))
	for _, def := range rl {
		builders = append(builders, def.Create)
	}
	return builders
}

// RuleFilter can be used to include or exclude a rule depending on the return
// value of the function
type RuleFilter func(string) bool

// NewRuleFilter is a closure that will include/exclude the rule ID's based on
// the supplied boolean value.
func NewRuleFilter(action bool, ruleIDs ...string) RuleFilter {
	rulelist := make(map[string]bool)
	for _, rule := range ruleIDs {
		rulelist[rule] = true
	}
	return func(rule string) bool {
		if _, found := rulelist[rule]; found {
			return action
		}
		return !action
	}
}

// Generate the list of rules to use
func Generate(filters ...RuleFilter) RuleList {
	rules := map[string]RuleDefinition{
		// misc
		"G101": RuleDefinition{"Look for hardcoded credentials", NewHardcodedCredentials},
		"G102": RuleDefinition{"Bind to all interfaces", NewBindsToAllNetworkInterfaces},
		"G103": RuleDefinition{"Audit the use of unsafe block", NewUsingUnsafe},
		"G104": RuleDefinition{"Audit errors not checked", NewNoErrorCheck},
		"G105": RuleDefinition{"Audit the use of big.Exp function", NewUsingBigExp},

		// injection
		"G201": RuleDefinition{"SQL query construction using format string", NewSQLStrFormat},
		"G202": RuleDefinition{"SQL query construction using string concatenation", NewSQLStrConcat},
		"G203": RuleDefinition{"Use of unescaped data in HTML templates", NewTemplateCheck},
		"G204": RuleDefinition{"Audit use of command execution", NewSubproc},

		// filesystem
		"G301": RuleDefinition{"Poor file permissions used when creating a directory", NewMkdirPerms},
		"G302": RuleDefinition{"Poor file permisions used when creation file or using chmod", NewFilePerms},
		"G303": RuleDefinition{"Creating tempfile using a predictable path", NewBadTempFile},

		// crypto
		"G401": RuleDefinition{"Detect the usage of DES, RC4, or MD5", NewUsesWeakCryptography},
		"G402": RuleDefinition{"Look for bad TLS connection settings", NewIntermediateTLSCheck},
		"G403": RuleDefinition{"Ensure minimum RSA key length of 2048 bits", NewWeakKeyStrength},
		"G404": RuleDefinition{"Insecure random number source (rand)", NewWeakRandCheck},

		// blacklist
		"G501": RuleDefinition{"Import blacklist: crypto/md5", NewBlacklistedImportMD5},
		"G502": RuleDefinition{"Import blacklist: crypto/des", NewBlacklistedImportDES},
		"G503": RuleDefinition{"Import blacklist: crypto/rc4", NewBlacklistedImportRC4},
		"G504": RuleDefinition{"Import blacklist: net/http/cgi", NewBlacklistedImportCGI},
	}

	for rule := range rules {
		for _, filter := range filters {
			if filter(rule) {
				delete(rules, rule)
			}
		}
	}
	return rules
}
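As an aside on how this new registry is meant to be consumed: the sketch below mirrors the call pattern used by rules/rules_test.go further down in this diff. It is illustrative only; the logger setup, the main package wrapper, and the choice of excluding G504 are assumptions, not part of the commit.

package main

import (
	"log"
	"os"

	"github.com/GoASTScanner/gas"
	"github.com/GoASTScanner/gas/rules"
)

func main() {
	logger := log.New(os.Stderr, "[gas] ", log.LstdFlags)

	// Build an analyzer and load every generated rule except G504 (net/http/cgi
	// import blacklist). NewRuleFilter(true, ...) makes Generate drop the listed IDs.
	analyzer := gas.NewAnalyzer(gas.NewConfig(), logger)
	ruleList := rules.Generate(rules.NewRuleFilter(true, "G504"))
	analyzer.LoadRules(ruleList.Builders()...)
}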
rules/rules_suite_test.go (new file, 13 lines)
@@ -0,0 +1,13 @@
package rules_test

import (
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"

	"testing"
)

func TestRules(t *testing.T) {
	RegisterFailHandler(Fail)
	RunSpecs(t, "Rules Suite")
}
rules/rules_test.go (new file, 132 lines)
@@ -0,0 +1,132 @@
package rules_test

import (
	"bytes"
	"fmt"
	"log"

	"github.com/GoASTScanner/gas"

	"github.com/GoASTScanner/gas/rules"
	"github.com/GoASTScanner/gas/testutils"
	. "github.com/onsi/ginkgo"
	. "github.com/onsi/gomega"
)

var _ = Describe("gas rules", func() {

	var (
		logger   *log.Logger
		output   *bytes.Buffer
		config   gas.Config
		analyzer *gas.Analyzer
		runner   func(string, []testutils.CodeSample)
	)

	BeforeEach(func() {
		logger, output = testutils.NewLogger()
		config = gas.NewConfig()
		analyzer = gas.NewAnalyzer(config, logger)
		runner = func(rule string, samples []testutils.CodeSample) {
			analyzer.LoadRules(rules.Generate(rules.NewRuleFilter(false, rule)).Builders()...)
			for n, sample := range samples {
				analyzer.Reset()
				pkg := testutils.NewTestPackage()
				defer pkg.Close()
				pkg.AddFile(fmt.Sprintf("sample_%d.go", n), sample.Code)
				pkg.Build()
				e := analyzer.Process(pkg.Path)
				Expect(e).ShouldNot(HaveOccurred())
				issues, _ := analyzer.Report()
				if len(issues) != sample.Errors {
					fmt.Println(sample.Code)
				}
				Expect(issues).Should(HaveLen(sample.Errors))
			}
		}
	})

	Context("report correct errors for all samples", func() {
		It("should detect hardcoded credentials", func() {
			runner("G101", testutils.SampleCodeG101)
		})

		It("should detect binding to all network interfaces", func() {
			runner("G102", testutils.SampleCodeG102)
		})

		It("should use of unsafe block", func() {
			runner("G103", testutils.SampleCodeG103)
		})

		It("should errors not being checked", func() {
			runner("G104", testutils.SampleCodeG104)
		})

		It("should detect of big.Exp function", func() {
			runner("G105", testutils.SampleCodeG105)
		})

		It("should detect sql injection via format strings", func() {
			runner("G201", testutils.SampleCodeG201)
		})

		It("should detect sql injection via string concatenation", func() {
			runner("G202", testutils.SampleCodeG202)
		})

		It("should detect unescaped html in templates", func() {
			runner("G203", testutils.SampleCodeG203)
		})

		It("should detect command execution", func() {
			runner("G204", testutils.SampleCodeG204)
		})

		It("should detect poor file permissions on mkdir", func() {
			runner("G301", testutils.SampleCodeG301)
		})

		It("should detect poor permissions when creating or chmod a file", func() {
			runner("G302", testutils.SampleCodeG302)
		})

		It("should detect insecure temp file creation", func() {
			runner("G303", testutils.SampleCodeG303)
		})

		It("should detect weak crypto algorithms", func() {
			runner("G401", testutils.SampleCodeG401)
		})

		It("should find insecure tls settings", func() {
			runner("G402", testutils.SampleCodeG402)
		})

		It("should detect weak creation of weak rsa keys", func() {
			runner("G403", testutils.SampleCodeG403)
		})

		It("should find non cryptographically secure random number sources", func() {
			runner("G404", testutils.SampleCodeG404)
		})

		It("should detect blacklisted imports - MD5", func() {
			runner("G501", testutils.SampleCodeG501)
		})

		It("should detect blacklisted imports - DES", func() {
			runner("G502", testutils.SampleCodeG502)
		})

		It("should detect blacklisted imports - RC4", func() {
			runner("G503", testutils.SampleCodeG503)
		})

		It("should detect blacklisted imports - CGI (httpoxy)", func() {
			runner("G504", testutils.SampleCodeG504)
		})

	})

})
rules/sql.go (44 changes)
@@ -18,20 +18,20 @@ import (
 	"go/ast"
 	"regexp"
 
-	gas "github.com/GoASTScanner/gas/core"
+	"github.com/GoASTScanner/gas"
 )
 
-type SqlStatement struct {
+type sqlStatement struct {
 	gas.MetaData
 	pattern *regexp.Regexp
 }
 
-type SqlStrConcat struct {
-	SqlStatement
+type sqlStrConcat struct {
+	sqlStatement
 }
 
 // see if we can figure out what it is
-func (s *SqlStrConcat) checkObject(n *ast.Ident) bool {
+func (s *sqlStrConcat) checkObject(n *ast.Ident) bool {
 	if n.Obj != nil {
 		return n.Obj.Kind != ast.Var && n.Obj.Kind != ast.Fun
 	}
@@ -39,7 +39,7 @@ func (s *SqlStrConcat) checkObject(n *ast.Ident) bool {
 }
 
 // Look for "SELECT * FROM table WHERE " + " ' OR 1=1"
-func (s *SqlStrConcat) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
+func (s *sqlStrConcat) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
 	if node, ok := n.(*ast.BinaryExpr); ok {
 		if start, ok := node.X.(*ast.BasicLit); ok {
 			if str, e := gas.GetString(start); s.pattern.MatchString(str) && e == nil {
@@ -56,9 +56,10 @@ func (s *SqlStrConcat) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
 	return nil, nil
 }
 
-func NewSqlStrConcat(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-	return &SqlStrConcat{
-		SqlStatement: SqlStatement{
+// NewSQLStrConcat looks for cases where we are building SQL strings via concatenation
+func NewSQLStrConcat(conf gas.Config) (gas.Rule, []ast.Node) {
+	return &sqlStrConcat{
+		sqlStatement: sqlStatement{
 			pattern: regexp.MustCompile(`(?)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) `),
 			MetaData: gas.MetaData{
 				Severity: gas.Medium,
@@ -69,14 +70,16 @@ func NewSqlStrConcat(conf map[string]interface{}) (gas.Rule, []ast.Node) {
 	}, []ast.Node{(*ast.BinaryExpr)(nil)}
 }
 
-type SqlStrFormat struct {
-	SqlStatement
-	call *regexp.Regexp
+type sqlStrFormat struct {
+	sqlStatement
+	calls gas.CallList
 }
 
 // Looks for "fmt.Sprintf("SELECT * FROM foo where '%s', userInput)"
-func (s *SqlStrFormat) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
-	if node := gas.MatchCall(n, s.call); node != nil {
+func (s *sqlStrFormat) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
+	// TODO(gm) improve confidence if database/sql is being used
+	if node := s.calls.ContainsCallExpr(n, c); node != nil {
 		if arg, e := gas.GetString(node.Args[0]); s.pattern.MatchString(arg) && e == nil {
 			return gas.NewIssue(c, n, s.What, s.Severity, s.Confidence), nil
 		}
@@ -84,10 +87,11 @@ func (s *SqlStrFormat) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err err
 	return nil, nil
 }
 
-func NewSqlStrFormat(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-	return &SqlStrFormat{
-		call: regexp.MustCompile(`^fmt\.Sprintf$`),
-		SqlStatement: SqlStatement{
+// NewSQLStrFormat looks for cases where we're building SQL query strings using format strings
+func NewSQLStrFormat(conf gas.Config) (gas.Rule, []ast.Node) {
+	rule := &sqlStrFormat{
+		calls: gas.NewCallList(),
+		sqlStatement: sqlStatement{
 			pattern: regexp.MustCompile("(?)(SELECT|DELETE|INSERT|UPDATE|INTO|FROM|WHERE) "),
 			MetaData: gas.MetaData{
 				Severity: gas.Medium,
@@ -95,5 +99,7 @@ func NewSqlStrFormat(conf map[string]interface{}) (gas.Rule, []ast.Node) {
 			What: "SQL string formatting",
 		},
 	},
-	}, []ast.Node{(*ast.CallExpr)(nil)}
+	}
+	rule.calls.AddAll("fmt", "Sprint", "Sprintf", "Sprintln")
+	return rule, []ast.Node{(*ast.CallExpr)(nil)}
 }
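For reference, and not part of the diff: the kind of call site the reworked format-string rule (G201) matches once the fmt calls are registered via AddAll. This mirrors the sample in the SQL test file removed below; the package and function names here are made up.

package sample

import (
	"database/sql"
	"fmt"
	"os"
)

// lookup builds its query with fmt.Sprintf and a SQL-looking format string,
// which is exactly what the sqlStrFormat rule now reports.
func lookup(db *sql.DB) (*sql.Rows, error) {
	q := fmt.Sprintf("SELECT * FROM foo WHERE name = '%s'", os.Args[1])
	return db.Query(q)
}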
@ -1,216 +0,0 @@
|
||||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package rules
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestSQLInjectionViaConcatenation(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewSqlStrConcat(config))
|
|
||||||
|
|
||||||
source := `
|
|
||||||
package main
|
|
||||||
import (
|
|
||||||
"database/sql"
|
|
||||||
//_ "github.com/mattn/go-sqlite3"
|
|
||||||
"os"
|
|
||||||
)
|
|
||||||
func main(){
|
|
||||||
db, err := sql.Open("sqlite3", ":memory:")
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
rows, err := db.Query("SELECT * FROM foo WHERE name = " + os.Args[1])
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
defer rows.Close()
|
|
||||||
}
|
|
||||||
`
|
|
||||||
issues := gasTestRunner(source, analyzer)
|
|
||||||
checkTestResults(t, issues, 1, "SQL string concatenation")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestSQLInjectionViaIntepolation(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewSqlStrFormat(config))
|
|
||||||
|
|
||||||
source := `
|
|
||||||
package main
|
|
||||||
import (
|
|
||||||
"database/sql"
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
//_ "github.com/mattn/go-sqlite3"
|
|
||||||
)
|
|
||||||
func main(){
|
|
||||||
db, err := sql.Open("sqlite3", ":memory:")
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
q := fmt.Sprintf("SELECT * FROM foo where name = '%s'", os.Args[1])
|
|
||||||
rows, err := db.Query(q)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
defer rows.Close()
|
|
||||||
}
|
|
||||||
`
|
|
||||||
issues := gasTestRunner(source, analyzer)
|
|
||||||
checkTestResults(t, issues, 1, "SQL string formatting")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestSQLInjectionFalsePositiveA(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewSqlStrConcat(config))
|
|
||||||
analyzer.AddRule(NewSqlStrFormat(config))
|
|
||||||
|
|
||||||
source := `
|
|
||||||
|
|
||||||
package main
|
|
||||||
import (
|
|
||||||
"database/sql"
|
|
||||||
//_ "github.com/mattn/go-sqlite3"
|
|
||||||
)
|
|
||||||
|
|
||||||
var staticQuery = "SELECT * FROM foo WHERE age < 32"
|
|
||||||
|
|
||||||
func main(){
|
|
||||||
db, err := sql.Open("sqlite3", ":memory:")
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
rows, err := db.Query(staticQuery)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
defer rows.Close()
|
|
||||||
}
|
|
||||||
|
|
||||||
`
|
|
||||||
issues := gasTestRunner(source, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 0, "Not expected to match")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestSQLInjectionFalsePositiveB(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewSqlStrConcat(config))
|
|
||||||
analyzer.AddRule(NewSqlStrFormat(config))
|
|
||||||
|
|
||||||
source := `
|
|
||||||
|
|
||||||
package main
|
|
||||||
import (
|
|
||||||
"database/sql"
|
|
||||||
//_ "github.com/mattn/go-sqlite3"
|
|
||||||
)
|
|
||||||
|
|
||||||
var staticQuery = "SELECT * FROM foo WHERE age < 32"
|
|
||||||
|
|
||||||
func main(){
|
|
||||||
db, err := sql.Open("sqlite3", ":memory:")
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
rows, err := db.Query(staticQuery)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
defer rows.Close()
|
|
||||||
}
|
|
||||||
|
|
||||||
`
|
|
||||||
issues := gasTestRunner(source, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 0, "Not expected to match")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestSQLInjectionFalsePositiveC(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewSqlStrConcat(config))
|
|
||||||
analyzer.AddRule(NewSqlStrFormat(config))
|
|
||||||
|
|
||||||
source := `
|
|
||||||
|
|
||||||
package main
|
|
||||||
import (
|
|
||||||
"database/sql"
|
|
||||||
//_ "github.com/mattn/go-sqlite3"
|
|
||||||
)
|
|
||||||
|
|
||||||
var staticQuery = "SELECT * FROM foo WHERE age < "
|
|
||||||
|
|
||||||
func main(){
|
|
||||||
db, err := sql.Open("sqlite3", ":memory:")
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
rows, err := db.Query(staticQuery + "32")
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
defer rows.Close()
|
|
||||||
}
|
|
||||||
|
|
||||||
`
|
|
||||||
issues := gasTestRunner(source, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 0, "Not expected to match")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestSQLInjectionFalsePositiveD(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewSqlStrConcat(config))
|
|
||||||
analyzer.AddRule(NewSqlStrFormat(config))
|
|
||||||
|
|
||||||
source := `
|
|
||||||
|
|
||||||
package main
|
|
||||||
import (
|
|
||||||
"database/sql"
|
|
||||||
//_ "github.com/mattn/go-sqlite3"
|
|
||||||
)
|
|
||||||
|
|
||||||
const age = "32"
|
|
||||||
var staticQuery = "SELECT * FROM foo WHERE age < "
|
|
||||||
|
|
||||||
func main(){
|
|
||||||
db, err := sql.Open("sqlite3", ":memory:")
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
rows, err := db.Query(staticQuery + age)
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
defer rows.Close()
|
|
||||||
}
|
|
||||||
|
|
||||||
`
|
|
||||||
issues := gasTestRunner(source, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 0, "Not expected to match")
|
|
||||||
}
|
|
|
@@ -16,41 +16,43 @@ package rules
 
 import (
 	"go/ast"
-	"regexp"
-	"strings"
+	"go/types"
 
-	gas "github.com/GoASTScanner/gas/core"
+	"github.com/GoASTScanner/gas"
 )
 
-type Subprocess struct {
-	pattern *regexp.Regexp
+type subprocess struct {
+	gas.CallList
 }
 
-func (r *Subprocess) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
-	if node := gas.MatchCall(n, r.pattern); node != nil {
+// TODO(gm) The only real potential for command injection with a Go project
+// is something like this:
+//
+// syscall.Exec("/bin/sh", []string{"-c", tainted})
+//
+// E.g. Input is correctly escaped but the execution context being used
+// is unsafe. For example:
+//
+// syscall.Exec("echo", "foobar" + tainted)
+func (r *subprocess) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
+	if node := r.ContainsCallExpr(n, c); node != nil {
 		for _, arg := range node.Args {
-			if !gas.TryResolve(arg, c) {
-				what := "Subprocess launching with variable."
-				return gas.NewIssue(c, n, what, gas.High, gas.High), nil
+			if ident, ok := arg.(*ast.Ident); ok {
+				obj := c.Info.ObjectOf(ident)
+				if _, ok := obj.(*types.Var); ok && !gas.TryResolve(ident, c) {
+					return gas.NewIssue(c, n, "Subprocess launched with variable", gas.Medium, gas.High), nil
+				}
 			}
 		}
-
-		// call with partially qualified command
-		if str, err := gas.GetString(node.Args[0]); err == nil {
-			if !strings.HasPrefix(str, "/") {
-				what := "Subprocess launching with partial path."
-				return gas.NewIssue(c, n, what, gas.Medium, gas.High), nil
-			}
-		}
-
-		what := "Subprocess launching should be audited."
-		return gas.NewIssue(c, n, what, gas.Low, gas.High), nil
+		return gas.NewIssue(c, n, "Subprocess launching should be audited", gas.Low, gas.High), nil
 	}
 	return nil, nil
 }
 
-func NewSubproc(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-	return &Subprocess{
-		pattern: regexp.MustCompile(`^exec\.Command|syscall\.Exec$`),
-	}, []ast.Node{(*ast.CallExpr)(nil)}
+// NewSubproc detects cases where we are forking out to an external process
+func NewSubproc(conf gas.Config) (gas.Rule, []ast.Node) {
+	rule := &subprocess{gas.NewCallList()}
+	rule.Add("exec", "Command")
+	rule.Add("syscall", "Exec")
+	return rule, []ast.Node{(*ast.CallExpr)(nil)}
 }
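A hypothetical pair of call sites, not taken from the repository, to illustrate how the rewritten subprocess rule now grades its findings: a variable argument it cannot resolve is reported as "Subprocess launched with variable" at Medium severity, while a call made entirely from literals falls through to the Low severity audit message.

package sample

import (
	"os"
	"os/exec"
)

func run() error {
	// Reported at Medium: "shell" is a variable the rule cannot resolve.
	shell := os.Getenv("SHELL")
	if err := exec.Command(shell, "-c", "ls").Run(); err != nil {
		return err
	}

	// Reported at Low: the call is matched, but every argument is a literal.
	return exec.Command("sleep", "1").Run()
}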
@ -1,124 +0,0 @@
|
||||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package rules
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestSubprocess(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewSubproc(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"log"
|
|
||||||
"os/exec"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
val := "/bin/" + "sleep"
|
|
||||||
cmd := exec.Command(val, "5")
|
|
||||||
err := cmd.Start()
|
|
||||||
if err != nil {
|
|
||||||
log.Fatal(err)
|
|
||||||
}
|
|
||||||
log.Printf("Waiting for command to finish...")
|
|
||||||
err = cmd.Wait()
|
|
||||||
log.Printf("Command finished with error: %v", err)
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "Subprocess launching should be audited.")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestSubprocessVar(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewSubproc(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"log"
|
|
||||||
"os"
|
|
||||||
"os/exec"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
run := "sleep" + os.Getenv("SOMETHING")
|
|
||||||
cmd := exec.Command(run, "5")
|
|
||||||
err := cmd.Start()
|
|
||||||
if err != nil {
|
|
||||||
log.Fatal(err)
|
|
||||||
}
|
|
||||||
log.Printf("Waiting for command to finish...")
|
|
||||||
err = cmd.Wait()
|
|
||||||
log.Printf("Command finished with error: %v", err)
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "Subprocess launching with variable.")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestSubprocessPath(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewSubproc(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"log"
|
|
||||||
"os/exec"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
cmd := exec.Command("sleep", "5")
|
|
||||||
err := cmd.Start()
|
|
||||||
if err != nil {
|
|
||||||
log.Fatal(err)
|
|
||||||
}
|
|
||||||
log.Printf("Waiting for command to finish...")
|
|
||||||
err = cmd.Wait()
|
|
||||||
log.Printf("Command finished with error: %v", err)
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "Subprocess launching with partial path.")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestSubprocessSyscall(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewSubproc(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"syscall"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
syscall.Exec("/bin/cat", []string{ "/etc/passwd" }, nil)
|
|
||||||
}`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "Subprocess launching should be audited.")
|
|
||||||
}
|
|
|
@@ -18,17 +18,17 @@ import (
 	"go/ast"
 	"regexp"
 
-	gas "github.com/GoASTScanner/gas/core"
+	"github.com/GoASTScanner/gas"
 )
 
-type BadTempFile struct {
+type badTempFile struct {
 	gas.MetaData
+	calls gas.CallList
 	args  *regexp.Regexp
-	call  *regexp.Regexp
 }
 
-func (t *BadTempFile) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
-	if node := gas.MatchCall(n, t.call); node != nil {
+func (t *badTempFile) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
+	if node := t.calls.ContainsCallExpr(n, c); node != nil {
 		if arg, e := gas.GetString(node.Args[0]); t.args.MatchString(arg) && e == nil {
 			return gas.NewIssue(c, n, t.What, t.Severity, t.Confidence), nil
 		}
@@ -36,9 +36,13 @@ func (t *BadTempFile) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err erro
 	return nil, nil
 }
 
-func NewBadTempFile(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-	return &BadTempFile{
-		call: regexp.MustCompile(`ioutil\.WriteFile|os\.Create`),
+// NewBadTempFile detects direct writes to predictable path in temporary directory
+func NewBadTempFile(conf gas.Config) (gas.Rule, []ast.Node) {
+	calls := gas.NewCallList()
+	calls.Add("ioutil", "WriteFile")
+	calls.Add("os", "Create")
+	return &badTempFile{
+		calls: calls,
 		args:  regexp.MustCompile(`^/tmp/.*$|^/var/tmp/.*$`),
 		MetaData: gas.MetaData{
 			Severity: gas.Medium,
@@ -1,47 +0,0 @@
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package rules

import (
	"testing"

	gas "github.com/GoASTScanner/gas/core"
)

func TestTempfiles(t *testing.T) {
	config := map[string]interface{}{"ignoreNosec": false}
	analyzer := gas.NewAnalyzer(config, nil)
	analyzer.AddRule(NewBadTempFile(config))

	source := `
	package samples

	import (
		"io/ioutil"
		"os"
	)

	func main() {

		file1, _ := os.Create("/tmp/demo1")
		defer file1.Close()

		ioutil.WriteFile("/tmp/demo2", []byte("This is some data"), 0644)
	}
	`

	issues := gasTestRunner(source, analyzer)
	checkTestResults(t, issues, 2, "shared tmp directory")
}
@@ -16,18 +16,17 @@ package rules
 
 import (
 	"go/ast"
-	"regexp"
 
-	gas "github.com/GoASTScanner/gas/core"
+	"github.com/GoASTScanner/gas"
 )
 
-type TemplateCheck struct {
+type templateCheck struct {
 	gas.MetaData
-	call *regexp.Regexp
+	calls gas.CallList
 }
 
-func (t *TemplateCheck) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
-	if node := gas.MatchCall(n, t.call); node != nil {
+func (t *templateCheck) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
+	if node := t.calls.ContainsCallExpr(n, c); node != nil {
 		for _, arg := range node.Args {
 			if _, ok := arg.(*ast.BasicLit); !ok { // basic lits are safe
 				return gas.NewIssue(c, n, t.What, t.Severity, t.Confidence), nil
@@ -37,9 +36,17 @@ func (t *TemplateCheck) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err er
 	return nil, nil
 }
 
-func NewTemplateCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-	return &TemplateCheck{
-		call: regexp.MustCompile(`^template\.(HTML|JS|URL)$`),
+// NewTemplateCheck constructs the template check rule. This rule is used to
+// find use of tempaltes where HTML/JS escaping is not being used
+func NewTemplateCheck(conf gas.Config) (gas.Rule, []ast.Node) {
+
+	calls := gas.NewCallList()
+	calls.Add("template", "HTML")
+	calls.Add("template", "HTMLAttr")
+	calls.Add("template", "JS")
+	calls.Add("template", "URL")
+	return &templateCheck{
+		calls: calls,
 		MetaData: gas.MetaData{
 			Severity:   gas.Medium,
 			Confidence: gas.Low,
@ -1,136 +0,0 @@
|
||||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package rules
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestTemplateCheckSafe(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewTemplateCheck(config))
|
|
||||||
|
|
||||||
source := `
|
|
||||||
package samples
|
|
||||||
|
|
||||||
import (
|
|
||||||
"html/template"
|
|
||||||
"os"
|
|
||||||
)
|
|
||||||
|
|
||||||
const tmpl = ""
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
t := template.Must(template.New("ex").Parse(tmpl))
|
|
||||||
v := map[string]interface{}{
|
|
||||||
"Title": "Test <b>World</b>",
|
|
||||||
"Body": template.HTML("<script>alert(1)</script>"),
|
|
||||||
}
|
|
||||||
t.Execute(os.Stdout, v)
|
|
||||||
}`
|
|
||||||
|
|
||||||
issues := gasTestRunner(source, analyzer)
|
|
||||||
checkTestResults(t, issues, 0, "this method will not auto-escape HTML. Verify data is well formed")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestTemplateCheckBadHTML(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewTemplateCheck(config))
|
|
||||||
|
|
||||||
source := `
|
|
||||||
package samples
|
|
||||||
|
|
||||||
import (
|
|
||||||
"html/template"
|
|
||||||
"os"
|
|
||||||
)
|
|
||||||
|
|
||||||
const tmpl = ""
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
a := "something from another place"
|
|
||||||
t := template.Must(template.New("ex").Parse(tmpl))
|
|
||||||
v := map[string]interface{}{
|
|
||||||
"Title": "Test <b>World</b>",
|
|
||||||
"Body": template.HTML(a),
|
|
||||||
}
|
|
||||||
t.Execute(os.Stdout, v)
|
|
||||||
}`
|
|
||||||
|
|
||||||
issues := gasTestRunner(source, analyzer)
|
|
||||||
checkTestResults(t, issues, 1, "this method will not auto-escape HTML. Verify data is well formed")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestTemplateCheckBadJS(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewTemplateCheck(config))
|
|
||||||
|
|
||||||
source := `
|
|
||||||
package samples
|
|
||||||
|
|
||||||
import (
|
|
||||||
"html/template"
|
|
||||||
"os"
|
|
||||||
)
|
|
||||||
|
|
||||||
const tmpl = ""
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
a := "something from another place"
|
|
||||||
t := template.Must(template.New("ex").Parse(tmpl))
|
|
||||||
v := map[string]interface{}{
|
|
||||||
"Title": "Test <b>World</b>",
|
|
||||||
"Body": template.JS(a),
|
|
||||||
}
|
|
||||||
t.Execute(os.Stdout, v)
|
|
||||||
}`
|
|
||||||
|
|
||||||
issues := gasTestRunner(source, analyzer)
|
|
||||||
checkTestResults(t, issues, 1, "this method will not auto-escape HTML. Verify data is well formed")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestTemplateCheckBadURL(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewTemplateCheck(config))
|
|
||||||
|
|
||||||
source := `
|
|
||||||
package samples
|
|
||||||
|
|
||||||
import (
|
|
||||||
"html/template"
|
|
||||||
"os"
|
|
||||||
)
|
|
||||||
|
|
||||||
const tmpl = ""
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
a := "something from another place"
|
|
||||||
t := template.Must(template.New("ex").Parse(tmpl))
|
|
||||||
v := map[string]interface{}{
|
|
||||||
"Title": "Test <b>World</b>",
|
|
||||||
"Body": template.URL(a),
|
|
||||||
}
|
|
||||||
t.Execute(os.Stdout, v)
|
|
||||||
}`
|
|
||||||
|
|
||||||
issues := gasTestRunner(source, analyzer)
|
|
||||||
checkTestResults(t, issues, 1, "this method will not auto-escape HTML. Verify data is well formed")
|
|
||||||
}
|
|
rules/tls.go (66 changes)
@ -17,16 +17,14 @@ package rules
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"go/ast"
|
"go/ast"
|
||||||
"reflect"
|
|
||||||
"regexp"
|
|
||||||
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
"github.com/GoASTScanner/gas"
|
||||||
)
|
)
|
||||||
|
|
||||||
type InsecureConfigTLS struct {
|
type insecureConfigTLS struct {
|
||||||
MinVersion int16
|
MinVersion int16
|
||||||
MaxVersion int16
|
MaxVersion int16
|
||||||
pattern *regexp.Regexp
|
requiredType string
|
||||||
goodCiphers []string
|
goodCiphers []string
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -39,15 +37,14 @@ func stringInSlice(a string, list []string) bool {
|
||||||
return false
|
return false
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *InsecureConfigTLS) processTlsCipherSuites(n ast.Node, c *gas.Context) *gas.Issue {
|
func (t *insecureConfigTLS) processTLSCipherSuites(n ast.Node, c *gas.Context) *gas.Issue {
|
||||||
a := reflect.TypeOf(&ast.KeyValueExpr{})
|
|
||||||
b := reflect.TypeOf(&ast.CompositeLit{})
|
if ciphers, ok := n.(*ast.CompositeLit); ok {
|
||||||
if node, ok := gas.SimpleSelect(n, a, b).(*ast.CompositeLit); ok {
|
for _, cipher := range ciphers.Elts {
|
||||||
for _, elt := range node.Elts {
|
if ident, ok := cipher.(*ast.SelectorExpr); ok {
|
||||||
if ident, ok := elt.(*ast.SelectorExpr); ok {
|
|
||||||
if !stringInSlice(ident.Sel.Name, t.goodCiphers) {
|
if !stringInSlice(ident.Sel.Name, t.goodCiphers) {
|
||||||
str := fmt.Sprintf("TLS Bad Cipher Suite: %s", ident.Sel.Name)
|
err := fmt.Sprintf("TLS Bad Cipher Suite: %s", ident.Sel.Name)
|
||||||
return gas.NewIssue(c, n, str, gas.High, gas.High)
|
return gas.NewIssue(c, ident, err, gas.High, gas.High)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -55,9 +52,10 @@ func (t *InsecureConfigTLS) processTlsCipherSuites(n ast.Node, c *gas.Context) *
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (t *InsecureConfigTLS) processTlsConfVal(n *ast.KeyValueExpr, c *gas.Context) *gas.Issue {
|
func (t *insecureConfigTLS) processTLSConfVal(n *ast.KeyValueExpr, c *gas.Context) *gas.Issue {
|
||||||
if ident, ok := n.Key.(*ast.Ident); ok {
|
if ident, ok := n.Key.(*ast.Ident); ok {
|
||||||
switch ident.Name {
|
switch ident.Name {
|
||||||
|
|
||||||
case "InsecureSkipVerify":
|
case "InsecureSkipVerify":
|
||||||
if node, ok := n.Value.(*ast.Ident); ok {
|
if node, ok := n.Value.(*ast.Ident); ok {
|
||||||
if node.Name != "false" {
|
if node.Name != "false" {
|
||||||
|
@ -97,7 +95,7 @@ func (t *InsecureConfigTLS) processTlsConfVal(n *ast.KeyValueExpr, c *gas.Contex
|
||||||
}
|
}
|
||||||
|
|
||||||
case "CipherSuites":
|
case "CipherSuites":
|
||||||
-        if ret := t.processTlsCipherSuites(n, c); ret != nil {
+        if ret := t.processTLSCipherSuites(n.Value, c); ret != nil {
             return ret
         }

@ -107,24 +105,24 @@ func (t *InsecureConfigTLS) processTlsConfVal(n *ast.KeyValueExpr, c *gas.Contex
     return nil
 }

-func (t *InsecureConfigTLS) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
-    if node := gas.MatchCompLit(n, t.pattern); node != nil {
-        for _, elt := range node.Elts {
+func (t *insecureConfigTLS) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
+    if complit, ok := n.(*ast.CompositeLit); ok && c.Info.TypeOf(complit.Type).String() == t.requiredType {
+        for _, elt := range complit.Elts {
             if kve, ok := elt.(*ast.KeyValueExpr); ok {
-                gi = t.processTlsConfVal(kve, c)
-                if gi != nil {
-                    break
+                issue := t.processTLSConfVal(kve, c)
+                if issue != nil {
+                    return issue, nil
                 }
             }
         }
     }
-    return
+    return nil, nil
 }

-func NewModernTlsCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-    // https://wiki.mozilla.org/Security/Server_Side_TLS#Modern_compatibility
-    return &InsecureConfigTLS{
-        pattern: regexp.MustCompile(`^tls\.Config$`),
+// NewModernTLSCheck see: https://wiki.mozilla.org/Security/Server_Side_TLS#Modern_compatibility
+func NewModernTLSCheck(conf gas.Config) (gas.Rule, []ast.Node) {
+    return &insecureConfigTLS{
+        requiredType: "crypto/tls.Config",
         MinVersion:   0x0303, // TLS 1.2 only
         MaxVersion:   0x0303,
         goodCiphers: []string{
@ -138,10 +136,10 @@ func NewModernTlsCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
     }, []ast.Node{(*ast.CompositeLit)(nil)}
 }

-func NewIntermediateTlsCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-    // https://wiki.mozilla.org/Security/Server_Side_TLS#Intermediate_compatibility_.28default.29
-    return &InsecureConfigTLS{
-        pattern: regexp.MustCompile(`^tls\.Config$`),
+// NewIntermediateTLSCheck see: https://wiki.mozilla.org/Security/Server_Side_TLS#Intermediate_compatibility_.28default.29
+func NewIntermediateTLSCheck(conf gas.Config) (gas.Rule, []ast.Node) {
+    return &insecureConfigTLS{
+        requiredType: "crypto/tls.Config",
         MinVersion:   0x0301, // TLS 1.2, 1.1, 1.0
         MaxVersion:   0x0303,
         goodCiphers: []string{
@ -164,10 +162,10 @@ func NewIntermediateTlsCheck(conf map[string]interface{}) (gas.Rule, []ast.Node)
     }, []ast.Node{(*ast.CompositeLit)(nil)}
 }

-func NewCompatTlsCheck(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-    // https://wiki.mozilla.org/Security/Server_Side_TLS#Old_compatibility_.28default.29
-    return &InsecureConfigTLS{
-        pattern: regexp.MustCompile(`^tls\.Config$`),
+// NewCompatTLSCheck see: https://wiki.mozilla.org/Security/Server_Side_TLS#Old_compatibility_.28default.29
+func NewCompatTLSCheck(conf gas.Config) (gas.Rule, []ast.Node) {
+    return &insecureConfigTLS{
+        requiredType: "crypto/tls.Config",
         MinVersion:   0x0301, // TLS 1.2, 1.1, 1.0
         MaxVersion:   0x0303,
         goodCiphers: []string{
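For readers mapping the hex constants above onto crypto/tls: 0x0303 is tls.VersionTLS12 and 0x0301 is tls.VersionTLS10. A minimal sketch (not part of the change) of configurations the refactored rule should accept and flag; the cipher suites named here are illustrative stand-ins, not the rule's exact goodCiphers list:

// Sketch only: configs the G402 TLS checks above would accept or report.
package main

import (
    "crypto/tls"
    "net/http"
)

func main() {
    // Satisfies the Modern profile: TLS 1.2 only, AEAD suites, verification left on.
    strict := &tls.Config{
        MinVersion: tls.VersionTLS12, // 0x0303, matching MinVersion/MaxVersion above
        MaxVersion: tls.VersionTLS12,
        CipherSuites: []uint16{
            tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
            tls.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
        },
    }

    // Would be reported: certificate verification disabled and no version floor.
    weak := &tls.Config{
        InsecureSkipVerify: true,
        MinVersion:         0,
    }

    _ = &http.Client{Transport: &http.Transport{TLSClientConfig: strict}}
    _ = &http.Client{Transport: &http.Transport{TLSClientConfig: weak}}
}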
@ -1,169 +0,0 @@
|
||||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package rules
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestInsecureSkipVerify(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewModernTlsCheck(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"crypto/tls"
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
tr := &http.Transport{
|
|
||||||
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
|
|
||||||
}
|
|
||||||
client := &http.Client{Transport: tr}
|
|
||||||
_, err := client.Get("https://golang.org/")
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "TLS InsecureSkipVerify set true")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestInsecureMinVersion(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewModernTlsCheck(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"crypto/tls"
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
tr := &http.Transport{
|
|
||||||
TLSClientConfig: &tls.Config{MinVersion: 0},
|
|
||||||
}
|
|
||||||
client := &http.Client{Transport: tr}
|
|
||||||
_, err := client.Get("https://golang.org/")
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "TLS MinVersion too low")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestInsecureMaxVersion(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewModernTlsCheck(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"crypto/tls"
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
tr := &http.Transport{
|
|
||||||
TLSClientConfig: &tls.Config{MaxVersion: 0},
|
|
||||||
}
|
|
||||||
client := &http.Client{Transport: tr}
|
|
||||||
_, err := client.Get("https://golang.org/")
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "TLS MaxVersion too low")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestInsecureCipherSuite(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewModernTlsCheck(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"crypto/tls"
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
tr := &http.Transport{
|
|
||||||
TLSClientConfig: &tls.Config{CipherSuites: []uint16{
|
|
||||||
tls.TLS_RSA_WITH_RC4_128_SHA,
|
|
||||||
tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
|
|
||||||
},},
|
|
||||||
}
|
|
||||||
client := &http.Client{Transport: tr}
|
|
||||||
_, err := client.Get("https://golang.org/")
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "TLS Bad Cipher Suite: TLS_RSA_WITH_RC4_128_SHA")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestPreferServerCipherSuites(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewModernTlsCheck(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"crypto/tls"
|
|
||||||
"fmt"
|
|
||||||
"net/http"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
tr := &http.Transport{
|
|
||||||
TLSClientConfig: &tls.Config{PreferServerCipherSuites: false},
|
|
||||||
}
|
|
||||||
client := &http.Client{Transport: tr}
|
|
||||||
_, err := client.Get("https://golang.org/")
|
|
||||||
if err != nil {
|
|
||||||
fmt.Println(err)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 1, "TLS PreferServerCipherSuites set false")
|
|
||||||
}
|
|
|
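The helper-driven tests removed above are superseded in this PR by the shared snippets and helpers under testutils/ (added later in this diff). A rough sketch of how a TLS rule could be exercised with those helpers; the import path github.com/GoASTScanner/gas/rules and the use of gas.NewConfig() as the rule configuration are assumptions, and the real suite drives this through Ginkgo specs rather than a main function:

// Sketch only: wiring TestPackage, MockVisitor and SampleCodeG402 together.
package main

import (
    "go/ast"
    "log"

    "github.com/GoASTScanner/gas"
    "github.com/GoASTScanner/gas/rules"     // assumed package path
    "github.com/GoASTScanner/gas/testutils"
)

func countTLSIssues(sample testutils.CodeSample) int {
    pkg := testutils.NewTestPackage()
    defer pkg.Close()
    pkg.AddFile("tls.go", sample.Code)

    ctx := pkg.CreateContext("tls.go")
    rule, _ := rules.NewModernTLSCheck(gas.NewConfig()) // config type assumed

    issues := 0
    v := testutils.NewMockVisitor()
    v.Context = ctx
    v.Callback = func(n ast.Node, c *gas.Context) bool {
        if issue, _ := rule.Match(n, c); issue != nil {
            issues++
        }
        return true
    }
    ast.Walk(v, ctx.Root)
    return issues
}

func main() {
    for _, sample := range testutils.SampleCodeG402 {
        log.Printf("expected %d issue(s), found %d", sample.Errors, countTLSIssues(sample))
    }
}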
@ -15,25 +15,28 @@
 package rules

 import (
-    gas "github.com/GoASTScanner/gas/core"
     "go/ast"
+
+    "github.com/GoASTScanner/gas"
 )

-type UsingUnsafe struct {
+type usingUnsafe struct {
     gas.MetaData
     pkg   string
     calls []string
 }

-func (r *UsingUnsafe) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
+func (r *usingUnsafe) Match(n ast.Node, c *gas.Context) (gi *gas.Issue, err error) {
     if _, matches := gas.MatchCallByPackage(n, c, r.pkg, r.calls...); matches {
         return gas.NewIssue(c, n, r.What, r.Severity, r.Confidence), nil
     }
     return nil, nil
 }

-func NewUsingUnsafe(conf map[string]interface{}) (gas.Rule, []ast.Node) {
-    return &UsingUnsafe{
+// NewUsingUnsafe rule detects the use of the unsafe package. This is only
+// really useful for auditing purposes.
+func NewUsingUnsafe(conf gas.Config) (gas.Rule, []ast.Node) {
+    return &usingUnsafe{
         pkg:   "unsafe",
         calls: []string{"Alignof", "Offsetof", "Sizeof", "Pointer"},
         MetaData: gas.MetaData{
@ -1,55 +0,0 @@
-// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package rules
-
-import (
-    "testing"
-
-    gas "github.com/GoASTScanner/gas/core"
-)
-
-func TestUnsafe(t *testing.T) {
-    config := map[string]interface{}{"ignoreNosec": false}
-    analyzer := gas.NewAnalyzer(config, nil)
-    analyzer.AddRule(NewUsingUnsafe(config))
-
-    issues := gasTestRunner(`
-    package main
-
-    import (
-        "fmt"
-        "unsafe"
-    )
-
-    type Fake struct{}
-
-    func (Fake) Good() {}
-
-    func main() {
-        unsafeM := Fake{}
-        unsafeM.Good()
-        intArray := [...]int{1, 2}
-        fmt.Printf("\nintArray: %v\n", intArray)
-        intPtr := &intArray[0]
-        fmt.Printf("\nintPtr=%p, *intPtr=%d.\n", intPtr, *intPtr)
-        addressHolder := uintptr(unsafe.Pointer(intPtr)) + unsafe.Sizeof(intArray[0])
-        intPtr = (*int)(unsafe.Pointer(addressHolder))
-        fmt.Printf("\nintPtr=%p, *intPtr=%d.\n\n", intPtr, *intPtr)
-    }
-    `, analyzer)
-
-    checkTestResults(t, issues, 3, "Use of unsafe calls")
-
-}
@ -1,40 +0,0 @@
-// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package rules
-
-import (
-    "strings"
-    "testing"
-
-    gas "github.com/GoASTScanner/gas/core"
-)
-
-func gasTestRunner(source string, analyzer gas.Analyzer) []*gas.Issue {
-    analyzer.ProcessSource("dummy.go", source)
-    return analyzer.Issues
-}
-
-func checkTestResults(t *testing.T, issues []*gas.Issue, expected int, msg string) {
-    found := len(issues)
-    if found != expected {
-        t.Errorf("Found %d issues, expected %d", found, expected)
-    }
-
-    for _, issue := range issues {
-        if !strings.Contains(issue.What, msg) {
-            t.Errorf("Unexpected issue identified: %s", issue.What)
-        }
-    }
-}
@ -17,15 +17,15 @@ package rules

 import (
     "go/ast"

-    gas "github.com/GoASTScanner/gas/core"
+    "github.com/GoASTScanner/gas"
 )

-type UsesWeakCryptography struct {
+type usesWeakCryptography struct {
     gas.MetaData
     blacklist map[string][]string
 }

-func (r *UsesWeakCryptography) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {
+func (r *usesWeakCryptography) Match(n ast.Node, c *gas.Context) (*gas.Issue, error) {

     for pkg, funcs := range r.blacklist {
         if _, matched := gas.MatchCallByPackage(n, c, pkg, funcs...); matched {
@ -35,13 +35,13 @@ func (r *UsesWeakCryptography) Match(n ast.Node, c *gas.Context) (*gas.Issue, er
     return nil, nil
 }

-// Uses des.* md5.* or rc4.*
-func NewUsesWeakCryptography(conf map[string]interface{}) (gas.Rule, []ast.Node) {
+// NewUsesWeakCryptography detects uses of des.* md5.* or rc4.*
+func NewUsesWeakCryptography(conf gas.Config) (gas.Rule, []ast.Node) {
     calls := make(map[string][]string)
     calls["crypto/des"] = []string{"NewCipher", "NewTripleDESCipher"}
     calls["crypto/md5"] = []string{"New", "Sum"}
     calls["crypto/rc4"] = []string{"NewCipher"}
-    rule := &UsesWeakCryptography{
+    rule := &usesWeakCryptography{
         blacklist: calls,
         MetaData: gas.MetaData{
             Severity: gas.Medium,
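A small illustrative program (not part of the change) showing the calls the blacklist above matches, alongside standard-library primitives that are not reported; the key sizes are placeholders:

// Sketch only: calls flagged by usesWeakCryptography vs. common replacements.
package main

import (
    "crypto/aes"
    "crypto/des"
    "crypto/md5"
    "crypto/rc4"
    "crypto/sha256"
    "fmt"
)

func main() {
    // Flagged: crypto/md5, crypto/des and crypto/rc4 constructors and sums.
    weakSum := md5.Sum([]byte("data"))
    weakBlock, _ := des.NewCipher(make([]byte, 8))
    weakStream, _ := rc4.NewCipher(make([]byte, 16))

    // Not flagged: SHA-256 and AES.
    strongSum := sha256.Sum256([]byte("data"))
    strongBlock, _ := aes.NewCipher(make([]byte, 32))

    fmt.Println(weakSum, weakBlock, weakStream, strongSum, strongBlock)
}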
@ -1,114 +0,0 @@
|
||||||
// (c) Copyright 2016 Hewlett Packard Enterprise Development LP
|
|
||||||
//
|
|
||||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
// you may not use this file except in compliance with the License.
|
|
||||||
// You may obtain a copy of the License at
|
|
||||||
//
|
|
||||||
// http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
//
|
|
||||||
// Unless required by applicable law or agreed to in writing, software
|
|
||||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
// See the License for the specific language governing permissions and
|
|
||||||
// limitations under the License.
|
|
||||||
|
|
||||||
package rules
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
|
|
||||||
gas "github.com/GoASTScanner/gas/core"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestMD5(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewBlacklist_crypto_md5(config))
|
|
||||||
analyzer.AddRule(NewUsesWeakCryptography(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
import (
|
|
||||||
"crypto/md5"
|
|
||||||
"fmt"
|
|
||||||
"os"
|
|
||||||
)
|
|
||||||
func main() {
|
|
||||||
for _, arg := range os.Args {
|
|
||||||
fmt.Printf("%x - %s\n", md5.Sum([]byte(arg)), arg)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
`, analyzer)
|
|
||||||
checkTestResults(t, issues, 2, "weak cryptographic")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestDES(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewBlacklist_crypto_des(config))
|
|
||||||
analyzer.AddRule(NewUsesWeakCryptography(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"crypto/cipher"
|
|
||||||
"crypto/des"
|
|
||||||
"crypto/rand"
|
|
||||||
"encoding/hex"
|
|
||||||
"fmt"
|
|
||||||
"io"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
block, err := des.NewCipher([]byte("sekritz"))
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
plaintext := []byte("I CAN HAZ SEKRIT MSG PLZ")
|
|
||||||
ciphertext := make([]byte, des.BlockSize+len(plaintext))
|
|
||||||
iv := ciphertext[:des.BlockSize]
|
|
||||||
if _, err := io.ReadFull(rand.Reader, iv); err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
stream := cipher.NewCFBEncrypter(block, iv)
|
|
||||||
stream.XORKeyStream(ciphertext[des.BlockSize:], plaintext)
|
|
||||||
fmt.Println("Secret message is: %s", hex.EncodeToString(ciphertext))
|
|
||||||
}
|
|
||||||
`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 2, "weak cryptographic")
|
|
||||||
}
|
|
||||||
|
|
||||||
func TestRC4(t *testing.T) {
|
|
||||||
config := map[string]interface{}{"ignoreNosec": false}
|
|
||||||
analyzer := gas.NewAnalyzer(config, nil)
|
|
||||||
analyzer.AddRule(NewBlacklist_crypto_rc4(config))
|
|
||||||
analyzer.AddRule(NewUsesWeakCryptography(config))
|
|
||||||
|
|
||||||
issues := gasTestRunner(`
|
|
||||||
package main
|
|
||||||
|
|
||||||
import (
|
|
||||||
"crypto/rc4"
|
|
||||||
"encoding/hex"
|
|
||||||
"fmt"
|
|
||||||
)
|
|
||||||
|
|
||||||
func main() {
|
|
||||||
cipher, err := rc4.NewCipher([]byte("sekritz"))
|
|
||||||
if err != nil {
|
|
||||||
panic(err)
|
|
||||||
}
|
|
||||||
|
|
||||||
plaintext := []byte("I CAN HAZ SEKRIT MSG PLZ")
|
|
||||||
ciphertext := make([]byte, len(plaintext))
|
|
||||||
cipher.XORKeyStream(ciphertext, plaintext)
|
|
||||||
fmt.Println("Secret message is: %s", hex.EncodeToString(ciphertext))
|
|
||||||
}
|
|
||||||
`, analyzer)
|
|
||||||
|
|
||||||
checkTestResults(t, issues, 2, "weak cryptographic")
|
|
||||||
}
|
|
12 testutils/log.go Normal file
@ -0,0 +1,12 @@
+package testutils
+
+import (
+    "bytes"
+    "log"
+)
+
+// NewLogger returns a logger and the buffer that it will be written to
+func NewLogger() (*log.Logger, *bytes.Buffer) {
+    var buf bytes.Buffer
+    return log.New(&buf, "", log.Lshortfile), &buf
+}
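A possible usage sketch for the helper above, assuming tests assert against the returned buffer rather than stderr:

// Sketch only: capturing log output produced during a scan.
package main

import (
    "fmt"

    "github.com/GoASTScanner/gas/testutils"
)

func main() {
    logger, buf := testutils.NewLogger()
    logger.Println("scanning package...")

    // A test would inspect buf.String() for the expected message.
    fmt.Printf("captured: %q\n", buf.String())
}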
133 testutils/pkg.go Normal file
@ -0,0 +1,133 @@
|
||||||
|
package testutils
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"go/build"
|
||||||
|
"go/parser"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/GoASTScanner/gas"
|
||||||
|
"golang.org/x/tools/go/loader"
|
||||||
|
)
|
||||||
|
|
||||||
|
type buildObj struct {
|
||||||
|
pkg *build.Package
|
||||||
|
config loader.Config
|
||||||
|
program *loader.Program
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestPackage is a mock package for testing purposes
|
||||||
|
type TestPackage struct {
|
||||||
|
Path string
|
||||||
|
Files map[string]string
|
||||||
|
ondisk bool
|
||||||
|
build *buildObj
|
||||||
|
}
|
||||||
|
|
||||||
|
// NewTestPackage will create a new and empty package. Must call Close() to cleanup
|
||||||
|
// auxilary files
|
||||||
|
func NewTestPackage() *TestPackage {
|
||||||
|
// Files must exist in $GOPATH
|
||||||
|
sourceDir := path.Join(os.Getenv("GOPATH"), "src")
|
||||||
|
workingDir, err := ioutil.TempDir(sourceDir, "gas_test")
|
||||||
|
if err != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
return &TestPackage{
|
||||||
|
Path: workingDir,
|
||||||
|
Files: make(map[string]string),
|
||||||
|
ondisk: false,
|
||||||
|
build: nil,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// AddFile inserts the filename and contents into the package contents
|
||||||
|
func (p *TestPackage) AddFile(filename, content string) {
|
||||||
|
p.Files[path.Join(p.Path, filename)] = content
|
||||||
|
}
|
||||||
|
|
||||||
|
func (p *TestPackage) write() error {
|
||||||
|
if p.ondisk {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
for filename, content := range p.Files {
|
||||||
|
if e := ioutil.WriteFile(filename, []byte(content), 0644); e != nil {
|
||||||
|
return e
|
||||||
|
}
|
||||||
|
}
|
||||||
|
p.ondisk = true
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build ensures all files are persisted to disk and built
|
||||||
|
func (p *TestPackage) Build() error {
|
||||||
|
if p.build != nil {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
if err := p.write(); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
basePackage, err := build.Default.ImportDir(p.Path, build.ImportComment)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
var packageFiles []string
|
||||||
|
packageConfig := loader.Config{Build: &build.Default, ParserMode: parser.ParseComments}
|
||||||
|
for _, filename := range basePackage.GoFiles {
|
||||||
|
packageFiles = append(packageFiles, path.Join(p.Path, filename))
|
||||||
|
}
|
||||||
|
|
||||||
|
packageConfig.CreateFromFilenames(basePackage.Name, packageFiles...)
|
||||||
|
program, err := packageConfig.Load()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
p.build = &buildObj{
|
||||||
|
pkg: basePackage,
|
||||||
|
config: packageConfig,
|
||||||
|
program: program,
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// CreateContext builds a context out of supplied package context
|
||||||
|
func (p *TestPackage) CreateContext(filename string) *gas.Context {
|
||||||
|
if err := p.Build(); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, pkg := range p.build.program.Created {
|
||||||
|
for _, file := range pkg.Files {
|
||||||
|
pkgFile := p.build.program.Fset.File(file.Pos()).Name()
|
||||||
|
strip := fmt.Sprintf("%s%c", p.Path, os.PathSeparator)
|
||||||
|
pkgFile = strings.TrimPrefix(pkgFile, strip)
|
||||||
|
if pkgFile == filename {
|
||||||
|
ctx := &gas.Context{
|
||||||
|
FileSet: p.build.program.Fset,
|
||||||
|
Root: file,
|
||||||
|
Config: gas.NewConfig(),
|
||||||
|
Info: &pkg.Info,
|
||||||
|
Pkg: pkg.Pkg,
|
||||||
|
Imports: gas.NewImportTracker(),
|
||||||
|
}
|
||||||
|
ctx.Imports.TrackPackages(ctx.Pkg.Imports()...)
|
||||||
|
return ctx
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Close will delete the package and all files in that directory
|
||||||
|
func (p *TestPackage) Close() {
|
||||||
|
if p.ondisk {
|
||||||
|
os.RemoveAll(p.Path)
|
||||||
|
}
|
||||||
|
}
|
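A sketch of the intended TestPackage lifecycle, pieced together from the doc comments above (NewTestPackage / AddFile / Build / CreateContext / Close); the file name and source used here are arbitrary:

// Sketch only: creating a throwaway package under $GOPATH/src for rule tests.
package main

import (
    "fmt"

    "github.com/GoASTScanner/gas/testutils"
)

func main() {
    pkg := testutils.NewTestPackage()
    defer pkg.Close() // removes the temporary directory

    pkg.AddFile("main.go", `package main
func main() {}`)

    if err := pkg.Build(); err != nil {
        fmt.Println("build failed:", err)
        return
    }

    // CreateContext returns a *gas.Context for one file in the package.
    ctx := pkg.CreateContext("main.go")
    fmt.Println("package loaded:", ctx != nil)
}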
652 testutils/source.go Normal file
@ -0,0 +1,652 @@
|
||||||
|
package testutils
|
||||||
|
|
||||||
|
// CodeSample encapsulates a snippet of source code that compiles, and how many errors should be detected
|
||||||
|
type CodeSample struct {
|
||||||
|
Code string
|
||||||
|
Errors int
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
// SampleCodeG101 code snippets for hardcoded credentials
|
||||||
|
SampleCodeG101 = []CodeSample{{`
|
||||||
|
package main
|
||||||
|
import "fmt"
|
||||||
|
func main() {
|
||||||
|
username := "admin"
|
||||||
|
password := "f62e5bcda4fae4f82370da0c6f20697b8f8447ef"
|
||||||
|
fmt.Println("Doing something with: ", username, password)
|
||||||
|
}`, 1}, {`
|
||||||
|
// Entropy check should not report this error by default
|
||||||
|
package main
|
||||||
|
import "fmt"
|
||||||
|
func main() {
|
||||||
|
username := "admin"
|
||||||
|
password := "secret"
|
||||||
|
fmt.Println("Doing something with: ", username, password)
|
||||||
|
}`, 0}, {`
|
||||||
|
package main
|
||||||
|
import "fmt"
|
||||||
|
var password = "f62e5bcda4fae4f82370da0c6f20697b8f8447ef"
|
||||||
|
func main() {
|
||||||
|
username := "admin"
|
||||||
|
fmt.Println("Doing something with: ", username, password)
|
||||||
|
}`, 1}, {`
|
||||||
|
package main
|
||||||
|
import "fmt"
|
||||||
|
const password = "f62e5bcda4fae4f82370da0c6f20697b8f8447ef"
|
||||||
|
func main() {
|
||||||
|
username := "admin"
|
||||||
|
fmt.Println("Doing something with: ", username, password)
|
||||||
|
}`, 1}, {`
|
||||||
|
package main
|
||||||
|
import "fmt"
|
||||||
|
const (
|
||||||
|
username = "user"
|
||||||
|
password = "f62e5bcda4fae4f82370da0c6f20697b8f8447ef"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
fmt.Println("Doing something with: ", username, password)
|
||||||
|
}`, 1}, {`
|
||||||
|
package main
|
||||||
|
var password string
|
||||||
|
func init() {
|
||||||
|
password = "f62e5bcda4fae4f82370da0c6f20697b8f8447ef"
|
||||||
|
}`, 1}, {`
|
||||||
|
package main
|
||||||
|
const (
|
||||||
|
ATNStateSomethingElse = 1
|
||||||
|
ATNStateTokenStart = 42
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
println(ATNStateTokenStart)
|
||||||
|
}`, 0}, {`
|
||||||
|
package main
|
||||||
|
const (
|
||||||
|
ATNStateTokenStart = "f62e5bcda4fae4f82370da0c6f20697b8f8447ef"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
println(ATNStateTokenStart)
|
||||||
|
}`, 1}}
|
||||||
|
|
||||||
|
// SampleCodeG102 code snippets for network binding
|
||||||
|
SampleCodeG102 = []CodeSample{
|
||||||
|
// Bind to all networks explicitly
|
||||||
|
{`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"log"
|
||||||
|
"net"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
l, err := net.Listen("tcp", "0.0.0.0:2000")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
defer l.Close()
|
||||||
|
}`, 1},
|
||||||
|
|
||||||
|
// Bind to all networks implicitly (default if host omitted)
|
||||||
|
{`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"log"
|
||||||
|
"net"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
l, err := net.Listen("tcp", ":2000")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
defer l.Close()
|
||||||
|
}`, 1},
|
||||||
|
}
|
||||||
|
// SampleCodeG103 find instances of unsafe blocks for auditing purposes
|
||||||
|
SampleCodeG103 = []CodeSample{
|
||||||
|
{`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"unsafe"
|
||||||
|
)
|
||||||
|
type Fake struct{}
|
||||||
|
func (Fake) Good() {}
|
||||||
|
func main() {
|
||||||
|
unsafeM := Fake{}
|
||||||
|
unsafeM.Good()
|
||||||
|
intArray := [...]int{1, 2}
|
||||||
|
fmt.Printf("\nintArray: %v\n", intArray)
|
||||||
|
intPtr := &intArray[0]
|
||||||
|
fmt.Printf("\nintPtr=%p, *intPtr=%d.\n", intPtr, *intPtr)
|
||||||
|
addressHolder := uintptr(unsafe.Pointer(intPtr)) + unsafe.Sizeof(intArray[0])
|
||||||
|
intPtr = (*int)(unsafe.Pointer(addressHolder))
|
||||||
|
fmt.Printf("\nintPtr=%p, *intPtr=%d.\n\n", intPtr, *intPtr)
|
||||||
|
}`, 3}}
|
||||||
|
|
||||||
|
// SampleCodeG104 finds errors that aren't being handled
|
||||||
|
SampleCodeG104 = []CodeSample{
|
||||||
|
{`
|
||||||
|
package main
|
||||||
|
import "fmt"
|
||||||
|
func test() (int,error) {
|
||||||
|
return 0, nil
|
||||||
|
}
|
||||||
|
func main() {
|
||||||
|
v, _ := test()
|
||||||
|
fmt.Println(v)
|
||||||
|
}`, 1}, {`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
"fmt"
|
||||||
|
)
|
||||||
|
func a() error {
|
||||||
|
return fmt.Errorf("This is an error")
|
||||||
|
}
|
||||||
|
func b() {
|
||||||
|
fmt.Println("b")
|
||||||
|
ioutil.WriteFile("foo.txt", []byte("bar"), os.ModeExclusive)
|
||||||
|
}
|
||||||
|
func c() string {
|
||||||
|
return fmt.Sprintf("This isn't anything")
|
||||||
|
}
|
||||||
|
func main() {
|
||||||
|
_ = a()
|
||||||
|
a()
|
||||||
|
b()
|
||||||
|
c()
|
||||||
|
}`, 3}, {`
|
||||||
|
package main
|
||||||
|
import "fmt"
|
||||||
|
func test() error {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
func main() {
|
||||||
|
e := test()
|
||||||
|
fmt.Println(e)
|
||||||
|
}`, 0}}
|
||||||
|
|
||||||
|
// SampleCodeG105 - bignum overflow
|
||||||
|
SampleCodeG105 = []CodeSample{{`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"math/big"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
z := new(big.Int)
|
||||||
|
x := new(big.Int)
|
||||||
|
x = x.SetUint64(2)
|
||||||
|
y := new(big.Int)
|
||||||
|
y = y.SetUint64(4)
|
||||||
|
m := new(big.Int)
|
||||||
|
m = m.SetUint64(0)
|
||||||
|
z = z.Exp(x, y, m)
|
||||||
|
}`, 1}}
|
||||||
|
|
||||||
|
// SampleCodeG201 - SQL injection via format string
|
||||||
|
SampleCodeG201 = []CodeSample{
|
||||||
|
{`
|
||||||
|
// Format string without proper quoting
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
//_ "github.com/mattn/go-sqlite3"
|
||||||
|
)
|
||||||
|
|
||||||
|
func main(){
|
||||||
|
db, err := sql.Open("sqlite3", ":memory:")
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
q := fmt.Sprintf("SELECT * FROM foo where name = '%s'", os.Args[1])
|
||||||
|
rows, err := db.Query(q)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
}`, 1}, {
|
||||||
|
`
|
||||||
|
// Format string false positive
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
//_ "github.com/mattn/go-sqlite3"
|
||||||
|
)
|
||||||
|
var staticQuery = "SELECT * FROM foo WHERE age < 32"
|
||||||
|
func main(){
|
||||||
|
db, err := sql.Open("sqlite3", ":memory:")
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
rows, err := db.Query(staticQuery)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
}`, 0}}
|
||||||
|
|
||||||
|
// SampleCodeG202 - SQL query string building via string concatenation
|
||||||
|
SampleCodeG202 = []CodeSample{
|
||||||
|
{`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
//_ "github.com/mattn/go-sqlite3"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
func main(){
|
||||||
|
db, err := sql.Open("sqlite3", ":memory:")
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
rows, err := db.Query("SELECT * FROM foo WHERE name = " + os.Args[1])
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
}`, 1}, {`
|
||||||
|
// false positive
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
//_ "github.com/mattn/go-sqlite3"
|
||||||
|
)
|
||||||
|
var staticQuery = "SELECT * FROM foo WHERE age < "
|
||||||
|
func main(){
|
||||||
|
db, err := sql.Open("sqlite3", ":memory:")
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
rows, err := db.Query(staticQuery + "32")
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
}`, 0}, {`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
//_ "github.com/mattn/go-sqlite3"
|
||||||
|
)
|
||||||
|
const age = "32"
|
||||||
|
var staticQuery = "SELECT * FROM foo WHERE age < "
|
||||||
|
func main(){
|
||||||
|
db, err := sql.Open("sqlite3", ":memory:")
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
rows, err := db.Query(staticQuery + age)
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
defer rows.Close()
|
||||||
|
}
|
||||||
|
`, 0}}
|
||||||
|
|
||||||
|
// SampleCodeG203 - Template checks
|
||||||
|
SampleCodeG203 = []CodeSample{
|
||||||
|
{`
|
||||||
|
// We assume that hardcoded template strings are safe as the programmer would
|
||||||
|
// need to be explicitly shooting themselves in the foot (as below)
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"html/template"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
const tmpl = ""
|
||||||
|
func main() {
|
||||||
|
t := template.Must(template.New("ex").Parse(tmpl))
|
||||||
|
v := map[string]interface{}{
|
||||||
|
"Title": "Test <b>World</b>",
|
||||||
|
"Body": template.HTML("<script>alert(1)</script>"),
|
||||||
|
}
|
||||||
|
t.Execute(os.Stdout, v)
|
||||||
|
}`, 0}, {
|
||||||
|
`
|
||||||
|
// Using a variable to initialize could potentially be dangerous. Under the
|
||||||
|
// current model this will likely produce some false positives.
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"html/template"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
const tmpl = ""
|
||||||
|
func main() {
|
||||||
|
a := "something from another place"
|
||||||
|
t := template.Must(template.New("ex").Parse(tmpl))
|
||||||
|
v := map[string]interface{}{
|
||||||
|
"Title": "Test <b>World</b>",
|
||||||
|
"Body": template.HTML(a),
|
||||||
|
}
|
||||||
|
t.Execute(os.Stdout, v)
|
||||||
|
}`, 1}, {
|
||||||
|
`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"html/template"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
const tmpl = ""
|
||||||
|
func main() {
|
||||||
|
a := "something from another place"
|
||||||
|
t := template.Must(template.New("ex").Parse(tmpl))
|
||||||
|
v := map[string]interface{}{
|
||||||
|
"Title": "Test <b>World</b>",
|
||||||
|
"Body": template.JS(a),
|
||||||
|
}
|
||||||
|
t.Execute(os.Stdout, v)
|
||||||
|
}`, 1}, {
|
||||||
|
`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"html/template"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
const tmpl = ""
|
||||||
|
func main() {
|
||||||
|
a := "something from another place"
|
||||||
|
t := template.Must(template.New("ex").Parse(tmpl))
|
||||||
|
v := map[string]interface{}{
|
||||||
|
"Title": "Test <b>World</b>",
|
||||||
|
"Body": template.URL(a),
|
||||||
|
}
|
||||||
|
t.Execute(os.Stdout, v)
|
||||||
|
}`, 1}}
|
||||||
|
|
||||||
|
// SampleCodeG204 - Subprocess auditing
|
||||||
|
SampleCodeG204 = []CodeSample{{`
|
||||||
|
package main
|
||||||
|
import "syscall"
|
||||||
|
func main() {
|
||||||
|
syscall.Exec("/bin/cat", []string{ "/etc/passwd" }, nil)
|
||||||
|
}`, 1}, {`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"log"
|
||||||
|
"os/exec"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
cmd := exec.Command("sleep", "5")
|
||||||
|
err := cmd.Start()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
log.Printf("Waiting for command to finish...")
|
||||||
|
err = cmd.Wait()
|
||||||
|
log.Printf("Command finished with error: %v", err)
|
||||||
|
}`, 1}, {`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
run := "sleep" + os.Getenv("SOMETHING")
|
||||||
|
cmd := exec.Command(run, "5")
|
||||||
|
err := cmd.Start()
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
log.Printf("Waiting for command to finish...")
|
||||||
|
err = cmd.Wait()
|
||||||
|
log.Printf("Command finished with error: %v", err)
|
||||||
|
}`, 1}}
|
||||||
|
|
||||||
|
// SampleCodeG301 - mkdir permission check
|
||||||
|
SampleCodeG301 = []CodeSample{{`
|
||||||
|
package main
|
||||||
|
import "os"
|
||||||
|
func main() {
|
||||||
|
os.Mkdir("/tmp/mydir", 0777)
|
||||||
|
os.Mkdir("/tmp/mydir", 0600)
|
||||||
|
os.MkdirAll("/tmp/mydir/mysubidr", 0775)
|
||||||
|
}`, 2}}
|
||||||
|
|
||||||
|
// SampleCodeG302 - file create / chmod permissions check
|
||||||
|
SampleCodeG302 = []CodeSample{{`
|
||||||
|
package main
|
||||||
|
import "os"
|
||||||
|
func main() {
|
||||||
|
os.Chmod("/tmp/somefile", 0777)
|
||||||
|
os.Chmod("/tmp/someotherfile", 0600)
|
||||||
|
os.OpenFile("/tmp/thing", os.O_CREATE|os.O_WRONLY, 0666)
|
||||||
|
os.OpenFile("/tmp/thing", os.O_CREATE|os.O_WRONLY, 0600)
|
||||||
|
}`, 2}}
|
||||||
|
|
||||||
|
// SampleCodeG303 - bad tempfile permissions & hardcoded shared path
|
||||||
|
SampleCodeG303 = []CodeSample{{`
|
||||||
|
package samples
|
||||||
|
import (
|
||||||
|
"io/ioutil"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
file1, _ := os.Create("/tmp/demo1")
|
||||||
|
defer file1.Close()
|
||||||
|
ioutil.WriteFile("/tmp/demo2", []byte("This is some data"), 0644)
|
||||||
|
}`, 2}}
|
||||||
|
|
||||||
|
// SampleCodeG401 - Use of weak crypto MD5
|
||||||
|
SampleCodeG401 = []CodeSample{
|
||||||
|
{`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"crypto/md5"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
f, err := os.Open("file.txt")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
defer f.Close()
|
||||||
|
|
||||||
|
h := md5.New()
|
||||||
|
if _, err := io.Copy(h, f); err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
fmt.Printf("%x", h.Sum(nil))
|
||||||
|
}`, 1}}
|
||||||
|
|
||||||
|
// SampleCodeG402 - TLS settings
|
||||||
|
SampleCodeG402 = []CodeSample{{`
|
||||||
|
// InsecureSkipVerify
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"crypto/tls"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
tr := &http.Transport{
|
||||||
|
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
|
||||||
|
}
|
||||||
|
|
||||||
|
client := &http.Client{Transport: tr}
|
||||||
|
_, err := client.Get("https://golang.org/")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println(err)
|
||||||
|
}
|
||||||
|
}`, 1}, {
|
||||||
|
`
|
||||||
|
// Insecure minimum version
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"crypto/tls"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
tr := &http.Transport{
|
||||||
|
TLSClientConfig: &tls.Config{MinVersion: 0},
|
||||||
|
}
|
||||||
|
client := &http.Client{Transport: tr}
|
||||||
|
_, err := client.Get("https://golang.org/")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println(err)
|
||||||
|
}
|
||||||
|
}`, 1}, {`
|
||||||
|
// Insecure max version
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"crypto/tls"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
tr := &http.Transport{
|
||||||
|
TLSClientConfig: &tls.Config{MaxVersion: 0},
|
||||||
|
}
|
||||||
|
client := &http.Client{Transport: tr}
|
||||||
|
_, err := client.Get("https://golang.org/")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println(err)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`, 1}, {
|
||||||
|
`
|
||||||
|
// Insecure ciphersuite selection
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"crypto/tls"
|
||||||
|
"fmt"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
tr := &http.Transport{
|
||||||
|
TLSClientConfig: &tls.Config{CipherSuites: []uint16{
|
||||||
|
tls.TLS_RSA_WITH_RC4_128_SHA,
|
||||||
|
tls.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
|
||||||
|
},},
|
||||||
|
}
|
||||||
|
client := &http.Client{Transport: tr}
|
||||||
|
_, err := client.Get("https://golang.org/")
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println(err)
|
||||||
|
}
|
||||||
|
}`, 1}}
|
||||||
|
|
||||||
|
// SampleCodeG403 - weak key strength
|
||||||
|
SampleCodeG403 = []CodeSample{
|
||||||
|
{`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"crypto/rand"
|
||||||
|
"crypto/rsa"
|
||||||
|
"fmt"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
//Generate Private Key
|
||||||
|
pvk, err := rsa.GenerateKey(rand.Reader, 1024)
|
||||||
|
if err != nil {
|
||||||
|
fmt.Println(err)
|
||||||
|
}
|
||||||
|
fmt.Println(pvk)
|
||||||
|
}`, 1}}
|
||||||
|
|
||||||
|
// SampleCodeG404 - weak random number
|
||||||
|
SampleCodeG404 = []CodeSample{
|
||||||
|
{`
|
||||||
|
package main
|
||||||
|
import "crypto/rand"
|
||||||
|
func main() {
|
||||||
|
good, _ := rand.Read(nil)
|
||||||
|
println(good)
|
||||||
|
}`, 0}, {`
|
||||||
|
package main
|
||||||
|
import "math/rand"
|
||||||
|
func main() {
|
||||||
|
bad := rand.Int()
|
||||||
|
println(bad)
|
||||||
|
}`, 1}, {`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"crypto/rand"
|
||||||
|
mrand "math/rand"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
good, _ := rand.Read(nil)
|
||||||
|
println(good)
|
||||||
|
i := mrand.Int31()
|
||||||
|
println(i)
|
||||||
|
}`, 0}}
|
||||||
|
|
||||||
|
// SampleCodeG501 - Blacklisted import MD5
|
||||||
|
SampleCodeG501 = []CodeSample{
|
||||||
|
{`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"crypto/md5"
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
for _, arg := range os.Args {
|
||||||
|
fmt.Printf("%x - %s\n", md5.Sum([]byte(arg)), arg)
|
||||||
|
}
|
||||||
|
}`, 1}}
|
||||||
|
|
||||||
|
// SampleCodeG502 - Blacklisted import DES
|
||||||
|
SampleCodeG502 = []CodeSample{
|
||||||
|
{`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"crypto/cipher"
|
||||||
|
"crypto/des"
|
||||||
|
"crypto/rand"
|
||||||
|
"encoding/hex"
|
||||||
|
"fmt"
|
||||||
|
"io"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
block, err := des.NewCipher([]byte("sekritz"))
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
plaintext := []byte("I CAN HAZ SEKRIT MSG PLZ")
|
||||||
|
ciphertext := make([]byte, des.BlockSize+len(plaintext))
|
||||||
|
iv := ciphertext[:des.BlockSize]
|
||||||
|
if _, err := io.ReadFull(rand.Reader, iv); err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
stream := cipher.NewCFBEncrypter(block, iv)
|
||||||
|
stream.XORKeyStream(ciphertext[des.BlockSize:], plaintext)
|
||||||
|
fmt.Println("Secret message is: %s", hex.EncodeToString(ciphertext))
|
||||||
|
}`, 1}}
|
||||||
|
|
||||||
|
// SampleCodeG503 - Blacklisted import RC4
|
||||||
|
SampleCodeG503 = []CodeSample{{`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"crypto/rc4"
|
||||||
|
"encoding/hex"
|
||||||
|
"fmt"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
cipher, err := rc4.NewCipher([]byte("sekritz"))
|
||||||
|
if err != nil {
|
||||||
|
panic(err)
|
||||||
|
}
|
||||||
|
plaintext := []byte("I CAN HAZ SEKRIT MSG PLZ")
|
||||||
|
ciphertext := make([]byte, len(plaintext))
|
||||||
|
cipher.XORKeyStream(ciphertext, plaintext)
|
||||||
|
fmt.Println("Secret message is: %s", hex.EncodeToString(ciphertext))
|
||||||
|
}`, 1}}
|
||||||
|
|
||||||
|
// SampleCodeG504 - Blacklisted import CGI
|
||||||
|
SampleCodeG504 = []CodeSample{{`
|
||||||
|
package main
|
||||||
|
import (
|
||||||
|
"net/http/cgi"
|
||||||
|
"net/http"
|
||||||
|
)
|
||||||
|
func main() {
|
||||||
|
cgi.Serve(http.FileServer(http.Dir("/usr/share/doc")))
|
||||||
|
}`, 1}}
|
||||||
|
)
|
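A sketch of how these shared samples are meant to be consumed by rule tests: each CodeSample pairs a compilable snippet with the number of issues a rule should report. Iterating SampleCodeG402 is just an example; any of the sample slices works the same way.

// Sketch only: enumerating the shared G402 samples and their expected counts.
package main

import (
    "fmt"

    "github.com/GoASTScanner/gas/testutils"
)

func main() {
    for i, sample := range testutils.SampleCodeG402 {
        fmt.Printf("G402 sample %d: expect %d issue(s), %d bytes of source\n",
            i, sample.Errors, len(sample.Code))
    }
}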
28 testutils/visitor.go Normal file
@ -0,0 +1,28 @@
+package testutils
+
+import (
+    "go/ast"
+
+    "github.com/GoASTScanner/gas"
+)
+
+// MockVisitor is useful for stubbing out ast.Visitor with callback
+// and looking for specific conditions to exist.
+type MockVisitor struct {
+    Context  *gas.Context
+    Callback func(n ast.Node, ctx *gas.Context) bool
+}
+
+// NewMockVisitor creates a new empty struct, the Context and
+// Callback must be set manually. See call_list_test.go for an example.
+func NewMockVisitor() *MockVisitor {
+    return &MockVisitor{}
+}
+
+// Visit satisfies the ast.Visitor interface
+func (v *MockVisitor) Visit(n ast.Node) ast.Visitor {
+    if v.Callback(n, v.Context) {
+        return v
+    }
+    return nil
+}
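A usage sketch for MockVisitor; the empty gas.Context is a placeholder (a real test would obtain one from TestPackage.CreateContext):

// Sketch only: using MockVisitor to probe an AST for a condition.
package main

import (
    "fmt"
    "go/ast"
    "go/parser"
    "go/token"

    "github.com/GoASTScanner/gas"
    "github.com/GoASTScanner/gas/testutils"
)

func main() {
    fset := token.NewFileSet()
    file, _ := parser.ParseFile(fset, "x.go", "package x\nfunc f() { g() }", 0)

    calls := 0
    v := testutils.NewMockVisitor()
    v.Context = &gas.Context{} // placeholder context
    v.Callback = func(n ast.Node, _ *gas.Context) bool {
        if _, ok := n.(*ast.CallExpr); ok {
            calls++
        }
        return true // keep walking
    }
    ast.Walk(v, file)
    fmt.Println("call expressions found:", calls)
}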
@ -1,7 +0,0 @@
-# package
-github.com/GoAstScanner/gas
-
-# import
-github.com/GoASTScanner/gas cc52ef5
-github.com/nbutton23/zxcvbn-go a22cb81
-github.com/ryanuber/go-glob v0.1
2 vendor/github.com/nbutton23/zxcvbn-go/.gitignore generated vendored
@ -1,2 +0,0 @@
-zxcvbn
-debug.test
20 vendor/github.com/nbutton23/zxcvbn-go/LICENSE.txt generated vendored
@ -1,20 +0,0 @@
-Copyright (c) Nathan Button
-
-Permission is hereby granted, free of charge, to any person obtaining
-a copy of this software and associated documentation files (the
-"Software"), to deal in the Software without restriction, including
-without limitation the rights to use, copy, modify, merge, publish,
-distribute, sublicense, and/or sell copies of the Software, and to
-permit persons to whom the Software is furnished to do so, subject to
-the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
-LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
-OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
-WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
78 vendor/github.com/nbutton23/zxcvbn-go/README.md generated vendored
@ -1,78 +0,0 @@
|
||||||
This is a goLang port of python-zxcvbn and zxcvbn, which are python and JavaScript password strength
|
|
||||||
generators. zxcvbn attempts to give sound password advice through pattern
|
|
||||||
matching and conservative entropy calculations. It finds 10k common passwords,
|
|
||||||
common American names and surnames, common English words, and common patterns
|
|
||||||
like dates, repeats (aaa), sequences (abcd), and QWERTY patterns.
|
|
||||||
|
|
||||||
Please refer to http://tech.dropbox.com/?p=165 for the full details and
|
|
||||||
motivation behind zxcbvn. The source code for the original JavaScript (well,
|
|
||||||
actually CoffeeScript) implementation can be found at:
|
|
||||||
|
|
||||||
https://github.com/lowe/zxcvbn
|
|
||||||
|
|
||||||
Python at:
|
|
||||||
|
|
||||||
https://github.com/dropbox/python-zxcvbn
|
|
||||||
|
|
||||||
For full motivation, see:
|
|
||||||
|
|
||||||
http://tech.dropbox.com/?p=165
|
|
||||||
|
|
||||||
------------------------------------------------------------------------
|
|
||||||
Use
|
|
||||||
------------------------------------------------------------------------
|
|
||||||
|
|
||||||
The zxcvbn module has the public method PasswordStrength() function. Import zxcvbn, and
|
|
||||||
call PasswordStrength(password string, userInputs []string). The function will return a
|
|
||||||
result dictionary with the following keys:
|
|
||||||
|
|
||||||
Entropy # bits
|
|
||||||
|
|
||||||
CrackTime # estimation of actual crack time, in seconds.
|
|
||||||
|
|
||||||
CrackTimeDisplay # same crack time, as a friendlier string:
|
|
||||||
# "instant", "6 minutes", "centuries", etc.
|
|
||||||
|
|
||||||
Score # [0,1,2,3,4] if crack time is less than
|
|
||||||
# [10^2, 10^4, 10^6, 10^8, Infinity].
|
|
||||||
# (useful for implementing a strength bar.)
|
|
||||||
|
|
||||||
MatchSequence # the list of patterns that zxcvbn based the
|
|
||||||
# entropy calculation on.
|
|
||||||
|
|
||||||
CalcTime # how long it took to calculate an answer,
|
|
||||||
# in milliseconds. usually only a few ms.
|
|
||||||
|
|
||||||
The userInputs argument is an splice of strings that zxcvbn
|
|
||||||
will add to its internal dictionary. This can be whatever list of
|
|
||||||
strings you like, but is meant for user inputs from other fields of the
|
|
||||||
form, like name and email. That way a password that includes the user's
|
|
||||||
personal info can be heavily penalized. This list is also good for
|
|
||||||
site-specific vocabulary.
|
|
||||||
|
|
||||||
Bug reports and pull requests welcome!
|
|
||||||
|
|
||||||
------------------------------------------------------------------------
|
|
||||||
Project Status
|
|
||||||
------------------------------------------------------------------------
|
|
||||||
|
|
||||||
Use zxcvbn_test.go to check how close to feature parity the project is.
|
|
||||||
|
|
||||||
------------------------------------------------------------------------
|
|
||||||
Acknowledgment
|
|
||||||
------------------------------------------------------------------------
|
|
||||||
|
|
||||||
Thanks to Dan Wheeler (https://github.com/lowe) for the CoffeeScript implementation
|
|
||||||
(see above.) To repeat his outside acknowledgements (which remain useful, as always):
|
|
||||||
|
|
||||||
Many thanks to Mark Burnett for releasing his 10k top passwords list:
|
|
||||||
http://xato.net/passwords/more-top-worst-passwords
|
|
||||||
and for his 2006 book,
|
|
||||||
"Perfect Passwords: Selection, Protection, Authentication"
|
|
||||||
|
|
||||||
Huge thanks to Wiktionary contributors for building a frequency list
|
|
||||||
of English as used in television and movies:
|
|
||||||
http://en.wiktionary.org/wiki/Wiktionary:Frequency_lists
|
|
||||||
|
|
||||||
Last but not least, big thanks to xkcd :)
|
|
||||||
https://xkcd.com/936/
|
|
96 vendor/github.com/nbutton23/zxcvbn-go/adjacency/adjcmartix.go generated vendored
@ -1,96 +0,0 @@
|
||||||
package adjacency
|
|
||||||
|
|
||||||
import (
|
|
||||||
"encoding/json"
|
|
||||||
"log"
|
|
||||||
// "fmt"
|
|
||||||
"github.com/nbutton23/zxcvbn-go/data"
|
|
||||||
)
|
|
||||||
|
|
||||||
type AdjacencyGraph struct {
|
|
||||||
Graph map[string][]string
|
|
||||||
averageDegree float64
|
|
||||||
Name string
|
|
||||||
}
|
|
||||||
|
|
||||||
var AdjacencyGph = make(map[string]AdjacencyGraph)
|
|
||||||
|
|
||||||
func init() {
|
|
||||||
AdjacencyGph["qwerty"] = BuildQwerty()
|
|
||||||
AdjacencyGph["dvorak"] = BuildDvorak()
|
|
||||||
AdjacencyGph["keypad"] = BuildKeypad()
|
|
||||||
AdjacencyGph["macKeypad"] = BuildMacKeypad()
|
|
||||||
AdjacencyGph["l33t"] = BuildLeet()
|
|
||||||
}
|
|
||||||
|
|
||||||
func BuildQwerty() AdjacencyGraph {
|
|
||||||
data, err := zxcvbn_data.Asset("data/Qwerty.json")
|
|
||||||
if err != nil {
|
|
||||||
panic("Can't find asset")
|
|
||||||
}
|
|
||||||
return GetAdjancencyGraphFromFile(data, "qwerty")
|
|
||||||
}
|
|
||||||
func BuildDvorak() AdjacencyGraph {
|
|
||||||
data, err := zxcvbn_data.Asset("data/Dvorak.json")
|
|
||||||
if err != nil {
|
|
||||||
panic("Can't find asset")
|
|
||||||
}
|
|
||||||
return GetAdjancencyGraphFromFile(data, "dvorak")
|
|
||||||
}
|
|
||||||
func BuildKeypad() AdjacencyGraph {
|
|
||||||
data, err := zxcvbn_data.Asset("data/Keypad.json")
|
|
||||||
if err != nil {
|
|
||||||
panic("Can't find asset")
|
|
||||||
}
|
|
||||||
return GetAdjancencyGraphFromFile(data, "keypad")
|
|
||||||
}
|
|
||||||
func BuildMacKeypad() AdjacencyGraph {
|
|
||||||
data, err := zxcvbn_data.Asset("data/MacKeypad.json")
|
|
||||||
if err != nil {
|
|
||||||
panic("Can't find asset")
|
|
||||||
}
|
|
||||||
return GetAdjancencyGraphFromFile(data, "mac_keypad")
|
|
||||||
}
|
|
||||||
func BuildLeet() AdjacencyGraph {
|
|
||||||
data, err := zxcvbn_data.Asset("data/L33t.json")
|
|
||||||
if err != nil {
|
|
||||||
panic("Can't find asset")
|
|
||||||
}
|
|
||||||
return GetAdjancencyGraphFromFile(data, "keypad")
|
|
||||||
}
|
|
||||||
|
|
||||||
func GetAdjancencyGraphFromFile(data []byte, name string) AdjacencyGraph {
|
|
||||||
|
|
||||||
var graph AdjacencyGraph
|
|
||||||
err := json.Unmarshal(data, &graph)
|
|
||||||
if err != nil {
|
|
||||||
log.Fatal(err)
|
|
||||||
}
|
|
||||||
graph.Name = name
|
|
||||||
return graph
|
|
||||||
}
|
|
||||||
|
|
||||||
//on qwerty, 'g' has degree 6, being adjacent to 'ftyhbv'. '\' has degree 1.
|
|
||||||
//this calculates the average over all keys.
|
|
||||||
//TODO double check that i ported this correctly scoring.coffee ln 5
|
|
||||||
func (adjGrp AdjacencyGraph) CalculateAvgDegree() float64 {
|
|
||||||
if adjGrp.averageDegree != float64(0) {
|
|
||||||
return adjGrp.averageDegree
|
|
||||||
}
|
|
||||||
var avg float64
|
|
||||||
var count float64
|
|
||||||
for _, value := range adjGrp.Graph {
|
|
||||||
|
|
||||||
for _, char := range value {
|
|
||||||
if char != "" || char != " " {
|
|
||||||
avg += float64(len(char))
|
|
||||||
count++
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
adjGrp.averageDegree = avg / count
|
|
||||||
|
|
||||||
return adjGrp.averageDegree
|
|
||||||
}
|
|
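For context on the vendored code being dropped above: its comment explains that on QWERTY the key 'g' has degree 6, being adjacent to 'ftyhbv', and that the average is taken over all keys. A toy sketch of that average-degree idea is below; it simplifies the original, which iterates over the characters of each neighbour string rather than the neighbour count:

// Sketch only: average degree of a small keyboard adjacency graph.
package main

import "fmt"

func main() {
    graph := map[string][]string{
        "g":  {"fF", "tT", "yY", "hH", "bB", "vV"}, // degree 6
        "\\": {"]}"},                               // degree 1
    }

    var sum, count float64
    for _, neighbours := range graph {
        sum += float64(len(neighbours))
        count++
    }
    fmt.Printf("average degree: %.1f\n", sum/count) // (6 + 1) / 2 = 3.5
}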
444 vendor/github.com/nbutton23/zxcvbn-go/data/bindata.go generated vendored
File diff suppressed because one or more lines are too long
756 vendor/github.com/nbutton23/zxcvbn-go/data/data/Dvorak.json generated vendored
@ -1,756 +0,0 @@
|
||||||
{
|
|
||||||
"Graph": {
|
|
||||||
"0": [
|
|
||||||
"9(",
|
|
||||||
null,
|
|
||||||
null,
|
|
||||||
"[{",
|
|
||||||
"lL",
|
|
||||||
"rR"
|
|
||||||
],
|
|
||||||
"1": [
|
|
||||||
"`~",
|
|
||||||
null,
|
|
||||||
null,
|
|
||||||
"2@",
|
|
||||||
"'\"",
|
|
||||||
null
|
|
||||||
],
|
|
||||||
"2": [
|
|
||||||
"1!",
|
|
||||||
null,
|
|
||||||
null,
|
|
||||||
"3#",
|
|
||||||
",<",
|
|
||||||
"'\""
|
|
||||||
],
|
|
||||||
"3": [
|
|
||||||
"2@",
|
|
||||||
null,
|
|
||||||
null,
|
|
||||||
"4$",
|
|
||||||
".>",
|
|
||||||
",<"
|
|
||||||
],
|
|
||||||
"4": [
|
|
||||||
"3#",
|
|
||||||
null,
|
|
||||||
null,
|
|
||||||
"5%",
|
|
||||||
"pP",
|
|
||||||
".>"
|
|
||||||
],
|
|
||||||
"5": [
|
|
||||||
"4$",
|
|
||||||
null,
|
|
||||||
null,
|
|
||||||
"6^",
|
|
||||||
"yY",
|
|
||||||
"pP"
|
|
||||||
],
|
|
||||||
"6": [
|
|
||||||
"5%",
|
|
||||||
null,
|
|
||||||
null,
|
|
||||||
"7&",
|
|
||||||
"fF",
|
|
||||||
"yY"
|
|
||||||
],
|
|
||||||
"7": [
|
|
||||||
"6^",
|
|
||||||
null,
|
|
||||||
null,
|
|
||||||
"8*",
|
|
||||||
"gG",
|
|
||||||
"fF"
|
|
||||||
],
|
|
||||||
"8": [
|
|
||||||
"7&",
|
|
||||||
null,
|
|
||||||
null,
|
|
||||||
"9(",
|
|
||||||
"cC",
|
|
||||||
"gG"
|
|
||||||
],
|
|
||||||
"9": [
|
|
||||||
"8*",
|
|
||||||
null,
|
|
||||||
null,
|
|
||||||
"0)",
|
|
||||||
"rR",
|
|
||||||
"cC"
|
|
||||||
],
|
|
[… tail of the preceding deleted keyboard-layout adjacency graph: entries for every remaining printable key ("!" through "~"), each listing its six neighbouring keys, with null where no neighbour exists …]
    }
}
1  vendor/github.com/nbutton23/zxcvbn-go/data/data/English.json  generated  vendored
File diff suppressed because one or more lines are too long
3819  vendor/github.com/nbutton23/zxcvbn-go/data/data/FemaleNames.json  generated  vendored
File diff suppressed because it is too large
154  vendor/github.com/nbutton23/zxcvbn-go/data/data/Keypad.json  generated  vendored
@@ -1,154 +0,0 @@
{
    "Graph": {
        "0": [null, "1", "2", "3", ".", null, null, null],
        "1": [null, null, "4", "5", "2", "0", null, null],
        "2": ["1", "4", "5", "6", "3", ".", "0", null],
        "3": ["2", "5", "6", null, null, null, ".", "0"],
        "4": [null, null, "7", "8", "5", "2", "1", null],
        "5": ["4", "7", "8", "9", "6", "3", "2", "1"],
        "6": ["5", "8", "9", "+", null, null, "3", "2"],
        "7": [null, null, null, "/", "8", "5", "4", null],
        "8": ["7", null, "/", "*", "9", "6", "5", "4"],
        "9": ["8", "/", "*", "-", "+", null, "6", "5"],
        "*": ["/", null, null, null, "-", "+", "9", "8"],
        "+": ["9", "*", "-", null, null, null, null, "6"],
        "-": ["*", null, null, null, null, null, "+", "9"],
        ".": ["0", "2", "3", null, null, null, null, null],
        "/": [null, null, null, null, "*", "9", "8", "7"]
    }
}
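Aside for readers of this diff (not part of the commit): neighbour tables like the keypad graph above are what the vendored entropy code later in this diff consumes via CalculateAvgDegree. A minimal standalone sketch of that calculation, assuming the graph is modelled as map[string][]*string with nil marking a missing neighbour; the names below are illustrative only.

package main

import "fmt"

// averageDegree returns the mean number of non-nil neighbours per key.
func averageDegree(graph map[string][]*string) float64 {
    var sum, keys float64
    for _, neighbours := range graph {
        for _, n := range neighbours {
            if n != nil {
                sum++
            }
        }
        keys++
    }
    return sum / keys
}

func main() {
    five, four := "5", "4"
    // A tiny two-key graph: each key has exactly one real neighbour.
    graph := map[string][]*string{
        "4": {nil, nil, nil, nil, &five, nil, nil, nil},
        "5": {&four, nil, nil, nil, nil, nil, nil, nil},
    }
    fmt.Println(averageDegree(graph)) // 1
}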
51  vendor/github.com/nbutton23/zxcvbn-go/data/data/L33t.json  generated  vendored
@@ -1,51 +0,0 @@
{
    "graph": {
        "a": ["4", "@"],
        "b": ["8"],
        "c": ["(", "{", "[", "<"],
        "e": ["3"],
        "g": ["6", "9"],
        "i": ["1", "!", "|"],
        "l": ["1", "|", "7"],
        "o": ["0"],
        "s": ["$", "5"],
        "t": ["+", "7"],
        "x": ["%"],
        "z": ["2"]
    }
}
164  vendor/github.com/nbutton23/zxcvbn-go/data/data/MacKeypad.json  generated  vendored
@@ -1,164 +0,0 @@
{
    "Graph": {
        "0": [null, "1", "2", "3", ".", null, null, null],
        "1": [null, null, "4", "5", "2", "0", null, null],
        "2": ["1", "4", "5", "6", "3", ".", "0", null],
        "3": ["2", "5", "6", "+", null, null, ".", "0"],
        "4": [null, null, "7", "8", "5", "2", "1", null],
        "5": ["4", "7", "8", "9", "6", "3", "2", "1"],
        "6": ["5", "8", "9", "-", "+", null, "3", "2"],
        "7": [null, null, null, "=", "8", "5", "4", null],
        "8": ["7", null, "=", "/", "9", "6", "5", "4"],
        "9": ["8", "=", "/", "*", "-", "+", "6", "5"],
        "*": ["/", null, null, null, null, null, "-", "9"],
        "+": ["6", "9", "-", null, null, null, null, "3"],
        "-": ["9", "/", "*", null, null, null, "+", "6"],
        ".": ["0", "2", "3", null, null, null, null, null],
        "/": ["=", null, null, null, "*", "-", "9", "8"],
        "=": [null, null, null, null, "/", "9", "8", "7"]
    }
}
1006  vendor/github.com/nbutton23/zxcvbn-go/data/data/MaleNames.json  generated  vendored
File diff suppressed because it is too large
1  vendor/github.com/nbutton23/zxcvbn-go/data/data/Passwords.json  generated  vendored
File diff suppressed because one or more lines are too long
756  vendor/github.com/nbutton23/zxcvbn-go/data/data/Qwerty.json  generated  vendored
@@ -1,756 +0,0 @@
[… entire deleted file: the QWERTY adjacency graph, in the same format as the keypad graphs above — every printable key ("!" through "~") mapped to its six neighbouring keys, with null where no neighbour exists …]
1  vendor/github.com/nbutton23/zxcvbn-go/data/data/Surnames.json  generated  vendored
File diff suppressed because one or more lines are too long
215  vendor/github.com/nbutton23/zxcvbn-go/entropy/entropyCalculator.go  generated  vendored
@@ -1,215 +0,0 @@
package entropy

import (
    "github.com/nbutton23/zxcvbn-go/adjacency"
    "github.com/nbutton23/zxcvbn-go/match"
    "github.com/nbutton23/zxcvbn-go/utils/math"
    "math"
    "regexp"
    "unicode"
)

const (
    START_UPPER string = `^[A-Z][^A-Z]+$`
    END_UPPER   string = `^[^A-Z]+[A-Z]$'`
    ALL_UPPER   string = `^[A-Z]+$`
    NUM_YEARS          = float64(119) // years match against 1900 - 2019
    NUM_MONTHS         = float64(12)
    NUM_DAYS           = float64(31)
)

var (
    KEYPAD_STARTING_POSITIONS = len(adjacency.AdjacencyGph["keypad"].Graph)
    KEYPAD_AVG_DEGREE         = adjacency.AdjacencyGph["keypad"].CalculateAvgDegree()
)

func DictionaryEntropy(match match.Match, rank float64) float64 {
    baseEntropy := math.Log2(rank)
    upperCaseEntropy := extraUpperCaseEntropy(match)
    //TODO: L33t
    return baseEntropy + upperCaseEntropy
}

func extraUpperCaseEntropy(match match.Match) float64 {
    word := match.Token

    allLower := true

    for _, char := range word {
        if unicode.IsUpper(char) {
            allLower = false
            break
        }
    }
    if allLower {
        return float64(0)
    }

    //a capitalized word is the most common capitalization scheme,
    //so it only doubles the search space (uncapitalized + capitalized): 1 extra bit of entropy.
    //allcaps and end-capitalized are common enough too, underestimate as 1 extra bit to be safe.

    for _, regex := range []string{START_UPPER, END_UPPER, ALL_UPPER} {
        matcher := regexp.MustCompile(regex)

        if matcher.MatchString(word) {
            return float64(1)
        }
    }
    //Otherwise calculate the number of ways to capitalize U+L uppercase+lowercase letters with U uppercase letters or
    //less. Or, if there's more uppercase than lower (for e.g. PASSwORD), the number of ways to lowercase U+L letters
    //with L lowercase letters or less.

    countUpper, countLower := float64(0), float64(0)
    for _, char := range word {
        if unicode.IsUpper(char) {
            countUpper++
        } else if unicode.IsLower(char) {
            countLower++
        }
    }
    totalLenght := countLower + countUpper
    var possibililities float64

    for i := float64(0); i <= math.Min(countUpper, countLower); i++ {
        possibililities += float64(zxcvbn_math.NChoseK(totalLenght, i))
    }

    if possibililities < 1 {
        return float64(1)
    }

    return float64(math.Log2(possibililities))
}

func SpatialEntropy(match match.Match, turns int, shiftCount int) float64 {
    var s, d float64
    if match.DictionaryName == "qwerty" || match.DictionaryName == "dvorak" {
        //todo: verify qwerty and dvorak have the same length and degree
        s = float64(len(adjacency.BuildQwerty().Graph))
        d = adjacency.BuildQwerty().CalculateAvgDegree()
    } else {
        s = float64(KEYPAD_STARTING_POSITIONS)
        d = KEYPAD_AVG_DEGREE
    }

    possibilities := float64(0)

    length := float64(len(match.Token))

    //TODO: Should this be <= or just < ?
    //Estimate the number of possible patterns w/ length L or less with t turns or less
    for i := float64(2); i <= length+1; i++ {
        possibleTurns := math.Min(float64(turns), i-1)
        for j := float64(1); j <= possibleTurns+1; j++ {
            x := zxcvbn_math.NChoseK(i-1, j-1) * s * math.Pow(d, j)
            possibilities += x
        }
    }

    entropy := math.Log2(possibilities)
    //add extra entropu for shifted keys. ( % instead of 5 A instead of a)
    //Math is similar to extra entropy for uppercase letters in dictionary matches.

    if S := float64(shiftCount); S > float64(0) {
        possibilities = float64(0)
        U := length - S

        for i := float64(0); i < math.Min(S, U)+1; i++ {
            possibilities += zxcvbn_math.NChoseK(S+U, i)
        }

        entropy += math.Log2(possibilities)
    }

    return entropy
}

func RepeatEntropy(match match.Match) float64 {
    cardinality := CalcBruteForceCardinality(match.Token)
    entropy := math.Log2(cardinality * float64(len(match.Token)))

    return entropy
}

//TODO: Validate against python
func CalcBruteForceCardinality(password string) float64 {
    lower, upper, digits, symbols := float64(0), float64(0), float64(0), float64(0)

    for _, char := range password {
        if unicode.IsLower(char) {
            lower = float64(26)
        } else if unicode.IsDigit(char) {
            digits = float64(10)
        } else if unicode.IsUpper(char) {
            upper = float64(26)
        } else {
            symbols = float64(33)
        }
    }

    cardinality := lower + upper + digits + symbols
    return cardinality
}

func SequenceEntropy(match match.Match, dictionaryLength int, ascending bool) float64 {
    firstChar := match.Token[0]
    baseEntropy := float64(0)
    if string(firstChar) == "a" || string(firstChar) == "1" {
        baseEntropy = float64(0)
    } else {
        baseEntropy = math.Log2(float64(dictionaryLength))
        //TODO: should this be just the first or any char?
        if unicode.IsUpper(rune(firstChar)) {
            baseEntropy++
        }
    }

    if !ascending {
        baseEntropy++
    }
    return baseEntropy + math.Log2(float64(len(match.Token)))
}

func ExtraLeetEntropy(match match.Match, password string) float64 {
    var subsitutions float64
    var unsub float64
    subPassword := password[match.I:match.J]
    for index, char := range subPassword {
        if string(char) != string(match.Token[index]) {
            subsitutions++
        } else {
            //TODO: Make this only true for 1337 chars that are not subs?
            unsub++
        }
    }

    var possibilities float64

    for i := float64(0); i <= math.Min(subsitutions, unsub)+1; i++ {
        possibilities += zxcvbn_math.NChoseK(subsitutions+unsub, i)
    }

    if possibilities <= 1 {
        return float64(1)
    }
    return math.Log2(possibilities)
}

func YearEntropy(dateMatch match.DateMatch) float64 {
    return math.Log2(NUM_YEARS)
}

func DateEntropy(dateMatch match.DateMatch) float64 {
    var entropy float64
    if dateMatch.Year < 100 {
        entropy = math.Log2(NUM_DAYS * NUM_MONTHS * 100)
    } else {
        entropy = math.Log2(NUM_DAYS * NUM_MONTHS * NUM_YEARS)
    }

    if dateMatch.Separator != "" {
        entropy += 2 //add two bits for separator selection [/,-,.,etc]
    }
    return entropy
}
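Aside (not part of the commit): a minimal standalone sketch of the arithmetic the deleted DictionaryEntropy / extraUpperCaseEntropy code performs. A word ranked N in its frequency list contributes log2(N) bits, and a simple capitalization scheme adds one extra bit. The function and values below are illustrative only.

package main

import (
    "fmt"
    "math"
)

// dictionaryBits: log2(rank) bits for the word itself, plus one bit when only a
// simple capitalization scheme (First-letter, last-letteR, ALL CAPS) is used.
func dictionaryBits(rank float64, simpleCaps bool) float64 {
    bits := math.Log2(rank)
    if simpleCaps {
        bits++
    }
    return bits
}

func main() {
    // A word ranked 1024th with a capitalized first letter: 10 + 1 = 11 bits.
    fmt.Printf("%.1f bits\n", dictionaryBits(1024, true))
}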
47  vendor/github.com/nbutton23/zxcvbn-go/frequency/frequency.go  generated  vendored
@@ -1,47 +0,0 @@
package frequency

import (
    "encoding/json"
    "github.com/nbutton23/zxcvbn-go/data"
    "log"
)

type FrequencyList struct {
    Name string
    List []string
}

var FrequencyLists = make(map[string]FrequencyList)

func init() {
    maleFilePath := getAsset("data/MaleNames.json")
    femaleFilePath := getAsset("data/FemaleNames.json")
    surnameFilePath := getAsset("data/Surnames.json")
    englishFilePath := getAsset("data/English.json")
    passwordsFilePath := getAsset("data/Passwords.json")

    FrequencyLists["MaleNames"] = GetStringListFromAsset(maleFilePath, "MaleNames")
    FrequencyLists["FemaleNames"] = GetStringListFromAsset(femaleFilePath, "FemaleNames")
    FrequencyLists["Surname"] = GetStringListFromAsset(surnameFilePath, "Surname")
    FrequencyLists["English"] = GetStringListFromAsset(englishFilePath, "English")
    FrequencyLists["Passwords"] = GetStringListFromAsset(passwordsFilePath, "Passwords")

}
func getAsset(name string) []byte {
    data, err := zxcvbn_data.Asset(name)
    if err != nil {
        panic("Error getting asset " + name)
    }

    return data
}
func GetStringListFromAsset(data []byte, name string) FrequencyList {

    var tempList FrequencyList
    err := json.Unmarshal(data, &tempList)
    if err != nil {
        log.Fatal(err)
    }
    tempList.Name = name
    return tempList
}
35  vendor/github.com/nbutton23/zxcvbn-go/match/match.go  generated  vendored
@@ -1,35 +0,0 @@
package match

type Matches []Match

func (s Matches) Len() int {
    return len(s)
}
func (s Matches) Swap(i, j int) {
    s[i], s[j] = s[j], s[i]
}
func (s Matches) Less(i, j int) bool {
    if s[i].I < s[j].I {
        return true
    } else if s[i].I == s[j].I {
        return s[i].J < s[j].J
    } else {
        return false
    }
}

type Match struct {
    Pattern        string
    I, J           int
    Token          string
    DictionaryName string
    Entropy        float64
}

type DateMatch struct {
    Pattern          string
    I, J             int
    Token            string
    Separator        string
    Day, Month, Year int64
}
189  vendor/github.com/nbutton23/zxcvbn-go/matching/dateMatchers.go  generated  vendored
@@ -1,189 +0,0 @@
package matching

import (
    "github.com/nbutton23/zxcvbn-go/entropy"
    "github.com/nbutton23/zxcvbn-go/match"
    "regexp"
    "strconv"
    "strings"
)

func checkDate(day, month, year int64) (bool, int64, int64, int64) {
    if (12 <= month && month <= 31) && day <= 12 {
        day, month = month, day
    }

    if day > 31 || month > 12 {
        return false, 0, 0, 0
    }

    if !((1900 <= year && year <= 2019) || (0 <= year && year <= 99)) {
        return false, 0, 0, 0
    }

    return true, day, month, year
}
func dateSepMatcher(password string) []match.Match {
    dateMatches := dateSepMatchHelper(password)

    var matches []match.Match
    for _, dateMatch := range dateMatches {
        match := match.Match{
            I:              dateMatch.I,
            J:              dateMatch.J,
            Entropy:        entropy.DateEntropy(dateMatch),
            DictionaryName: "date_match",
            Token:          dateMatch.Token,
        }

        matches = append(matches, match)
    }

    return matches
}
func dateSepMatchHelper(password string) []match.DateMatch {

    var matches []match.DateMatch

    matcher := regexp.MustCompile(DATE_RX_YEAR_SUFFIX)
    for _, v := range matcher.FindAllString(password, len(password)) {
        splitV := matcher.FindAllStringSubmatch(v, len(v))
        i := strings.Index(password, v)
        j := i + len(v)
        day, _ := strconv.ParseInt(splitV[0][4], 10, 16)
        month, _ := strconv.ParseInt(splitV[0][2], 10, 16)
        year, _ := strconv.ParseInt(splitV[0][6], 10, 16)
        match := match.DateMatch{Day: day, Month: month, Year: year, Separator: splitV[0][5], I: i, J: j, Token: password[i:j]}
        matches = append(matches, match)
    }

    matcher = regexp.MustCompile(DATE_RX_YEAR_PREFIX)
    for _, v := range matcher.FindAllString(password, len(password)) {
        splitV := matcher.FindAllStringSubmatch(v, len(v))
        i := strings.Index(password, v)
        j := i + len(v)
        day, _ := strconv.ParseInt(splitV[0][4], 10, 16)
        month, _ := strconv.ParseInt(splitV[0][6], 10, 16)
        year, _ := strconv.ParseInt(splitV[0][2], 10, 16)
        match := match.DateMatch{Day: day, Month: month, Year: year, Separator: splitV[0][5], I: i, J: j, Token: password[i:j]}
        matches = append(matches, match)
    }

    var out []match.DateMatch
    for _, match := range matches {
        if valid, day, month, year := checkDate(match.Day, match.Month, match.Year); valid {
            match.Pattern = "date"
            match.Day = day
            match.Month = month
            match.Year = year
            out = append(out, match)
        }
    }
    return out

}

type DateMatchCandidate struct {
    DayMonth string
    Year     string
    I, J     int
}

type DateMatchCandidateTwo struct {
    Day   string
    Month string
    Year  string
    I, J  int
}

func dateWithoutSepMatch(password string) []match.Match {
    dateMatches := dateWithoutSepMatchHelper(password)

    var matches []match.Match
    for _, dateMatch := range dateMatches {
        match := match.Match{
            I:              dateMatch.I,
            J:              dateMatch.J,
            Entropy:        entropy.DateEntropy(dateMatch),
            DictionaryName: "date_match",
            Token:          dateMatch.Token,
        }

        matches = append(matches, match)
    }

    return matches
}

//TODO Has issues with 6 digit dates
func dateWithoutSepMatchHelper(password string) (matches []match.DateMatch) {
    matcher := regexp.MustCompile(DATE_WITHOUT_SEP_MATCH)
    for _, v := range matcher.FindAllString(password, len(password)) {
        i := strings.Index(password, v)
        j := i + len(v)
        length := len(v)
        lastIndex := length - 1
        var candidatesRoundOne []DateMatchCandidate

        if length <= 6 {
            //2-digit year prefix
            candidatesRoundOne = append(candidatesRoundOne, buildDateMatchCandidate(v[2:], v[0:2], i, j))

            //2-digityear suffix
            candidatesRoundOne = append(candidatesRoundOne, buildDateMatchCandidate(v[0:lastIndex-2], v[lastIndex-2:], i, j))
        }
        if length >= 6 {
            //4-digit year prefix
            candidatesRoundOne = append(candidatesRoundOne, buildDateMatchCandidate(v[4:], v[0:4], i, j))

            //4-digit year sufix
            candidatesRoundOne = append(candidatesRoundOne, buildDateMatchCandidate(v[0:lastIndex-3], v[lastIndex-3:], i, j))
        }

        var candidatesRoundTwo []DateMatchCandidateTwo
        for _, c := range candidatesRoundOne {
            if len(c.DayMonth) == 2 {
                candidatesRoundTwo = append(candidatesRoundTwo, buildDateMatchCandidateTwo(c.DayMonth[0:0], c.DayMonth[1:1], c.Year, c.I, c.J))
            } else if len(c.DayMonth) == 3 {
                candidatesRoundTwo = append(candidatesRoundTwo, buildDateMatchCandidateTwo(c.DayMonth[0:2], c.DayMonth[2:2], c.Year, c.I, c.J))
                candidatesRoundTwo = append(candidatesRoundTwo, buildDateMatchCandidateTwo(c.DayMonth[0:0], c.DayMonth[1:3], c.Year, c.I, c.J))
            } else if len(c.DayMonth) == 4 {
                candidatesRoundTwo = append(candidatesRoundTwo, buildDateMatchCandidateTwo(c.DayMonth[0:2], c.DayMonth[2:4], c.Year, c.I, c.J))
            }
        }

        for _, candidate := range candidatesRoundTwo {
            intDay, err := strconv.ParseInt(candidate.Day, 10, 16)
            if err != nil {
                continue
            }

            intMonth, err := strconv.ParseInt(candidate.Month, 10, 16)

            if err != nil {
                continue
            }

            intYear, err := strconv.ParseInt(candidate.Year, 10, 16)
            if err != nil {
                continue
            }

            if ok, _, _, _ := checkDate(intDay, intMonth, intYear); ok {
                matches = append(matches, match.DateMatch{Token: password, Pattern: "date", Day: intDay, Month: intMonth, Year: intYear, I: i, J: j})
            }

        }
    }

    return matches
}

func buildDateMatchCandidate(dayMonth, year string, i, j int) DateMatchCandidate {
    return DateMatchCandidate{DayMonth: dayMonth, Year: year, I: i, J: j}
}

func buildDateMatchCandidateTwo(day, month string, year string, i, j int) DateMatchCandidateTwo {

    return DateMatchCandidateTwo{Day: day, Month: month, Year: year, I: i, J: j}
}
54  vendor/github.com/nbutton23/zxcvbn-go/matching/dictionaryMatch.go  generated  vendored
@@ -1,54 +0,0 @@
package matching

import (
    "github.com/nbutton23/zxcvbn-go/entropy"
    "github.com/nbutton23/zxcvbn-go/match"
    "strings"
)

func buildDictMatcher(dictName string, rankedDict map[string]int) func(password string) []match.Match {
    return func(password string) []match.Match {
        matches := dictionaryMatch(password, dictName, rankedDict)
        for _, v := range matches {
            v.DictionaryName = dictName
        }
        return matches
    }

}

func dictionaryMatch(password string, dictionaryName string, rankedDict map[string]int) []match.Match {
    length := len(password)
    var results []match.Match
    pwLower := strings.ToLower(password)

    for i := 0; i < length; i++ {
        for j := i; j < length; j++ {
            word := pwLower[i : j+1]
            if val, ok := rankedDict[word]; ok {
                matchDic := match.Match{Pattern: "dictionary",
                    DictionaryName: dictionaryName,
                    I:              i,
                    J:              j,
                    Token:          password[i : j+1],
                }
                matchDic.Entropy = entropy.DictionaryEntropy(matchDic, float64(val))

                results = append(results, matchDic)
            }
        }
    }

    return results
}

func buildRankedDict(unrankedList []string) map[string]int {

    result := make(map[string]int)

    for i, v := range unrankedList {
        result[strings.ToLower(v)] = i + 1
    }

    return result
}
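Aside (not part of the commit): the ranked-dictionary idea used by dictionaryMatch above, as a self-contained sketch. Every word in a frequency list gets rank = position + 1, and any substring of the password found in that map is a candidate match whose entropy is log2(rank). The word list and password below are made up for the example.

package main

import (
    "fmt"
    "math"
    "strings"
)

// buildRanked mirrors buildRankedDict: lower-cased word -> 1-based rank.
func buildRanked(words []string) map[string]int {
    ranked := make(map[string]int)
    for i, w := range words {
        ranked[strings.ToLower(w)] = i + 1
    }
    return ranked
}

func main() {
    ranked := buildRanked([]string{"password", "dragon", "monkey"})
    pw := strings.ToLower("MyDragon42")
    // Scan every substring of the password against the ranked dictionary.
    for i := 0; i < len(pw); i++ {
        for j := i; j < len(pw); j++ {
            if rank, ok := ranked[pw[i:j+1]]; ok {
                fmt.Printf("%q rank=%d entropy=%.2f bits\n", pw[i:j+1], rank, math.Log2(float64(rank)))
            }
        }
    }
}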
68  vendor/github.com/nbutton23/zxcvbn-go/matching/leet.go  generated  vendored
@@ -1,68 +0,0 @@
package matching

import (
    "github.com/nbutton23/zxcvbn-go/entropy"
    "github.com/nbutton23/zxcvbn-go/match"
    "strings"
)

func l33tMatch(password string) []match.Match {

    substitutions := relevantL33tSubtable(password)

    permutations := getAllPermutationsOfLeetSubstitutions(password, substitutions)

    var matches []match.Match

    for _, permutation := range permutations {
        for _, mather := range DICTIONARY_MATCHERS {
            matches = append(matches, mather(permutation)...)
        }
    }

    for _, match := range matches {
        match.Entropy += entropy.ExtraLeetEntropy(match, password)
        match.DictionaryName = match.DictionaryName + "_3117"
    }

    return matches
}

func getAllPermutationsOfLeetSubstitutions(password string, substitutionsMap map[string][]string) []string {

    var permutations []string

    for index, char := range password {
        for value, splice := range substitutionsMap {
            for _, sub := range splice {
                if string(char) == sub {
                    var permutation string
                    permutation = password[:index] + value + password[index+1:]

                    permutations = append(permutations, permutation)
                    if index < len(permutation) {
                        tempPermutations := getAllPermutationsOfLeetSubstitutions(permutation[index+1:], substitutionsMap)
                        for _, temp := range tempPermutations {
                            permutations = append(permutations, permutation[:index+1]+temp)
                        }

                    }
                }
            }
        }
    }

    return permutations
}

func relevantL33tSubtable(password string) map[string][]string {
    relevantSubs := make(map[string][]string)
    for key, values := range L33T_TABLE.Graph {
        for _, value := range values {
            if strings.Contains(password, value) {
                relevantSubs[key] = append(relevantSubs[key], value)
            }
        }
    }
    return relevantSubs
}
77  vendor/github.com/nbutton23/zxcvbn-go/matching/matching.go  generated  vendored
@@ -1,77 +0,0 @@
package matching

import (
    "github.com/nbutton23/zxcvbn-go/adjacency"
    "github.com/nbutton23/zxcvbn-go/frequency"
    "github.com/nbutton23/zxcvbn-go/match"
    "sort"
)

var (
    DICTIONARY_MATCHERS []func(password string) []match.Match
    MATCHERS            []func(password string) []match.Match
    ADJACENCY_GRAPHS    []adjacency.AdjacencyGraph
    L33T_TABLE          adjacency.AdjacencyGraph

    SEQUENCES map[string]string
)

const (
    DATE_RX_YEAR_SUFFIX    string = `((\d{1,2})(\s|-|\/|\\|_|\.)(\d{1,2})(\s|-|\/|\\|_|\.)(19\d{2}|200\d|201\d|\d{2}))`
    DATE_RX_YEAR_PREFIX    string = `((19\d{2}|200\d|201\d|\d{2})(\s|-|/|\\|_|\.)(\d{1,2})(\s|-|/|\\|_|\.)(\d{1,2}))`
    DATE_WITHOUT_SEP_MATCH string = `\d{4,8}`
)

func init() {
    loadFrequencyList()
}

func Omnimatch(password string, userInputs []string) (matches []match.Match) {

    //Can I run into the issue where nil is not equal to nil?
    if DICTIONARY_MATCHERS == nil || ADJACENCY_GRAPHS == nil {
        loadFrequencyList()
    }

    if userInputs != nil {
        userInputMatcher := buildDictMatcher("user_inputs", buildRankedDict(userInputs))
        matches = userInputMatcher(password)
    }

    for _, matcher := range MATCHERS {
        matches = append(matches, matcher(password)...)
    }
    sort.Sort(match.Matches(matches))
    return matches
}

func loadFrequencyList() {

    for n, list := range frequency.FrequencyLists {
        DICTIONARY_MATCHERS = append(DICTIONARY_MATCHERS, buildDictMatcher(n, buildRankedDict(list.List)))
    }

    L33T_TABLE = adjacency.AdjacencyGph["l33t"]

    ADJACENCY_GRAPHS = append(ADJACENCY_GRAPHS, adjacency.AdjacencyGph["qwerty"])
    ADJACENCY_GRAPHS = append(ADJACENCY_GRAPHS, adjacency.AdjacencyGph["dvorak"])
    ADJACENCY_GRAPHS = append(ADJACENCY_GRAPHS, adjacency.AdjacencyGph["keypad"])
    ADJACENCY_GRAPHS = append(ADJACENCY_GRAPHS, adjacency.AdjacencyGph["macKeypad"])

    //l33tFilePath, _ := filepath.Abs("adjacency/L33t.json")
    //L33T_TABLE = adjacency.GetAdjancencyGraphFromFile(l33tFilePath, "l33t")

    SEQUENCES = make(map[string]string)
    SEQUENCES["lower"] = "abcdefghijklmnopqrstuvwxyz"
    SEQUENCES["upper"] = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
    SEQUENCES["digits"] = "0123456789"

    MATCHERS = append(MATCHERS, DICTIONARY_MATCHERS...)
    MATCHERS = append(MATCHERS, spatialMatch)
    MATCHERS = append(MATCHERS, repeatMatch)
    MATCHERS = append(MATCHERS, sequenceMatch)
    MATCHERS = append(MATCHERS, l33tMatch)
    MATCHERS = append(MATCHERS, dateSepMatcher)
    MATCHERS = append(MATCHERS, dateWithoutSepMatch)

}
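Aside (not part of the commit; the types and helpers below are invented for the example): a standalone illustration of the dispatch pattern Omnimatch uses above — keep a slice of matcher functions, run each over the password, concatenate the results, and sort them by start position.

package main

import (
    "fmt"
    "sort"
    "strings"
)

type span struct{ i, j int }

type matcher func(password string) []span

// runAll applies every matcher and returns all spans ordered by start index.
func runAll(password string, matchers []matcher) []span {
    var all []span
    for _, m := range matchers {
        all = append(all, m(password)...)
    }
    sort.Slice(all, func(a, b int) bool { return all[a].i < all[b].i })
    return all
}

func main() {
    digits := func(p string) []span {
        var out []span
        for i, r := range p {
            if r >= '0' && r <= '9' {
                out = append(out, span{i, i})
            }
        }
        return out
    }
    hasAt := func(p string) []span {
        if k := strings.Index(p, "@"); k >= 0 {
            return []span{{k, k}}
        }
        return nil
    }
    fmt.Println(runAll("p@ss1", []matcher{digits, hasAt})) // [{1 1} {4 4}]
}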
59  vendor/github.com/nbutton23/zxcvbn-go/matching/repeatMatch.go  generated  vendored
@@ -1,59 +0,0 @@
package matching

import (
    "github.com/nbutton23/zxcvbn-go/entropy"
    "github.com/nbutton23/zxcvbn-go/match"
    "strings"
)

func repeatMatch(password string) []match.Match {
    var matches []match.Match

    //Loop through password. if current == prev currentStreak++ else if currentStreak > 2 {buildMatch; currentStreak = 1} prev = current
    var current, prev string
    currentStreak := 1
    var i int
    var char rune
    for i, char = range password {
        current = string(char)
        if i == 0 {
            prev = current
            continue
        }

        if strings.ToLower(current) == strings.ToLower(prev) {
            currentStreak++

        } else if currentStreak > 2 {
            iPos := i - currentStreak
            jPos := i - 1
            matchRepeat := match.Match{
                Pattern:        "repeat",
                I:              iPos,
                J:              jPos,
                Token:          password[iPos : jPos+1],
                DictionaryName: prev}
            matchRepeat.Entropy = entropy.RepeatEntropy(matchRepeat)
            matches = append(matches, matchRepeat)
            currentStreak = 1
        } else {
            currentStreak = 1
        }

        prev = current
    }

    if currentStreak > 2 {
        iPos := i - currentStreak + 1
        jPos := i
        matchRepeat := match.Match{
            Pattern:        "repeat",
            I:              iPos,
            J:              jPos,
            Token:          password[iPos : jPos+1],
            DictionaryName: prev}
        matchRepeat.Entropy = entropy.RepeatEntropy(matchRepeat)
        matches = append(matches, matchRepeat)
    }
    return matches
}
68  vendor/github.com/nbutton23/zxcvbn-go/matching/sequenceMatch.go  generated  vendored
@@ -1,68 +0,0 @@
package matching

import (
    "github.com/nbutton23/zxcvbn-go/entropy"
    "github.com/nbutton23/zxcvbn-go/match"
    "strings"
)

func sequenceMatch(password string) []match.Match {
    var matches []match.Match
    for i := 0; i < len(password); {
        j := i + 1
        var seq string
        var seqName string
        seqDirection := 0
        for seqCandidateName, seqCandidate := range SEQUENCES {
            iN := strings.Index(seqCandidate, string(password[i]))
            var jN int
            if j < len(password) {
                jN = strings.Index(seqCandidate, string(password[j]))
            } else {
                jN = -1
            }

            if iN > -1 && jN > -1 {
                direction := jN - iN
                if direction == 1 || direction == -1 {
                    seq = seqCandidate
                    seqName = seqCandidateName
                    seqDirection = direction
                    break
                }
            }

        }

        if seq != "" {
            for {
                var prevN, curN int
                if j < len(password) {
                    prevChar, curChar := password[j-1], password[j]
                    prevN, curN = strings.Index(seq, string(prevChar)), strings.Index(seq, string(curChar))
                }

                if j == len(password) || curN-prevN != seqDirection {
                    if j-i > 2 {
                        matchSequence := match.Match{
                            Pattern:        "sequence",
                            I:              i,
                            J:              j - 1,
                            Token:          password[i:j],
                            DictionaryName: seqName,
                        }

                        matchSequence.Entropy = entropy.SequenceEntropy(matchSequence, len(seq), (seqDirection == 1))
                        matches = append(matches, matchSequence)
                    }
                    break
                } else {
                    j += 1
                }

            }
        }
        i = j
    }
    return matches
}
Some files were not shown because too many files have changed in this diff