build(deps): bump github.com/golangci/golangci-lint/v2 in /tools

Bumps [github.com/golangci/golangci-lint/v2](https://github.com/golangci/golangci-lint) from 2.9.0 to 2.11.4.
- [Release notes](https://github.com/golangci/golangci-lint/releases)
- [Changelog](https://github.com/golangci/golangci-lint/blob/main/CHANGELOG.md)
- [Commits](https://github.com/golangci/golangci-lint/compare/v2.9.0...v2.11.4)

---
updated-dependencies:
- dependency-name: github.com/golangci/golangci-lint/v2
  dependency-version: 2.11.4
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
This commit is contained in:
dependabot[bot]
2026-04-14 09:27:55 +00:00
committed by GitHub
parent 2518b6a12a
commit 9b9455d350
401 changed files with 23671 additions and 47322 deletions
+19 -19
View File
@@ -3,7 +3,7 @@ module github.com/containers/gvisor-tap-vsock/tools
go 1.25.0
require (
github.com/golangci/golangci-lint/v2 v2.9.0
github.com/golangci/golangci-lint/v2 v2.11.4
github.com/randall77/makefat v0.0.0-20210315173500-7ddd0e42c844
)
@@ -24,12 +24,12 @@ require (
github.com/BurntSushi/toml v1.6.0 // indirect
github.com/Djarvur/go-err113 v0.1.1 // indirect
github.com/Masterminds/semver/v3 v3.4.0 // indirect
github.com/MirrexOne/unqueryvet v1.5.3 // indirect
github.com/MirrexOne/unqueryvet v1.5.4 // indirect
github.com/OpenPeeDeeP/depguard/v2 v2.2.1 // indirect
github.com/alecthomas/chroma/v2 v2.23.1 // indirect
github.com/alecthomas/go-check-sumtype v0.3.1 // indirect
github.com/alexkohler/nakedret/v2 v2.0.6 // indirect
github.com/alexkohler/prealloc v1.0.2 // indirect
github.com/alexkohler/prealloc v1.1.0 // indirect
github.com/alfatraining/structtag v1.0.0 // indirect
github.com/alingse/asasalint v0.0.11 // indirect
github.com/alingse/nilnesserr v0.2.0 // indirect
@@ -62,7 +62,7 @@ require (
github.com/denis-tingaikin/go-header v0.5.0 // indirect
github.com/dlclark/regexp2 v1.11.5 // indirect
github.com/ettle/strcase v0.2.0 // indirect
github.com/fatih/color v1.18.0 // indirect
github.com/fatih/color v1.19.0 // indirect
github.com/fatih/structtag v1.2.0 // indirect
github.com/firefart/nonamedreturns v1.0.6 // indirect
github.com/fsnotify/fsnotify v1.5.4 // indirect
@@ -79,7 +79,7 @@ require (
github.com/go-viper/mapstructure/v2 v2.5.0 // indirect
github.com/go-xmlfmt/xmlfmt v1.1.3 // indirect
github.com/gobwas/glob v0.2.3 // indirect
github.com/godoc-lint/godoc-lint v0.11.1 // indirect
github.com/godoc-lint/godoc-lint v0.11.2 // indirect
github.com/gofrs/flock v0.13.0 // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/golangci/asciicheck v0.5.0 // indirect
@@ -109,7 +109,7 @@ require (
github.com/jjti/go-spancheck v0.6.5 // indirect
github.com/julz/importas v0.2.0 // indirect
github.com/karamaru-alpha/copyloopvar v1.2.2 // indirect
github.com/kisielk/errcheck v1.9.0 // indirect
github.com/kisielk/errcheck v1.10.0 // indirect
github.com/kkHAIKE/contextcheck v1.1.6 // indirect
github.com/kulti/thelper v0.7.1 // indirect
github.com/kunwardeep/paralleltest v1.0.15 // indirect
@@ -133,7 +133,7 @@ require (
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.16 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.1 // indirect
github.com/mgechev/revive v1.14.0 // indirect
github.com/mgechev/revive v1.15.0 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/moricho/tparallel v0.3.2 // indirect
@@ -141,7 +141,7 @@ require (
github.com/nakabonne/nestif v0.3.1 // indirect
github.com/nishanths/exhaustive v0.12.0 // indirect
github.com/nishanths/predeclared v0.2.2 // indirect
github.com/nunnatsa/ginkgolinter v0.22.0 // indirect
github.com/nunnatsa/ginkgolinter v0.23.0 // indirect
github.com/pelletier/go-toml v1.9.5 // indirect
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
@@ -158,15 +158,15 @@ require (
github.com/rivo/uniseg v0.4.7 // indirect
github.com/rogpeppe/go-internal v1.14.1 // indirect
github.com/ryancurrah/gomodguard v1.4.1 // indirect
github.com/ryanrolds/sqlclosecheck v0.5.1 // indirect
github.com/ryanrolds/sqlclosecheck v0.6.0 // indirect
github.com/sanposhiho/wastedassign/v2 v2.1.0 // indirect
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 // indirect
github.com/sashamelentyev/interfacebloat v1.1.0 // indirect
github.com/sashamelentyev/usestdlibvars v1.29.0 // indirect
github.com/securego/gosec/v2 v2.22.11 // indirect
github.com/securego/gosec/v2 v2.24.8-0.20260309165252-619ce2117e08 // indirect
github.com/sirupsen/logrus v1.9.4 // indirect
github.com/sivchari/containedctx v1.0.3 // indirect
github.com/sonatard/noctx v0.4.0 // indirect
github.com/sonatard/noctx v0.5.1 // indirect
github.com/sourcegraph/go-diff v0.7.0 // indirect
github.com/spf13/afero v1.15.0 // indirect
github.com/spf13/cast v1.5.0 // indirect
@@ -186,7 +186,7 @@ require (
github.com/tommy-muehle/go-mnd/v2 v2.5.1 // indirect
github.com/ultraware/funlen v0.2.0 // indirect
github.com/ultraware/whitespace v0.2.0 // indirect
github.com/uudashr/gocognit v1.2.0 // indirect
github.com/uudashr/gocognit v1.2.1 // indirect
github.com/uudashr/iface v1.4.1 // indirect
github.com/xen0n/gosmopolitan v1.3.0 // indirect
github.com/xo/terminfo v0.0.0-20220910002029-abceb7e1c41e // indirect
@@ -201,17 +201,17 @@ require (
go.uber.org/multierr v1.10.0 // indirect
go.uber.org/zap v1.27.0 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
golang.org/x/exp/typeparams v0.0.0-20251125195548-87e1e737ad39 // indirect
golang.org/x/mod v0.33.0 // indirect
golang.org/x/sync v0.19.0 // indirect
golang.org/x/sys v0.41.0 // indirect
golang.org/x/text v0.33.0 // indirect
golang.org/x/tools v0.42.0 // indirect
golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358 // indirect
golang.org/x/mod v0.34.0 // indirect
golang.org/x/sync v0.20.0 // indirect
golang.org/x/sys v0.42.0 // indirect
golang.org/x/text v0.34.0 // indirect
golang.org/x/tools v0.43.0 // indirect
google.golang.org/protobuf v1.36.8 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
honnef.co/go/tools v0.6.1 // indirect
honnef.co/go/tools v0.7.0 // indirect
mvdan.cc/gofumpt v0.9.2 // indirect
mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 // indirect
)
+46 -46
View File
@@ -65,8 +65,8 @@ github.com/Djarvur/go-err113 v0.1.1 h1:eHfopDqXRwAi+YmCUas75ZE0+hoBHJ2GQNLYRSxao
github.com/Djarvur/go-err113 v0.1.1/go.mod h1:IaWJdYFLg76t2ihfflPZnM1LIQszWOsFDh2hhhAVF6k=
github.com/Masterminds/semver/v3 v3.4.0 h1:Zog+i5UMtVoCU8oKka5P7i9q9HgrJeGzI9SA1Xbatp0=
github.com/Masterminds/semver/v3 v3.4.0/go.mod h1:4V+yj/TJE1HU9XfppCwVMZq3I84lprf4nC11bSS5beM=
github.com/MirrexOne/unqueryvet v1.5.3 h1:LpT3rsH+IY3cQddWF9bg4C7jsbASdGnrOSofY8IPEiw=
github.com/MirrexOne/unqueryvet v1.5.3/go.mod h1:fs9Zq6eh1LRIhsDIsxf9PONVUjYdFHdtkHIgZdJnyPU=
github.com/MirrexOne/unqueryvet v1.5.4 h1:38QOxShO7JmMWT+eCdDMbcUgGCOeJphVkzzRgyLJgsQ=
github.com/MirrexOne/unqueryvet v1.5.4/go.mod h1:fs9Zq6eh1LRIhsDIsxf9PONVUjYdFHdtkHIgZdJnyPU=
github.com/OpenPeeDeeP/depguard/v2 v2.2.1 h1:vckeWVESWp6Qog7UZSARNqfu/cZqvki8zsuj3piCMx4=
github.com/OpenPeeDeeP/depguard/v2 v2.2.1/go.mod h1:q4DKzC4UcVaAvcfd41CZh0PWpGgzrVxUYBlgKNGquUo=
github.com/alecthomas/assert/v2 v2.11.0 h1:2Q9r3ki8+JYXvGsDyBXwH3LcJ+WK5D0gc5E8vS6K3D0=
@@ -84,8 +84,8 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
github.com/alexkohler/nakedret/v2 v2.0.6 h1:ME3Qef1/KIKr3kWX3nti3hhgNxw6aqN5pZmQiFSsuzQ=
github.com/alexkohler/nakedret/v2 v2.0.6/go.mod h1:l3RKju/IzOMQHmsEvXwkqMDzHHvurNQfAgE1eVmT40Q=
github.com/alexkohler/prealloc v1.0.2 h1:MPo8cIkGkZytq7WNH9UHv3DIX1mPz1RatPXnZb0zHWQ=
github.com/alexkohler/prealloc v1.0.2/go.mod h1:fT39Jge3bQrfA7nPMDngUfvUbQGQeJyGQnR+913SCig=
github.com/alexkohler/prealloc v1.1.0 h1:cKGRBqlXw5iyQGLYhrXrDlcHxugXpTq4tQ5c91wkf8M=
github.com/alexkohler/prealloc v1.1.0/go.mod h1:fT39Jge3bQrfA7nPMDngUfvUbQGQeJyGQnR+913SCig=
github.com/alfatraining/structtag v1.0.0 h1:2qmcUqNcCoyVJ0up879K614L9PazjBSFruTB0GOFjCc=
github.com/alfatraining/structtag v1.0.0/go.mod h1:p3Xi5SwzTi+Ryj64DqjLWz7XurHxbGsq6y3ubePJPus=
github.com/alingse/asasalint v0.0.11 h1:SFwnQXJ49Kx/1GghOFz1XGqHYKp21Kq1nHad/0WQRnw=
@@ -168,8 +168,8 @@ github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1m
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/ettle/strcase v0.2.0 h1:fGNiVF21fHXpX1niBgk0aROov1LagYsOwV/xqKDKR/Q=
github.com/ettle/strcase v0.2.0/go.mod h1:DajmHElDSaX76ITe3/VHVyMin4LWSJN5Z909Wp+ED1A=
github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM=
github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU=
github.com/fatih/color v1.19.0 h1:Zp3PiM21/9Ld6FzSKyL5c/BULoe/ONr9KlbYVOfG8+w=
github.com/fatih/color v1.19.0/go.mod h1:zNk67I0ZUT1bEGsSGyCZYZNrHuTkJJB+r6Q9VuMi0LE=
github.com/fatih/structtag v1.2.0 h1:/OdNE99OxoI/PqaW/SuSK9uxxT3f/tcSZgon/ssNSx4=
github.com/fatih/structtag v1.2.0/go.mod h1:mBJUNpUnHmRKrKlQQlmCrh5PuhftFbNv8Ys4/aAZl94=
github.com/firefart/nonamedreturns v1.0.6 h1:vmiBcKV/3EqKY3ZiPxCINmpS431OcE1S47AQUwhrg8E=
@@ -225,8 +225,8 @@ github.com/go-xmlfmt/xmlfmt v1.1.3 h1:t8Ey3Uy7jDSEisW2K3somuMKIpzktkWptA0iFCnRUW
github.com/go-xmlfmt/xmlfmt v1.1.3/go.mod h1:aUCEOzzezBEjDBbFBoSiya/gduyIiWYRP6CnSFIV8AM=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/godoc-lint/godoc-lint v0.11.1 h1:z9as8Qjiy6miRIa3VRymTa+Gt2RLnGICVikcvlUVOaA=
github.com/godoc-lint/godoc-lint v0.11.1/go.mod h1:BAqayheFSuZrEAqCRxgw9MyvsM+S/hZwJbU1s/ejRj8=
github.com/godoc-lint/godoc-lint v0.11.2 h1:Bp0FkJWoSdNsBikdNgIcgtaoo+xz6I/Y9s5WSBQUeeM=
github.com/godoc-lint/godoc-lint v0.11.2/go.mod h1:iVpGdL1JCikNH2gGeAn3Hh+AgN5Gx/I/cxV+91L41jo=
github.com/gofrs/flock v0.13.0 h1:95JolYOvGMqeH31+FC7D2+uULf6mG61mEZ/A8dRYMzw=
github.com/gofrs/flock v0.13.0/go.mod h1:jxeyy9R1auM5S6JYDBhDt+E2TCo7DkratH4Pgi8P+Z0=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
@@ -267,8 +267,8 @@ github.com/golangci/go-printf-func-name v0.1.1 h1:hIYTFJqAGp1iwoIfsNTpoq1xZAarog
github.com/golangci/go-printf-func-name v0.1.1/go.mod h1:Es64MpWEZbh0UBtTAICOZiB+miW53w/K9Or/4QogJss=
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d h1:viFft9sS/dxoYY0aiOTsLKO2aZQAPT4nlQCsimGcSGE=
github.com/golangci/gofmt v0.0.0-20250106114630-d62b90e6713d/go.mod h1:ivJ9QDg0XucIkmwhzCDsqcnxxlDStoTl89jDMIoNxKY=
github.com/golangci/golangci-lint/v2 v2.9.0 h1:x5RRwa/jpNEjOnbSQSTam47QBsB50NMzCCUKe4GY/0U=
github.com/golangci/golangci-lint/v2 v2.9.0/go.mod h1:gAPaJitu6HsyiuwJYO+WtKT9WmCpQZHR+gkXlCw1bRY=
github.com/golangci/golangci-lint/v2 v2.11.4 h1:GK+UlZBN5y7rh2PBnHA93XLSX6RaF7uhzJQ3JwU1wuA=
github.com/golangci/golangci-lint/v2 v2.11.4/go.mod h1:ODQDCASMA3VqfZYIbbQLpTRTzV7O/vjmIRF6u8NyFwI=
github.com/golangci/golines v0.15.0 h1:Qnph25g8Y1c5fdo1X7GaRDGgnMHgnxh4Gk4VfPTtRx0=
github.com/golangci/golines v0.15.0/go.mod h1:AZjXd23tbHMpowhtnGlj9KCNsysj72aeZVVHnVcZx10=
github.com/golangci/misspell v0.8.0 h1:qvxQhiE2/5z+BVRo1kwYA8yGz+lOlu5Jfvtx2b04Jbg=
@@ -306,8 +306,8 @@ github.com/google/pprof v0.0.0-20200212024743-f11f1df84d12/go.mod h1:ZgVRPoUq/hf
github.com/google/pprof v0.0.0-20200229191704-1ebb73c60ed3/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200430221834-fc25d7d30c6d/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20200708004538-1a94d8640e99/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20250820193118-f64d9cf942d6 h1:EEHtgt9IwisQ2AZ4pIsMjahcegHh6rmhqxzIRQIyepY=
github.com/google/pprof v0.0.0-20250820193118-f64d9cf942d6/go.mod h1:I6V7YzU0XDpsHqbsyrghnFZLO1gwK6NPTNvmetQIk9U=
github.com/google/pprof v0.0.0-20260115054156-294ebfa9ad83 h1:z2ogiKUYzX5Is6zr/vP9vJGqPwcdqsWjOt+V8J7+bTc=
github.com/google/pprof v0.0.0-20260115054156-294ebfa9ad83/go.mod h1:MxpfABSjhmINe3F1It9d+8exIHFvUqtLIRCdOGNXqiI=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
@@ -362,8 +362,8 @@ github.com/julz/importas v0.2.0 h1:y+MJN/UdL63QbFJHws9BVC5RpA2iq0kpjrFajTGivjQ=
github.com/julz/importas v0.2.0/go.mod h1:pThlt589EnCYtMnmhmRYY/qn9lCf/frPOK+WMx3xiJY=
github.com/karamaru-alpha/copyloopvar v1.2.2 h1:yfNQvP9YaGQR7VaWLYcfZUlRP2eo2vhExWKxD/fP6q0=
github.com/karamaru-alpha/copyloopvar v1.2.2/go.mod h1:oY4rGZqZ879JkJMtX3RRkcXRkmUvH0x35ykgaKgsgJY=
github.com/kisielk/errcheck v1.9.0 h1:9xt1zI9EBfcYBvdU1nVrzMzzUPUtPKs9bVSIM3TAb3M=
github.com/kisielk/errcheck v1.9.0/go.mod h1:kQxWMMVZgIkDq7U8xtG/n2juOjbLgZtedi0D+/VL/i8=
github.com/kisielk/errcheck v1.10.0 h1:Lvs/YAHP24YKg08LA8oDw2z9fJVme090RAXd90S+rrw=
github.com/kisielk/errcheck v1.10.0/go.mod h1:kQxWMMVZgIkDq7U8xtG/n2juOjbLgZtedi0D+/VL/i8=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/kkHAIKE/contextcheck v1.1.6 h1:7HIyRcnyzxL9Lz06NGhiKvenXq7Zw6Q0UQu/ttjfJCE=
github.com/kkHAIKE/contextcheck v1.1.6/go.mod h1:3dDbMRNBFaq8HFXWC1JyvDSPm43CmE6IuHam8Wr0rkg=
@@ -423,8 +423,8 @@ github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6T
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/mgechev/revive v1.14.0 h1:CC2Ulb3kV7JFYt+izwORoS3VT/+Plb8BvslI/l1yZsc=
github.com/mgechev/revive v1.14.0/go.mod h1:MvnujelCZBZCaoDv5B3foPo6WWgULSSFxvfxp7GsPfo=
github.com/mgechev/revive v1.15.0 h1:vJ0HzSBzfNyPbHKolgiFjHxLek9KUijhqh42yGoqZ8Q=
github.com/mgechev/revive v1.15.0/go.mod h1:LlAKO3QQe9OJ0pVZzI2GPa8CbXGZ/9lNpCGvK4T/a8A=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
@@ -446,12 +446,12 @@ github.com/nishanths/exhaustive v0.12.0 h1:vIY9sALmw6T/yxiASewa4TQcFsVYZQQRUQJhK
github.com/nishanths/exhaustive v0.12.0/go.mod h1:mEZ95wPIZW+x8kC4TgC+9YCUgiST7ecevsVDTgc2obs=
github.com/nishanths/predeclared v0.2.2 h1:V2EPdZPliZymNAn79T8RkNApBjMmVKh5XRpLm/w98Vk=
github.com/nishanths/predeclared v0.2.2/go.mod h1:RROzoN6TnGQupbC+lqggsOlcgysk3LMK/HI84Mp280c=
github.com/nunnatsa/ginkgolinter v0.22.0 h1:o9g7JN6efdBxAHhejvPkodEjWsOBze9zDnPePsvC/Qg=
github.com/nunnatsa/ginkgolinter v0.22.0/go.mod h1:zIFAk36fhcHQIiYOGXLbrGTXz7cvpufhRYem6ToCVnY=
github.com/onsi/ginkgo/v2 v2.27.2 h1:LzwLj0b89qtIy6SSASkzlNvX6WktqurSHwkk2ipF/Ns=
github.com/onsi/ginkgo/v2 v2.27.2/go.mod h1:ArE1D/XhNXBXCBkKOLkbsb2c81dQHCRcF5zwn/ykDRo=
github.com/onsi/gomega v1.38.2 h1:eZCjf2xjZAqe+LeWvKb5weQ+NcPwX84kqJ0cZNxok2A=
github.com/onsi/gomega v1.38.2/go.mod h1:W2MJcYxRGV63b418Ai34Ud0hEdTVXq9NW9+Sx6uXf3k=
github.com/nunnatsa/ginkgolinter v0.23.0 h1:x3o4DGYOWbBMP/VdNQKgSj+25aJKx2Pe6lHr8gBcgf8=
github.com/nunnatsa/ginkgolinter v0.23.0/go.mod h1:9qN1+0akwXEccwV1CAcCDfcoBlWXHB+ML9884pL4SZ4=
github.com/onsi/ginkgo/v2 v2.28.1 h1:S4hj+HbZp40fNKuLUQOYLDgZLwNUVn19N3Atb98NCyI=
github.com/onsi/ginkgo/v2 v2.28.1/go.mod h1:CLtbVInNckU3/+gC8LzkGUb9oF+e8W8TdUsxPwvdOgE=
github.com/onsi/gomega v1.39.1 h1:1IJLAad4zjPn2PsnhH70V4DKRFlrCzGBNrNaru+Vf28=
github.com/onsi/gomega v1.39.1/go.mod h1:hL6yVALoTOxeWudERyfppUcZXjMwIMLnuSfruD2lcfg=
github.com/otiai10/copy v1.2.0/go.mod h1:rrF5dJ5F0t/EWSYODDu4j9/vEeYHMkc8jt0zJChqQWw=
github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU=
github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w=
@@ -513,8 +513,8 @@ github.com/rogpeppe/go-internal v1.14.1/go.mod h1:MaRKkUm5W0goXpeCfT7UZI6fk/L7L7
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryancurrah/gomodguard v1.4.1 h1:eWC8eUMNZ/wM/PWuZBv7JxxqT5fiIKSIyTvjb7Elr+g=
github.com/ryancurrah/gomodguard v1.4.1/go.mod h1:qnMJwV1hX9m+YJseXEBhd2s90+1Xn6x9dLz11ualI1I=
github.com/ryanrolds/sqlclosecheck v0.5.1 h1:dibWW826u0P8jNLsLN+En7+RqWWTYrjCB9fJfSfdyCU=
github.com/ryanrolds/sqlclosecheck v0.5.1/go.mod h1:2g3dUjoS6AL4huFdv6wn55WpLIDjY7ZgUR4J8HOO/XQ=
github.com/ryanrolds/sqlclosecheck v0.6.0 h1:pEyL9okISdg1F1SEpJNlrEotkTGerv5BMk7U4AG0eVg=
github.com/ryanrolds/sqlclosecheck v0.6.0/go.mod h1:xyX16hsDaCMXHrMJ3JMzGf5OpDfHTOTTQrT7HOFUmeU=
github.com/sanposhiho/wastedassign/v2 v2.1.0 h1:crurBF7fJKIORrV85u9UUpePDYGWnwvv3+A96WvwXT0=
github.com/sanposhiho/wastedassign/v2 v2.1.0/go.mod h1:+oSmSC+9bQ+VUAxA66nBb0Z7N8CK7mscKTDYC6aIek4=
github.com/santhosh-tekuri/jsonschema/v6 v6.0.2 h1:KRzFb2m7YtdldCEkzs6KqmJw4nqEVZGK7IN2kJkjTuQ=
@@ -523,8 +523,8 @@ github.com/sashamelentyev/interfacebloat v1.1.0 h1:xdRdJp0irL086OyW1H/RTZTr1h/tM
github.com/sashamelentyev/interfacebloat v1.1.0/go.mod h1:+Y9yU5YdTkrNvoX0xHc84dxiN1iBi9+G8zZIhPVoNjQ=
github.com/sashamelentyev/usestdlibvars v1.29.0 h1:8J0MoRrw4/NAXtjQqTHrbW9NN+3iMf7Knkq057v4XOQ=
github.com/sashamelentyev/usestdlibvars v1.29.0/go.mod h1:8PpnjHMk5VdeWlVb4wCdrB8PNbLqZ3wBZTZWkrpZZL8=
github.com/securego/gosec/v2 v2.22.11 h1:tW+weM/hCM/GX3iaCV91d5I6hqaRT2TPsFM1+USPXwg=
github.com/securego/gosec/v2 v2.22.11/go.mod h1:KE4MW/eH0GLWztkbt4/7XpyH0zJBBnu7sYB4l6Wn7Mw=
github.com/securego/gosec/v2 v2.24.8-0.20260309165252-619ce2117e08 h1:AoLtJX4WUtZkhhUUMFy3GgecAALp/Mb4S1iyQOA2s0U=
github.com/securego/gosec/v2 v2.24.8-0.20260309165252-619ce2117e08/go.mod h1:+XLCJiRE95ga77XInNELh2M6zQP+PdqiT9Zpm0D9Wpk=
github.com/sergi/go-diff v1.2.0 h1:XU+rvMAioB0UC3q1MFrIQy4Vo5/4VsRDQQXHsEya6xQ=
github.com/sergi/go-diff v1.2.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM=
github.com/shurcooL/go v0.0.0-20180423040247-9e1955d9fb6e/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
@@ -536,8 +536,8 @@ github.com/sirupsen/logrus v1.9.4 h1:TsZE7l11zFCLZnZ+teH4Umoq5BhEIfIzfRDZ1Uzql2w
github.com/sirupsen/logrus v1.9.4/go.mod h1:ftWc9WdOfJ0a92nsE2jF5u5ZwH8Bv2zdeOC42RjbV2g=
github.com/sivchari/containedctx v1.0.3 h1:x+etemjbsh2fB5ewm5FeLNi5bUjK0V8n0RB+Wwfd0XE=
github.com/sivchari/containedctx v1.0.3/go.mod h1:c1RDvCbnJLtH4lLcYD/GqwiBSSf4F5Qk0xld2rBqzJ4=
github.com/sonatard/noctx v0.4.0 h1:7MC/5Gg4SQ4lhLYR6mvOP6mQVSxCrdyiExo7atBs27o=
github.com/sonatard/noctx v0.4.0/go.mod h1:64XdbzFb18XL4LporKXp8poqZtPKbCrqQ402CV+kJas=
github.com/sonatard/noctx v0.5.1 h1:wklWg9c9ZYugOAk7qG4yP4PBrlQsmSLPTvW1K4PRQMs=
github.com/sonatard/noctx v0.5.1/go.mod h1:64XdbzFb18XL4LporKXp8poqZtPKbCrqQ402CV+kJas=
github.com/sourcegraph/go-diff v0.7.0 h1:9uLlrd5T46OXs5qpp8L/MTltk0zikUGi0sNNyCpA8G0=
github.com/sourcegraph/go-diff v0.7.0/go.mod h1:iBszgVvyxdc8SFZ7gm69go2KDdt3ag071iBaWPF6cjs=
github.com/spf13/afero v1.15.0 h1:b/YBCLWAJdFWJTN9cLhiXXcD7mzKn9Dm86dNnfyQw1I=
@@ -587,8 +587,8 @@ github.com/ultraware/funlen v0.2.0 h1:gCHmCn+d2/1SemTdYMiKLAHFYxTYz7z9VIDRaTGyLk
github.com/ultraware/funlen v0.2.0/go.mod h1:ZE0q4TsJ8T1SQcjmkhN/w+MceuatI6pBFSxxyteHIJA=
github.com/ultraware/whitespace v0.2.0 h1:TYowo2m9Nfj1baEQBjuHzvMRbp19i+RCcRYrSWoFa+g=
github.com/ultraware/whitespace v0.2.0/go.mod h1:XcP1RLD81eV4BW8UhQlpaR+SDc2givTvyI8a586WjW8=
github.com/uudashr/gocognit v1.2.0 h1:3BU9aMr1xbhPlvJLSydKwdLN3tEUUrzPSSM8S4hDYRA=
github.com/uudashr/gocognit v1.2.0/go.mod h1:k/DdKPI6XBZO1q7HgoV2juESI2/Ofj9AcHPZhBBdrTU=
github.com/uudashr/gocognit v1.2.1 h1:CSJynt5txTnORn/DkhiB4mZjwPuifyASC8/6Q0I/QS4=
github.com/uudashr/gocognit v1.2.1/go.mod h1:acaubQc6xYlXFEMb9nWX2dYBzJ/bIjEkc1zzvyIZg5Q=
github.com/uudashr/iface v1.4.1 h1:J16Xl1wyNX9ofhpHmQ9h9gk5rnv2A6lX/2+APLTo0zU=
github.com/uudashr/iface v1.4.1/go.mod h1:pbeBPlbuU2qkNDn0mmfrxP2X+wjPMIQAy+r1MBXSXtg=
github.com/xen0n/gosmopolitan v1.3.0 h1:zAZI1zefvo7gcpbCOrPSHJZJYA9ZgLfJqtKzZ5pHqQM=
@@ -656,8 +656,8 @@ golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/y
golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8=
golang.org/x/exp/typeparams v0.0.0-20220428152302-39d4317da171/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
golang.org/x/exp/typeparams v0.0.0-20230203172020-98cc5a0785f9/go.mod h1:AbB0pIl9nAr9wVwH+Z2ZpaocVmF5I4GyWCDIsVjR0bk=
golang.org/x/exp/typeparams v0.0.0-20251125195548-87e1e737ad39 h1:yzGKB4T4r1nFi65o7dQ96ERTfU2trk8Ige9aqqADqf4=
golang.org/x/exp/typeparams v0.0.0-20251125195548-87e1e737ad39/go.mod h1:4Mzdyp/6jzw9auFDJ3OMF5qksa7UvPnzKqTVGcb04ms=
golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358 h1:qWFG1Dj7TBjOjOvhEOkmyGPVoquqUKnIU0lEVLp8xyk=
golang.org/x/exp/typeparams v0.0.0-20260209203927-2842357ff358/go.mod h1:4Mzdyp/6jzw9auFDJ3OMF5qksa7UvPnzKqTVGcb04ms=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
@@ -685,8 +685,8 @@ golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/mod v0.33.0 h1:tHFzIWbBifEmbwtGz65eaWyGiGZatSrT9prnU8DbVL8=
golang.org/x/mod v0.33.0/go.mod h1:swjeQEj+6r7fODbD2cqrnje9PnziFuw4bmLbBZFrQ5w=
golang.org/x/mod v0.34.0 h1:xIHgNUUnW6sYkcM5Jleh05DvLOtwc6RitGHbDk4akRI=
golang.org/x/mod v0.34.0/go.mod h1:ykgH52iCZe79kzLLMhyCUzhMci+nQj+0XkbXpNYtVjY=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -725,8 +725,8 @@ golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
golang.org/x/net v0.15.0/go.mod h1:idbUs1IY1+zTqbi8yxTbhexhEEk5ur9LInksu6HrEpk=
golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.50.0 h1:ucWh9eiCGyDR3vtzso0WMQinm2Dnt8cFMuQa9K33J60=
golang.org/x/net v0.50.0/go.mod h1:UgoSli3F/pBgdJBHCTc+tp3gmrU4XswgGRgtnwWTfyM=
golang.org/x/net v0.52.0 h1:He/TN1l0e4mmR3QqHMT2Xab3Aj3L9qjbhRm78/6jrW0=
golang.org/x/net v0.52.0/go.mod h1:R1MAz7uMZxVMualyPXb+VaqGSa3LIaUqk0eEt3w36Sw=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -748,8 +748,8 @@ golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sync v0.19.0 h1:vV+1eWNmZ5geRlYjzm2adRgW2/mcpevXNg50YZtPCE4=
golang.org/x/sync v0.19.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI=
golang.org/x/sync v0.20.0 h1:e0PTpb7pjO8GAtTs2dQ6jYa5BWYlMuX047Dco/pItO4=
golang.org/x/sync v0.20.0/go.mod h1:9xrNwdLfx4jkKbNva9FpL6vEN7evnE43NNNJQ2LF3+0=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -800,8 +800,8 @@ golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.41.0 h1:Ivj+2Cp/ylzLiEU89QhWblYnOE9zerudt9Ftecq2C6k=
golang.org/x/sys v0.41.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks=
golang.org/x/sys v0.42.0 h1:omrd2nAlyT5ESRdCLYdm3+fMfNFE/+Rf4bDIQImRJeo=
golang.org/x/sys v0.42.0/go.mod h1:4GL1E5IUh+htKOUEOaiffhrAeqysfVGipDYzABqnCmw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
@@ -818,8 +818,8 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.33.0 h1:B3njUFyqtHDUI5jMn1YIr5B0IE2U0qck04r6d4KPAxE=
golang.org/x/text v0.33.0/go.mod h1:LuMebE6+rBincTi9+xWTY8TztLzKHc/9C1uBCG27+q8=
golang.org/x/text v0.34.0 h1:oL/Qq0Kdaqxa1KbNeMKwQq0reLCCaFtqu2eNuSeNHbk=
golang.org/x/text v0.34.0/go.mod h1:homfLqTYRFyVYemLBFl5GgL/DWEiH5wcsQ5gSh1yziA=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -873,8 +873,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg=
golang.org/x/tools v0.42.0 h1:uNgphsn75Tdz5Ji2q36v/nsFSfR/9BRFvqhGBaJGd5k=
golang.org/x/tools v0.42.0/go.mod h1:Ma6lCIwGZvHK6XtgbswSoWroEkhugApmsXyrUmBhfr0=
golang.org/x/tools v0.43.0 h1:12BdW9CeB3Z+J/I/wj34VMl8X+fEXBxVR90JeMX5E7s=
golang.org/x/tools v0.43.0/go.mod h1:uHkMso649BX2cZK6+RpuIPXS3ho2hZo4FVwfoy1vIk0=
golang.org/x/tools/go/expect v0.1.1-deprecated h1:jpBZDwmgPhXsKZC6WhL20P4b/wmnpsEAGHaNy0n/rJM=
golang.org/x/tools/go/expect v0.1.1-deprecated/go.mod h1:eihoPOH+FgIqa3FpoTwguz/bVUSGBlGQU67vpBeOrBY=
golang.org/x/tools/go/packages/packagestest v0.1.1-deprecated h1:1h2MnaIAIXISqTFKdENegdpAgUXz6NrPEsbIeWaBRvM=
@@ -985,8 +985,8 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWh
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
honnef.co/go/tools v0.6.1 h1:R094WgE8K4JirYjBaOpz/AvTyUu/3wbmAoskKN/pxTI=
honnef.co/go/tools v0.6.1/go.mod h1:3puzxxljPCe8RGJX7BIy1plGbxEOZni5mR2aXe3/uk4=
honnef.co/go/tools v0.7.0 h1:w6WUp1VbkqPEgLz4rkBzH/CSU6HkoqNLp6GstyTx3lU=
honnef.co/go/tools v0.7.0/go.mod h1:pm29oPxeP3P82ISxZDgIYeOaf9ta6Pi0EWvCFoLG2vc=
mvdan.cc/gofumpt v0.9.2 h1:zsEMWL8SVKGHNztrx6uZrXdp7AX8r421Vvp23sz7ik4=
mvdan.cc/gofumpt v0.9.2/go.mod h1:iB7Hn+ai8lPvofHd9ZFGVg2GOr8sBUw1QUWjNbmIL/s=
mvdan.cc/unparam v0.0.0-20251027182757-5beb8c8f8f15 h1:ssMzja7PDPJV8FStj7hq9IKiuiKhgz9ErWw+m68e7DI=
@@ -1,8 +1,10 @@
package analyzer
import (
"fmt"
"go/ast"
"go/token"
"go/types"
"regexp"
"strings"
@@ -33,6 +35,8 @@ type SQLInjectionScanner struct {
userInputPatterns []string
// httpInputFuncs are functions that read HTTP input
httpInputFuncs map[string]map[string]bool
// pass is the current analysis pass
pass *analysis.Pass
}
// SQLInjectionViolation represents a detected SQL injection vulnerability.
@@ -126,7 +130,7 @@ func NewSQLInjectionScanner() *SQLInjectionScanner {
"user", "input", "param", "query", "search", "filter",
"id", "name", "email", "password", "username", "request",
"body", "form", "data", "value", "arg", "args",
"q", "s", "term", "keyword", "text", "content",
"term", "keyword", "text", "content",
},
httpInputFuncs: map[string]map[string]bool{
"http": {
@@ -171,6 +175,7 @@ var placeholderPattern = regexp.MustCompile(`(\?|\$\d+|:\w+|@\w+)`)
// ScanFile scans a file for SQL injection vulnerabilities.
func (s *SQLInjectionScanner) ScanFile(pass *analysis.Pass, file *ast.File) []SQLInjectionViolation {
s.pass = pass
var violations []SQLInjectionViolation
ast.Inspect(file, func(n ast.Node) bool {
@@ -200,100 +205,115 @@ func (s *SQLInjectionScanner) checkCallExpr(call *ast.CallExpr) *SQLInjectionVio
return nil
}
// Ignore *sql.Stmt calls since they don't take queries
if s.isStmtMethod(call) {
return nil
}
// If this is a parameterized query (first arg is string literal with placeholders,
// subsequent args are parameters), it's safe
if s.isParameterizedQuery(call) {
return nil
}
// Check arguments for dangerous patterns
for _, arg := range call.Args {
// Pattern 1: fmt.Sprintf result used as query
if innerCall, ok := arg.(*ast.CallExpr); ok {
if s.isDangerousFormatCall(innerCall) {
if s.containsUserInput(innerCall) {
return &SQLInjectionViolation{
Pos: call.Pos(),
End: call.End(),
Message: "SQL INJECTION: fmt.Sprintf with user input passed to " + methodName + "()",
Severity: SQLISeverityCritical,
VulnType: "sprintf",
Suggestion: "Use parameterized queries with placeholders (?, $1, :name)",
CodeFix: s.generateParameterizedFix(methodName, arg),
}
}
// Even without detected user input, format strings are suspicious
// Determine which argument is the query string
queryIdx := 0
if strings.HasSuffix(methodName, "Context") {
queryIdx = 1
}
if len(call.Args) <= queryIdx {
return nil
}
// Only check the query argument for dangerous patterns
arg := call.Args[queryIdx]
// Pattern 1: fmt.Sprintf result used as query
if innerCall, ok := arg.(*ast.CallExpr); ok {
if s.isDangerousFormatCall(innerCall) {
if s.containsUserInput(innerCall) {
return &SQLInjectionViolation{
Pos: call.Pos(),
End: call.End(),
Message: "potential SQL injection: fmt.Sprintf result passed to " + methodName + "() - use parameterized queries",
Severity: SQLISeverityHigh,
Message: "SQL INJECTION: fmt.Sprintf with user input passed to " + methodName + "()",
Severity: SQLISeverityCritical,
VulnType: "sprintf",
Suggestion: "Replace fmt.Sprintf with parameterized query using placeholders",
Suggestion: "Use parameterized queries with placeholders (?, $1, :name)",
CodeFix: s.generateParameterizedFix(methodName, arg),
}
}
// Check for HTTP input functions
if s.isHTTPInputCall(innerCall) {
// Even without detected user input, format strings are suspicious
return &SQLInjectionViolation{
Pos: call.Pos(),
End: call.End(),
Message: "potential SQL injection: fmt.Sprintf result passed to " + methodName + "() - use parameterized queries",
Severity: SQLISeverityHigh,
VulnType: "sprintf",
Suggestion: "Replace fmt.Sprintf with parameterized query using placeholders",
CodeFix: s.generateParameterizedFix(methodName, arg),
}
}
// Check for HTTP input functions
if s.isHTTPInputCall(innerCall) {
return &SQLInjectionViolation{
Pos: call.Pos(),
End: call.End(),
Message: "SQL INJECTION: HTTP input directly used in " + methodName + "()",
Severity: SQLISeverityCritical,
VulnType: "tainted",
Suggestion: "Never use HTTP input directly in SQL - always use parameterized queries",
}
}
}
// Pattern 2: String concatenation used as query
if binExpr, ok := arg.(*ast.BinaryExpr); ok {
if binExpr.Op == token.ADD {
if s.containsTaintedVariable(binExpr) {
return &SQLInjectionViolation{
Pos: call.Pos(),
End: call.End(),
Message: "SQL INJECTION: HTTP input directly used in " + methodName + "()",
Message: "SQL INJECTION: string concatenation with user input in " + methodName + "()",
Severity: SQLISeverityCritical,
VulnType: "tainted",
Suggestion: "Never use HTTP input directly in SQL - always use parameterized queries",
VulnType: "concat",
Suggestion: "Use parameterized queries instead of string concatenation",
CodeFix: "Replace: db." + methodName + "(\"SELECT * FROM users WHERE id = \" + id)\nWith: db." + methodName + "(\"SELECT * FROM users WHERE id = ?\", id)",
}
}
}
// Pattern 2: String concatenation used as query
if binExpr, ok := arg.(*ast.BinaryExpr); ok {
if binExpr.Op == token.ADD {
if s.containsTaintedVariable(binExpr) {
return &SQLInjectionViolation{
Pos: call.Pos(),
End: call.End(),
Message: "SQL INJECTION: string concatenation with user input in " + methodName + "()",
Severity: SQLISeverityCritical,
VulnType: "concat",
Suggestion: "Use parameterized queries instead of string concatenation",
CodeFix: "Replace: db." + methodName + "(\"SELECT * FROM users WHERE id = \" + id)\nWith: db." + methodName + "(\"SELECT * FROM users WHERE id = ?\", id)",
}
}
if s.containsStringVariable(binExpr) {
return &SQLInjectionViolation{
Pos: call.Pos(),
End: call.End(),
Message: "potential SQL injection: string concatenation in " + methodName + "()",
Severity: SQLISeverityHigh,
VulnType: "concat",
Suggestion: "Use parameterized queries instead of string concatenation",
}
}
}
}
// Pattern 3: Tainted variable used directly
if ident, ok := arg.(*ast.Ident); ok {
if s.isTaintedVariable(ident.Name) {
if s.containsStringVariable(binExpr) {
return &SQLInjectionViolation{
Pos: call.Pos(),
End: call.End(),
Message: "SQL INJECTION: potentially tainted variable '" + ident.Name + "' used in " + methodName + "()",
Message: "potential SQL injection: string concatenation in " + methodName + "()",
Severity: SQLISeverityHigh,
VulnType: "tainted",
Suggestion: "Validate and sanitize '" + ident.Name + "' or use parameterized queries",
VulnType: "concat",
Suggestion: "Use parameterized queries instead of string concatenation",
}
}
if s.mightBeDynamicQuery(ident) {
return &SQLInjectionViolation{
Pos: call.Pos(),
End: call.End(),
Message: "review SQL query in " + methodName + "(): ensure '" + ident.Name + "' is not built with user input",
Severity: SQLISeverityMedium,
VulnType: "variable",
Suggestion: "Audit the construction of '" + ident.Name + "' to ensure it doesn't contain user input",
}
}
}
// Pattern 3: Tainted variable used directly
if ident := getIdent(arg); ident != nil {
if s.isTaintedVariable(ident.Name) && !s.isConstant(arg) {
return &SQLInjectionViolation{
Pos: call.Pos(),
End: call.End(),
Message: fmt.Sprintf("SQL INJECTION: potentially tainted variable '%s' used in %s ()", ident.Name, methodName),
Severity: SQLISeverityHigh,
VulnType: "tainted",
Suggestion: fmt.Sprintf("Validate and sanitize '%s' or use parameterized queries", ident.Name),
}
}
if s.mightBeDynamicQuery(ident) && !s.isConstant(arg) {
return &SQLInjectionViolation{
Pos: call.Pos(),
End: call.End(),
Message: "review SQL query in " + methodName + "(): ensure '" + ident.Name + "' is not built with user input",
Severity: SQLISeverityMedium,
VulnType: "variable",
Suggestion: "Audit the construction of '" + ident.Name + "' to ensure it doesn't contain user input",
}
}
}
@@ -320,8 +340,8 @@ func (s *SQLInjectionScanner) checkORMRawMethod(call *ast.CallExpr, methodName s
// Check if first argument is a string literal (safe) or variable (needs review)
if _, ok := firstArg.(*ast.BasicLit); !ok {
// Not a string literal - might be dangerous
if ident, ok := firstArg.(*ast.Ident); ok {
if s.isTaintedVariable(ident.Name) {
if ident := getIdent(firstArg); ident != nil {
if s.isTaintedVariable(ident.Name) && !s.isConstant(firstArg) {
return &SQLInjectionViolation{
Pos: call.Pos(),
End: call.End(),
@@ -365,6 +385,78 @@ func (s *SQLInjectionScanner) isHTTPInputCall(call *ast.CallExpr) bool {
return httpMethods[methodName]
}
// isConstant reports whether expr resolves to a declared constant.
// Type-checker information (when available) is consulted first; otherwise
// the parser-resolved object kind is used as a best-effort fallback.
func (s *SQLInjectionScanner) isConstant(expr ast.Expr) bool {
	if expr == nil {
		return false
	}
	// Preferred path: exact answer from the type checker.
	if s.pass != nil && s.pass.TypesInfo != nil {
		if id := getIdent(expr); id != nil {
			if obj := s.pass.TypesInfo.ObjectOf(id); obj != nil {
				_, isConst := obj.(*types.Const)
				return isConst
			}
		}
	}
	// Fallback path: rely on the object graph built by the parser.
	id, ok := expr.(*ast.Ident)
	return ok && id.Obj != nil && id.Obj.Kind == ast.Con
}
// getIdent extracts an identifier from an expression (Ident or SelectorExpr).
func getIdent(expr ast.Expr) *ast.Ident {
switch e := expr.(type) {
case *ast.Ident:
return e
case *ast.SelectorExpr:
return e.Sel
default:
return nil
}
}
// getConstantValue attempts to get the string value of a constant expression.
// It prefers the type checker's recorded constant value; without type
// information it falls back to walking the parser-resolved declaration of
// the identifier. The boolean result reports whether a value was found.
func (s *SQLInjectionScanner) getConstantValue(expr ast.Expr) (string, bool) {
	if expr == nil {
		return "", false
	}
	if s.pass != nil && s.pass.TypesInfo != nil {
		if ident := getIdent(expr); ident != nil {
			if obj := s.pass.TypesInfo.ObjectOf(ident); obj != nil {
				if c, ok := obj.(*types.Const); ok {
					// ExactString renders string constants with surrounding quotes.
					val := c.Val().ExactString()
					// Remove quotes if it's a string constant
					// NOTE(review): raw (backtick) literals are not trimmed here — confirm intended.
					if strings.HasPrefix(val, "\"") && strings.HasSuffix(val, "\"") {
						return val[1 : len(val)-1], true
					}
					return val, true
				}
			}
		}
	}
	// Fallback: locate the constant's initializer via the AST object graph.
	if ident, ok := expr.(*ast.Ident); ok {
		if ident.Obj != nil && ident.Obj.Kind == ast.Con {
			if vs, ok := ident.Obj.Decl.(*ast.ValueSpec); ok {
				for i, name := range vs.Names {
					// Match this identifier to its position in the spec so the
					// corresponding initializer value can be read.
					if name.Name == ident.Name && i < len(vs.Values) {
						if lit, ok := vs.Values[i].(*ast.BasicLit); ok && lit.Kind == token.STRING {
							val := lit.Value
							// lit.Value keeps the source quotes; strip them for the caller.
							if strings.HasPrefix(val, "\"") && strings.HasSuffix(val, "\"") {
								return val[1 : len(val)-1], true
							}
							return val, true
						}
					}
				}
			}
		}
	}
	return "", false
}
// isTaintedVariable checks if a variable name suggests user input.
func (s *SQLInjectionScanner) isTaintedVariable(name string) bool {
lowerName := strings.ToLower(name)
@@ -382,7 +474,7 @@ func (s *SQLInjectionScanner) containsTaintedVariable(expr ast.Expr) bool {
ast.Inspect(expr, func(n ast.Node) bool {
if ident, ok := n.(*ast.Ident); ok {
if s.isTaintedVariable(ident.Name) {
if s.isTaintedVariable(ident.Name) && !s.isConstant(ident) {
hasTainted = true
return false
}
@@ -398,23 +490,62 @@ func (s *SQLInjectionScanner) MarkVariableAsTainted(name string) {
s.taintedVariables[name] = true
}
// isStmtMethod checks if a call is a method on a prepared statement (e.g., *sql.Stmt).
// With type information available it inspects the method's receiver type;
// without it, a name-based heuristic on the receiver expression is used.
func (s *SQLInjectionScanner) isStmtMethod(call *ast.CallExpr) bool {
	if sel, ok := call.Fun.(*ast.SelectorExpr); ok {
		if s.pass != nil && s.pass.TypesInfo != nil {
			// Resolve the selected method and inspect its receiver's type string.
			if selObj := s.pass.TypesInfo.Uses[sel.Sel]; selObj != nil {
				if sig, ok := selObj.Type().(*types.Signature); ok {
					if recv := sig.Recv(); recv != nil {
						recvType := recv.Type().String()
						// Matches database/sql and sqlx prepared-statement receivers.
						if strings.Contains(recvType, "sql.Stmt") || strings.Contains(recvType, "sqlx.Stmt") || strings.Contains(recvType, "NamedStmt") {
							return true
						}
					}
				}
			}
		} else {
			// Fallback heuristic: if the receiver is named "stmt", assume it's a statement
			// NOTE(review): this branch runs only when type info is absent entirely;
			// a failed Uses lookup above does not fall through to it — confirm intended.
			if ident, ok := sel.X.(*ast.Ident); ok {
				name := strings.ToLower(ident.Name)
				if strings.Contains(name, "stmt") {
					return true
				}
			}
		}
	}
	return false
}
// isParameterizedQuery checks if a call uses parameterized query syntax.
// A parameterized query has a string literal with placeholders (?, $1, :name, @param)
// as the first argument, with subsequent arguments providing the values.
// A parameterized query has a string literal or constant with placeholders (?, $1, :name, @param)
// as the query argument, with subsequent arguments providing the values.
func (s *SQLInjectionScanner) isParameterizedQuery(call *ast.CallExpr) bool {
if len(call.Args) < 2 {
methodName := s.getMethodName(call)
queryIdx := 0
// Context-aware methods usually have the query as the second argument
if strings.HasSuffix(methodName, "Context") {
queryIdx = 1
}
if len(call.Args) <= queryIdx+1 {
return false
}
// First argument should be a string literal
firstArg := call.Args[0]
lit, ok := firstArg.(*ast.BasicLit)
if !ok || lit.Kind != token.STRING {
// Query argument should be a string literal or constant
queryArg := call.Args[queryIdx]
var queryStr string
if lit, ok := queryArg.(*ast.BasicLit); ok && lit.Kind == token.STRING {
queryStr = lit.Value
} else if val, ok := s.getConstantValue(queryArg); ok {
queryStr = val
}
if queryStr == "" {
return false
}
// Check if the string contains placeholder patterns
queryStr := lit.Value
return placeholderPattern.MatchString(queryStr)
}
@@ -482,6 +613,10 @@ func (s *SQLInjectionScanner) containsUserInput(call *ast.CallExpr) bool {
}
// If argument is not a literal, it might be user input
if _, ok := arg.(*ast.BasicLit); !ok {
// Check if it's a constant (including exported constants from other packages)
if s.isConstant(arg) {
continue
}
return true
}
}
@@ -497,6 +632,9 @@ func (s *SQLInjectionScanner) containsStringVariable(expr ast.Expr) bool {
switch node := n.(type) {
case *ast.Ident:
// Check if this is a variable (not a constant)
if s.isConstant(node) {
return true
}
if node.Obj != nil {
hasVariable = true
return false
@@ -527,11 +665,19 @@ func (s *SQLInjectionScanner) isSQLStringConcat(expr *ast.BinaryExpr) bool {
// isQueryString checks if an expression looks like a SQL query string.
func (s *SQLInjectionScanner) isQueryString(expr ast.Expr) bool {
var val string
if lit, ok := expr.(*ast.BasicLit); ok && lit.Kind == token.STRING {
value := strings.ToUpper(lit.Value)
return sqlPattern.MatchString(value)
val = lit.Value
} else if v, ok := s.getConstantValue(expr); ok {
val = v
}
return false
if val == "" {
return false
}
value := strings.ToUpper(val)
return sqlPattern.MatchString(value)
}
// mightBeDynamicQuery checks if an identifier might be a dynamically built query.
@@ -583,6 +729,7 @@ func ScanFileAST(fset *token.FileSet, file *ast.File) []SQLInjectionViolation {
// ScanFileNoPass scans a file for SQL injection vulnerabilities without analysis.Pass.
// This is a method version for testing purposes.
func (s *SQLInjectionScanner) ScanFileNoPass(fset *token.FileSet, file *ast.File) []SQLInjectionViolation {
s.pass = nil
var violations []SQLInjectionViolation
ast.Inspect(file, func(n ast.Node) bool {
+46
View File
@@ -39,7 +39,33 @@ func addIntExpr(x, y ast.Expr) ast.Expr {
return &ast.BinaryExpr{X: x, Op: token.ADD, Y: y}
}
// incIntExpr returns an expression equivalent to x+1, folding constant
// operands and simplifying the pattern (e - 1) + 1 back to e.
func incIntExpr(x ast.Expr) ast.Expr {
	if x == nil {
		return nil
	}
	// Constant fold: n becomes n+1.
	if n, ok := intValue(x); ok {
		return intExpr(n + 1)
	}
	// Simplify: (e - 1) + 1 collapses to e.
	if sub, ok := x.(*ast.BinaryExpr); ok && sub.Op == token.SUB {
		if one, ok := intValue(sub.Y); ok && one == 1 {
			return sub.X
		}
	}
	return &ast.BinaryExpr{X: x, Op: token.ADD, Y: intExpr(1)}
}
func subIntExpr(x, y ast.Expr) ast.Expr {
if binary, ok := x.(*ast.BinaryExpr); ok && binary.Op == token.ADD {
if exprEqual(binary.X, y) {
return binary.Y
}
if exprEqual(binary.Y, y) {
return binary.X
}
}
if unary, ok := y.(*ast.UnaryExpr); ok && unary.Op == token.SUB {
y = unary.X
} else {
@@ -78,6 +104,26 @@ func mulIntExpr(x, y ast.Expr) ast.Expr {
return &ast.BinaryExpr{X: x, Op: token.MUL, Y: y}
}
// divIntExpr returns an expression for x/y and a flag reporting whether the
// result may have been rounded down (i.e. it is not known to be exact).
// Constant operands are folded; a known-zero divisor yields (nil, false).
func divIntExpr(x, y ast.Expr) (ast.Expr, bool) {
	if x == nil || y == nil {
		return nil, false
	}
	xInt, xOK := intValue(x)
	yInt, yOK := intValue(y)
	// Reject a known-zero divisor before constant folding: the previous
	// ordering evaluated xInt/yInt first and panicked with an integer
	// divide by zero when both operands were constants and y was 0.
	if yOK && yInt == 0 {
		return nil, false
	}
	if xOK && yOK {
		// Fold; flag rounding when the division has a remainder.
		return intExpr(xInt / yInt), xInt%yInt != 0
	}
	// Identities: 0/y == 0 and x/1 == x are exact.
	if (xOK && xInt == 0) || (yOK && yInt == 1) {
		return x, false
	}
	// Symbolic quotient: assume it may round.
	return &ast.BinaryExpr{X: x, Op: token.QUO, Y: y}, true
}
func intExpr(n int) *ast.BasicLit {
return &ast.BasicLit{Kind: token.INT, Value: strconv.Itoa(n)}
}
+247 -90
View File
@@ -5,6 +5,7 @@ import (
"go/ast"
"go/format"
"go/token"
"go/types"
"strconv"
"golang.org/x/tools/go/analysis"
@@ -17,6 +18,8 @@ type sliceDeclaration struct {
lenExpr ast.Expr // Initial length of this slice.
exclude bool // Whether this slice has been disqualified due to an unsupported pattern.
hasReturn bool // Whether a return statement has been found after the first append. Any subsequent appends will disqualify this slice in simple mode.
assigning bool // Whether this slice is currently being assigned the result of an append.
detached bool // Whether this slice has been appended without reassignment. Will be disqualified if this happens more than once.
}
type sliceAppend struct {
@@ -29,27 +32,27 @@ type returnsVisitor struct {
simple bool
includeRangeLoops bool
includeForLoops bool
pass *analysis.Pass
// visitor fields
sliceDeclarations []*sliceDeclaration
sliceAppends []*sliceAppend
loopVars []ast.Expr
preallocHints []analysis.Diagnostic
level int // Current nesting level. Loops do not increment the level.
hasReturn bool // Whether a return statement has been found. Slices appended before and after a return are disqualified in simple mode.
hasGoto bool // Whether a goto statement has been found. Goto disqualifies pending and subsequent slices in simple mode.
hasBranch bool // Whether a branch statement has been found. Loops with branch statements are unsupported in simple mode.
}
func Check(files []*ast.File, simple, includeRangeLoops, includeForLoops bool) []analysis.Diagnostic {
func Check(pass *analysis.Pass, simple, includeRangeLoops, includeForLoops bool) {
retVis := &returnsVisitor{
simple: simple,
includeRangeLoops: includeRangeLoops,
includeForLoops: includeForLoops,
pass: pass,
}
for _, f := range files {
for _, f := range pass.Files {
ast.Walk(retVis, f)
}
return retVis.preallocHints
}
func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor {
@@ -116,7 +119,7 @@ func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor {
buf.Truncate(undo)
}
}
v.preallocHints = append(v.preallocHints, analysis.Diagnostic{
v.pass.Report(analysis.Diagnostic{
Pos: sliceDecl.pos,
Message: buf.String(),
})
@@ -137,15 +140,21 @@ func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor {
return nil
case *ast.ValueSpec:
_, isArrayType := inferExprType(s.Type).(*ast.ArrayType)
var isArrayOrSlice bool
if t := v.pass.TypesInfo.TypeOf(s.Type); t != nil {
switch t.Underlying().(type) {
case *types.Array, *types.Slice:
isArrayOrSlice = true
}
}
for i, name := range s.Names {
var lenExpr ast.Expr
if i >= len(s.Values) {
if !isArrayType {
if !isArrayOrSlice {
continue
}
lenExpr = intExpr(0)
} else if lenExpr = isCreateArray(s.Values[i]); lenExpr == nil {
} else if lenExpr = v.isCreateArray(s.Values[i]); lenExpr == nil {
if id, ok := s.Values[i].(*ast.Ident); !ok || id.Name != "nil" {
continue
}
@@ -174,7 +183,7 @@ func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor {
if !ok {
continue
}
if lenExpr := isCreateArray(s.Rhs[i]); lenExpr != nil {
if lenExpr := v.isCreateArray(s.Rhs[i]); lenExpr != nil {
v.sliceDeclarations = append(v.sliceDeclarations, &sliceDeclaration{name: ident.Name, pos: s.Pos(), level: v.level, lenExpr: lenExpr})
} else {
declIdx := -1
@@ -199,7 +208,7 @@ func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor {
if len(expr.Args) >= 2 && !sliceDecl.hasReturn && sliceDecl.level == v.level {
if funIdent, ok := expr.Fun.(*ast.Ident); ok && funIdent.Name == "append" {
if rhsIdent, ok := expr.Args[0].(*ast.Ident); ok && ident.Name == rhsIdent.Name {
v.sliceAppends = append(v.sliceAppends, &sliceAppend{index: declIdx, countExpr: appendCount(expr)})
sliceDecl.assigning = true
continue
}
}
@@ -209,6 +218,45 @@ func (v *returnsVisitor) Visit(node ast.Node) ast.Visitor {
}
}
case *ast.CallExpr:
if funIdent, ok := s.Fun.(*ast.Ident); ok && funIdent.Name == "append" && len(s.Args) >= 2 {
if rhsIdent, ok := s.Args[0].(*ast.Ident); ok {
declIdx := -1
for i := len(v.sliceDeclarations) - 1; i >= 0; i-- {
if v.sliceDeclarations[i].name == rhsIdent.Name {
declIdx = i
break
}
}
if declIdx < 0 {
return v
}
sliceDecl := v.sliceDeclarations[declIdx]
if sliceDecl.exclude {
return v
}
if sliceDecl.hasReturn || sliceDecl.level != v.level || sliceDecl.detached {
sliceDecl.exclude = true
return v
}
countExpr := v.appendCount(s)
if countExpr != nil && (hasAny(countExpr, v.loopVars) || hasVarReference(countExpr, sliceDecl.name)) {
// exclude slice if append count references it
sliceDecl.exclude = true
return v
}
if sliceDecl.assigning {
sliceDecl.assigning = false
} else {
sliceDecl.detached = true
}
v.sliceAppends = append(v.sliceAppends, &sliceAppend{index: declIdx, countExpr: countExpr})
}
}
case *ast.RangeStmt:
return v.walkRange(s)
@@ -288,17 +336,26 @@ func (v *returnsVisitor) walkRange(stmt *ast.RangeStmt) ast.Visitor {
}
}
} else {
for i := range v.sliceDeclarations {
for i, sliceDecl := range v.sliceDeclarations {
if sliceDecl.exclude {
continue
}
prev := -1
for j := len(v.sliceAppends) - 1; j >= appendIdx; j-- {
if v.sliceAppends[j] != nil && v.sliceAppends[j].index == i {
if prev >= 0 {
if prev < 0 {
if loopCountExpr == nil {
// make appends indeterminate if the loop count is indeterminate
v.sliceAppends[j].countExpr = nil
} else if hasVarReference(loopCountExpr, sliceDecl.name) {
// exclude slice if loop count references it
sliceDecl.exclude = true
break
}
} else {
// consolidate appends to the same slice
v.sliceAppends[j].countExpr = addIntExpr(v.sliceAppends[j].countExpr, v.sliceAppends[prev].countExpr)
v.sliceAppends[prev] = nil
} else if loopCountExpr == nil {
// make appends indeterminate if the loop count is indeterminate
v.sliceAppends[j].countExpr = nil
}
prev = j
}
@@ -347,17 +404,26 @@ func (v *returnsVisitor) walkFor(stmt *ast.ForStmt) ast.Visitor {
}
}
} else {
for i := range v.sliceDeclarations {
for i, sliceDecl := range v.sliceDeclarations {
if sliceDecl.exclude {
continue
}
prev := -1
for j := len(v.sliceAppends) - 1; j >= appendIdx; j-- {
if v.sliceAppends[j] != nil && v.sliceAppends[j].index == i {
if prev >= 0 {
if prev < 0 {
if loopCountExpr == nil {
// make appends indeterminate if the loop count is indeterminate
v.sliceAppends[j].countExpr = nil
} else if hasVarReference(loopCountExpr, sliceDecl.name) {
// exclude slice if loop count references it
sliceDecl.exclude = true
break
}
} else {
// consolidate appends to the same slice
v.sliceAppends[j].countExpr = addIntExpr(v.sliceAppends[j].countExpr, v.sliceAppends[prev].countExpr)
v.sliceAppends[prev] = nil
} else if loopCountExpr == nil {
// make appends indeterminate if the loop count is indeterminate
v.sliceAppends[j].countExpr = nil
}
prev = j
}
@@ -379,12 +445,15 @@ func (v *returnsVisitor) walkSwitchSelect(body *ast.BlockStmt) ast.Visitor {
return nil
}
func isCreateArray(expr ast.Expr) ast.Expr {
func (v *returnsVisitor) isCreateArray(expr ast.Expr) ast.Expr {
switch e := expr.(type) {
case *ast.CompositeLit:
// []any{...}
if _, ok := inferExprType(e.Type).(*ast.ArrayType); ok {
return intExpr(len(e.Elts))
if t := v.pass.TypesInfo.TypeOf(e); t != nil {
switch t.Underlying().(type) {
case *types.Array, *types.Slice:
return intExpr(len(e.Elts))
}
}
case *ast.CallExpr:
switch len(e.Args) {
@@ -394,8 +463,10 @@ func isCreateArray(expr ast.Expr) ast.Expr {
if !ok || arg.Name != "nil" {
return nil
}
if _, ok = inferExprType(e.Fun).(*ast.ArrayType); ok {
return intExpr(0)
if t := v.pass.TypesInfo.TypeOf(e.Fun); t != nil {
if _, ok := t.Underlying().(*types.Slice); ok {
return intExpr(0)
}
}
case 2:
// make([]any, n)
@@ -421,37 +492,41 @@ func (v *returnsVisitor) rangeLoopCount(stmt *ast.RangeStmt) (ast.Expr, bool) {
}
} else if len(call.Args) >= 2 {
if funIdent, ok := call.Fun.(*ast.Ident); ok && funIdent.Name == "append" {
return addIntExpr(sliceLength(call.Args[0]), appendCount(call)), true
return addIntExpr(v.sliceLength(call.Args[0]), v.appendCount(call)), true
}
}
}
xType := inferExprType(x)
xType := v.pass.TypesInfo.TypeOf(x)
if xType != nil {
xType = xType.Underlying()
}
switch xType := xType.(type) {
case *ast.ChanType, *ast.FuncType:
case *types.Chan, *types.Signature:
return nil, false
case *ast.ArrayType:
case *types.Array:
if _, ok := stmt.X.(*ast.CompositeLit); ok && xType.Len() >= 0 {
return intExpr(int(xType.Len())), true
}
case *types.Slice:
if lit, ok := stmt.X.(*ast.CompositeLit); ok {
if xType.Len != nil {
return xType.Len, true
}
return intExpr(len(lit.Elts)), true
}
case *ast.MapType:
case *types.Map:
if lit, ok := x.(*ast.CompositeLit); ok {
return intExpr(len(lit.Elts)), true
}
case *ast.StarExpr:
if xType, ok := xType.X.(*ast.ArrayType); !ok || xType.Len == nil {
case *types.Pointer:
if xType, ok := xType.Elem().(*types.Array); !ok {
return nil, true
} else if unary, ok := x.(*ast.UnaryExpr); ok && unary.Op == token.AND {
if _, ok := unary.X.(*ast.CompositeLit); ok {
return xType.Len, true
if _, ok := unary.X.(*ast.CompositeLit); ok && xType.Len() >= 0 {
return intExpr(int(xType.Len())), true
}
}
case *ast.Ident:
if xType.Name == "string" {
case *types.Basic:
if xType.Info()&types.IsString != 0 {
if lit, ok := x.(*ast.BasicLit); ok && lit.Kind == token.STRING {
if str, err := strconv.Unquote(lit.Value); err == nil {
return intExpr(len(str)), true
@@ -466,12 +541,11 @@ func (v *returnsVisitor) rangeLoopCount(stmt *ast.RangeStmt) (ast.Expr, bool) {
return nil, true
}
if ident, ok := xType.(*ast.Ident); ok {
switch ident.Name {
case "byte", "rune", "int", "int8", "int16", "int32", "int64",
"uint", "uint8", "uint16", "uint32", "uint64", "uintptr":
if xType, ok := xType.(*types.Basic); ok {
switch {
case xType.Info()&types.IsInteger != 0:
return x, true
case "string":
case xType.Info()&types.IsString != 0:
default:
return nil, true
}
@@ -491,14 +565,14 @@ func (v *returnsVisitor) rangeLoopCount(stmt *ast.RangeStmt) (ast.Expr, bool) {
return &ast.CallExpr{Fun: ast.NewIdent("len"), Args: []ast.Expr{x}}, true
}
func appendCount(expr *ast.CallExpr) ast.Expr {
func (v *returnsVisitor) appendCount(expr *ast.CallExpr) ast.Expr {
if expr.Ellipsis.IsValid() {
return sliceLength(expr.Args[1])
return v.sliceLength(expr.Args[1])
}
return intExpr(len(expr.Args) - 1)
}
func sliceLength(expr ast.Expr) ast.Expr {
func (v *returnsVisitor) sliceLength(expr ast.Expr) ast.Expr {
if call, ok := expr.(*ast.CallExpr); ok {
if len(call.Args) == 1 {
if _, ok := call.Fun.(*ast.ArrayType); ok {
@@ -506,18 +580,22 @@ func sliceLength(expr ast.Expr) ast.Expr {
}
} else if len(call.Args) >= 2 {
if funIdent, ok := call.Fun.(*ast.Ident); ok && funIdent.Name == "append" {
return addIntExpr(sliceLength(call.Args[0]), appendCount(call))
return addIntExpr(v.sliceLength(call.Args[0]), v.appendCount(call))
}
}
}
switch xType := inferExprType(expr).(type) {
case *ast.ArrayType:
xType := v.pass.TypesInfo.TypeOf(expr)
if xType == nil {
return nil
}
switch xType := xType.Underlying().(type) {
case *types.Array, *types.Slice:
if lit, ok := expr.(*ast.CompositeLit); ok {
return intExpr(len(lit.Elts))
}
case *ast.Ident:
if xType.Name == "string" {
case *types.Basic:
if xType.Info()&types.IsString != 0 {
if lit, ok := expr.(*ast.BasicLit); ok && lit.Kind == token.STRING {
if str, err := strconv.Unquote(lit.Value); err == nil {
return intExpr(len(str))
@@ -558,55 +636,96 @@ func (v *returnsVisitor) forLoopCount(stmt *ast.ForStmt) (ast.Expr, bool) {
return nil, false
}
initStmt, ok := stmt.Init.(*ast.AssignStmt)
if !ok {
initAssign, ok := stmt.Init.(*ast.AssignStmt)
if !ok || len(initAssign.Lhs) != len(initAssign.Rhs) {
return nil, true
}
postStmt, ok := stmt.Post.(*ast.IncDecStmt)
if !ok {
return nil, true
}
postIdent, ok := postStmt.X.(*ast.Ident)
if !ok {
return nil, true
}
index := -1
for i := range initStmt.Lhs {
if ident, ok := initStmt.Lhs[i].(*ast.Ident); ok && ident.Name == postIdent.Name {
index = i
break
for i, lhs := range initAssign.Lhs {
initIdent, ok := lhs.(*ast.Ident)
if !ok {
continue
}
}
if index < 0 {
return nil, true
}
lower := initStmt.Rhs[index]
if hasCall(lower) {
return nil, true
}
var reverse bool
var step ast.Expr
switch s := stmt.Post.(type) {
case *ast.IncDecStmt:
if isIdentName(s.X, initIdent.Name) {
reverse = s.Tok == token.DEC
step = intExpr(1)
}
upper, op := forLoopUpperBound(stmt.Cond, postIdent.Name)
case *ast.AssignStmt:
if len(s.Lhs) != len(s.Rhs) {
return nil, true
}
if postStmt.Tok == token.INC {
if op == token.GTR || op == token.GEQ {
return nil, false
for i, lhs := range s.Lhs {
if !isIdentName(lhs, initIdent.Name) {
continue
}
switch s.Tok {
case token.ADD_ASSIGN, token.SUB_ASSIGN:
step = s.Rhs[i]
reverse = s.Tok == token.SUB_ASSIGN
case token.ASSIGN:
if rhsBinary, ok := s.Rhs[i].(*ast.BinaryExpr); ok {
reverse = s.Tok == token.SUB
if rhsBinary.Op == token.ADD || reverse {
if isIdentName(rhsBinary.X, initIdent.Name) {
step = rhsBinary.Y
} else if isIdentName(rhsBinary.Y, initIdent.Name) {
step = rhsBinary.X
}
}
} else {
return nil, false
}
default:
return nil, false
}
if step != nil {
break
}
}
}
} else {
if op == token.LSS || op == token.LEQ {
return nil, false
if step == nil {
continue
}
lower, upper = upper, lower
lower := initAssign.Rhs[i]
if hasCall(lower) {
continue // NATO: this should trigger another attempt
}
upper, op := forLoopUpperBound(stmt.Cond, initIdent.Name)
if !reverse {
if op == token.GTR || op == token.GEQ {
return nil, false
}
} else {
if op == token.LSS || op == token.LEQ {
return nil, false
}
lower, upper = upper, lower
}
if op == token.LEQ || op == token.GEQ {
upper = incIntExpr(upper)
}
countExpr, rounded := divIntExpr(subIntExpr(upper, lower), step)
if rounded {
// extra capacity in case non-unary step increment is rounded down
countExpr = incIntExpr(countExpr)
}
return countExpr, true
}
countExpr := subIntExpr(upper, lower)
if op == token.LEQ || op == token.GEQ {
countExpr = addIntExpr(countExpr, intExpr(1))
}
return countExpr, true
return nil, true
}
func forLoopUpperBound(expr ast.Expr, name string) (ast.Expr, token.Token) {
@@ -675,6 +794,11 @@ func forLoopUpperBound(expr ast.Expr, name string) (ast.Expr, token.Token) {
return nil, 0
}
func isIdentName(expr ast.Expr, name string) bool {
ident, ok := expr.(*ast.Ident)
return ok && ident.Name == name
}
func hasAny(node ast.Node, exprs []ast.Expr) bool {
var found bool
ast.Inspect(node, func(node ast.Node) bool {
@@ -713,6 +837,11 @@ func hasCall(expr ast.Expr) bool {
// allow cheap pure built-in functions
return true
}
case *ast.SelectorExpr:
// allow argument-less methods
if len(call.Args) == 0 {
return true
}
}
found = true
}
@@ -720,3 +849,31 @@ func hasCall(expr ast.Expr) bool {
})
return found
}
func hasVarReference(expr ast.Expr, name string) bool {
found := false
ast.Inspect(expr, func(node ast.Node) bool {
switch n := node.(type) {
case *ast.SelectorExpr:
// process target expression, ignore field selector
found = hasVarReference(n.X, name)
return false
case *ast.CallExpr:
// process args, ignore function name
for _, arg := range n.Args {
if found = hasVarReference(arg, name); found {
break
}
}
return false
case *ast.KeyValueExpr:
// process value, ignore key
found = hasVarReference(n.Value, name)
return false
case *ast.Ident:
found = n.Name == name
}
return !found
})
return found
}
-376
View File
@@ -1,376 +0,0 @@
package pkg
import (
"go/ast"
"go/token"
)
// inferExprType infers an AST node describing the type of expr using only
// syntactic information (no type checker). It returns nil when the type
// cannot be determined from the AST alone.
func inferExprType(expr ast.Expr) ast.Expr {
	switch e := expr.(type) {
	case *ast.ArrayType, *ast.StructType, *ast.FuncType, *ast.InterfaceType, *ast.MapType, *ast.ChanType:
		// Type expressions describe themselves.
		return e
	case *ast.ParenExpr:
		return inferExprType(e.X)
	case *ast.SliceExpr:
		// Slicing preserves the operand's type.
		return inferExprType(e.X)
	case *ast.TypeAssertExpr:
		return inferExprType(e.Type)
	case *ast.CompositeLit:
		return inferExprType(e.Type)
	case *ast.Ellipsis:
		// A variadic ...T parameter behaves as []T inside the function.
		return &ast.ArrayType{Elt: e.Elt}
	case *ast.FuncLit:
		return &ast.FuncType{Results: e.Type.Results}
	case *ast.BasicLit:
		return inferBasicType(e)
	case *ast.BinaryExpr:
		return inferBinaryType(e)
	case *ast.StarExpr:
		return inferStarType(e)
	case *ast.UnaryExpr:
		return inferUnaryType(e)
	case *ast.CallExpr:
		return inferCallType(e)
	case *ast.IndexExpr:
		return inferIndexType(e)
	case *ast.IndexListExpr:
		return inferIndexListType(e)
	case *ast.SelectorExpr:
		return inferSelectorType(e)
	case *ast.Ident:
		return inferIdentType(e)
	default:
		return nil
	}
}
func inferBasicType(basic *ast.BasicLit) ast.Expr {
switch basic.Kind {
case token.INT:
return ast.NewIdent("int")
case token.FLOAT:
return ast.NewIdent("float64")
case token.IMAG:
return ast.NewIdent("imag")
case token.CHAR:
return ast.NewIdent("char")
case token.STRING:
return ast.NewIdent("string")
default:
return nil
}
}
// inferBinaryType infers the result type of a binary expression.
// Comparison operators yield bool; every other operator preserves the
// operand type, preferring the left operand when it can be inferred.
func inferBinaryType(binary *ast.BinaryExpr) ast.Expr {
	switch binary.Op {
	case token.EQL, token.NEQ, token.LSS, token.LEQ, token.GTR, token.GEQ:
		return ast.NewIdent("bool")
	}
	if left := inferExprType(binary.X); left != nil {
		return left
	}
	return inferExprType(binary.Y)
}
// inferStarType infers the type of a star expression (*x), collapsing a
// double indirection **T back to T.
func inferStarType(star *ast.StarExpr) ast.Expr {
	base := inferExprType(star.X)
	if base == nil {
		return nil
	}
	if inner, ok := base.(*ast.StarExpr); ok {
		return inferExprType(inner.X)
	}
	return &ast.StarExpr{X: base}
}
// inferUnaryType infers the type of a unary expression: &x yields a pointer
// to x's type, <-ch yields the channel's element type, and every other
// operator preserves the operand type.
func inferUnaryType(unary *ast.UnaryExpr) ast.Expr {
	operand := inferExprType(unary.X)
	if operand == nil {
		return nil
	}
	switch unary.Op {
	case token.AND:
		return &ast.StarExpr{X: operand}
	case token.ARROW:
		if ch, ok := operand.(*ast.ChanType); ok {
			return inferExprType(ch.Value)
		}
	}
	return operand
}
// inferCallType infers the result type of a call expression. Built-in
// functions are special-cased; otherwise the first result type of the
// callee's inferred function type is used.
func inferCallType(call *ast.CallExpr) ast.Expr {
	// id.Obj == nil means the name is unresolved, i.e. likely a built-in.
	if id, ok := call.Fun.(*ast.Ident); ok && id.Obj == nil {
		switch id.Name {
		case "len", "cap", "copy":
			return ast.NewIdent("int")
		case "real", "imag":
			return ast.NewIdent("float64")
		case "complex":
			// NOTE(review): untyped complex(...) defaults to complex128 in Go;
			// "complex64" is kept here for compatibility — confirm intended.
			return ast.NewIdent("complex64")
		case "recover":
			return ast.NewIdent("any")
		case "make", "min", "max":
			if len(call.Args) > 0 {
				return inferExprType(call.Args[0])
			}
		case "new":
			if len(call.Args) > 0 {
				if arg := inferExprType(call.Args[0]); arg != nil {
					return &ast.StarExpr{X: arg}
				}
			}
		case "append":
			if len(call.Args) > 0 {
				if arg := inferExprType(call.Args[0]); arg != nil {
					return arg
				}
				return &ast.ArrayType{}
			}
		}
	}
	fun := inferExprType(call.Fun)
	// Guard ft.Results against nil: a func type with no result list has
	// Results == nil, and the previous code dereferenced it unconditionally,
	// panicking on calls to result-less functions.
	if ft, ok := fun.(*ast.FuncType); ok && ft.Results != nil && len(ft.Results.List) > 0 {
		return inferExprType(ft.Results.List[0].Type)
	}
	return fun
}
// inferIndexType infers the type of an index expression x[i]: indexing an
// array/slice yields the element type and a map yields the value type. A
// generic instantiation iter.Seq[V] is special-cased to its yield-func shape.
func inferIndexType(index *ast.IndexExpr) ast.Expr {
	// Special case: iter.Seq[V] — synthesize func(yield func(V) bool).
	// Matched purely by name, so a local package named "iter" would also hit this.
	if selector, ok := index.X.(*ast.SelectorExpr); ok && selector.Sel != nil && selector.Sel.Name == "Seq" {
		if ident, ok := selector.X.(*ast.Ident); ok && ident.Name == "iter" {
			return &ast.FuncType{
				Params: &ast.FieldList{List: []*ast.Field{{
					Names: []*ast.Ident{{Name: "yield"}},
					Type: &ast.FuncType{
						Params:  &ast.FieldList{List: []*ast.Field{{Type: ast.NewIdent("V")}}},
						Results: &ast.FieldList{List: []*ast.Field{{Type: ast.NewIdent("bool")}}},
					},
				}}},
			}
		}
	}
	switch x := inferExprType(index.X).(type) {
	case *ast.ArrayType:
		return inferExprType(x.Elt)
	case *ast.MapType:
		return inferExprType(x.Value)
	default:
		// Includes generic instantiations T[A]: fall back to the base type.
		return x
	}
}
// inferIndexListType infers the type of a multi-index expression x[a, b]
// (generic instantiation). iter.Seq2[K, V] is synthesized specially; for
// array-typed operands the element type is returned.
func inferIndexListType(index *ast.IndexListExpr) ast.Expr {
	// Special case: iter.Seq2[K, V] — synthesize func(yield func(K, V) bool).
	// Matched purely by name, so a local package named "iter" would also hit this.
	if selector, ok := index.X.(*ast.SelectorExpr); ok && selector.Sel != nil && selector.Sel.Name == "Seq2" {
		if ident, ok := selector.X.(*ast.Ident); ok && ident.Name == "iter" {
			return &ast.FuncType{
				Params: &ast.FieldList{List: []*ast.Field{{
					Names: []*ast.Ident{{Name: "yield"}},
					Type: &ast.FuncType{
						Params: &ast.FieldList{List: []*ast.Field{
							{Type: ast.NewIdent("K")},
							{Type: ast.NewIdent("V")},
						}},
						Results: &ast.FieldList{List: []*ast.Field{{Type: ast.NewIdent("bool")}}},
					},
				}}},
			}
		}
	}
	x := inferExprType(index.X)
	if at, ok := x.(*ast.ArrayType); ok {
		return inferExprType(at.Elt)
	}
	return x
}
// inferSelectorType infers the type of x.Sel by looking the selected name
// up among struct fields or interface methods, dereferencing one level of
// pointer indirection first.
func inferSelectorType(sel *ast.SelectorExpr) ast.Expr {
	recv := inferExprType(sel.X)
	if ptr, ok := recv.(*ast.StarExpr); ok {
		recv = ptr.X
	}
	var members *ast.FieldList
	switch t := recv.(type) {
	case *ast.StructType:
		members = t.Fields
	case *ast.InterfaceType:
		members = t.Methods
	default:
		return nil
	}
	for _, member := range members.List {
		for _, name := range member.Names {
			if name.Name == sel.Sel.Name {
				return inferExprType(member.Type)
			}
		}
	}
	return nil
}
// inferIdentType resolves the type of an identifier. Universe-scope names
// (those with no attached Object) are handled directly: predeclared type
// names resolve to themselves, nil to any, true/false to bool, and iota to
// int. Declared identifiers are resolved through their declaration node.
func inferIdentType(ident *ast.Ident) ast.Expr {
	if ident.Obj == nil {
		switch ident.Name {
		case "nil":
			return ast.NewIdent("any")
		case "true", "false":
			return ast.NewIdent("bool")
		case "iota":
			return ast.NewIdent("int")
		case "bool", "byte", "comparable", "error", "rune", "string", "any",
			"int", "int8", "int16", "int32", "int64",
			"uint", "uint8", "uint16", "uint32", "uint64", "uintptr",
			"float32", "float64", "complex64", "complex128":
			return ident
		}
		return nil
	}
	switch decl := ident.Obj.Decl.(type) {
	case *ast.Field:
		return inferExprType(decl.Type)
	case *ast.FuncDecl:
		return inferExprType(decl.Type)
	case *ast.TypeSpec:
		// Strip pointer indirections from the declared type; if that lands
		// on the very identifier node currently being resolved, the type is
		// a recursive pointer type and resolution must stop to avoid
		// infinite recursion. The comparison is node identity on purpose.
		t := decl.Type
		for {
			star, ok := t.(*ast.StarExpr)
			if !ok {
				break
			}
			t = star.X
		}
		if t == ident {
			return nil
		}
		return inferExprType(decl.Type)
	case *ast.ValueSpec:
		return inferValueType(decl, ident.Name)
	case *ast.AssignStmt:
		return inferAssignType(decl, ident.Name)
	}
	return nil
}
// inferValueType resolves the type of name within a var/const spec. An
// explicit type on the spec wins. Otherwise the initializer at the name's
// position is used, falling back to multi-value handling (a single call or
// similar expression initializing several names) when the counts differ.
// A spec with no initializers at all — e.g. an implicitly repeated constant
// inside an iota block, `const (a = iota; b)` — resolves to nil.
func inferValueType(value *ast.ValueSpec, name string) ast.Expr {
	if value.Type != nil {
		return inferExprType(value.Type)
	}
	index := -1
	for i := range value.Names {
		if value.Names[i].Name == name {
			index = i
		}
	}
	if index < 0 {
		return nil
	}
	// Fix: a ValueSpec may carry no Values (implicit const repetition in an
	// iota group); indexing Values[0] below would panic with index out of
	// range in that case.
	if len(value.Values) == 0 {
		return nil
	}
	if len(value.Names) == len(value.Values) {
		return inferExprType(value.Values[index])
	}
	return inferAssignMultiType(value.Values[0], index)
}
// inferAssignType resolves the type of the variable called name on the
// left-hand side of an assignment. The parser represents a range clause as an
// assignment whose single right-hand side is a unary expression with
// Op == token.RANGE; those get dedicated key/value handling. Otherwise the
// assignment is resolved positionally (1:1) or as a multi-value assignment
// from a single expression.
func inferAssignType(assign *ast.AssignStmt, name string) ast.Expr {
	index := -1
	for i, lhs := range assign.Lhs {
		if id, ok := lhs.(*ast.Ident); ok && id.Name == name {
			index = i
		}
	}
	if index < 0 {
		return nil
	}
	if len(assign.Rhs) == 1 {
		if ue, ok := assign.Rhs[0].(*ast.UnaryExpr); ok && ue.Op == token.RANGE {
			if t, handled := inferRangeVarType(inferExprType(assign.Rhs[0]), index); handled {
				return t
			}
		}
	}
	if len(assign.Lhs) == len(assign.Rhs) {
		return inferExprType(assign.Rhs[index])
	}
	return inferAssignMultiType(assign.Rhs[0], index)
}

// inferRangeVarType resolves the type of the range variable at position index
// (0 = key or index, 1 = value) given the inferred type of the ranged
// expression. The second result reports whether the combination is one this
// function recognizes; when false, the caller falls back to plain assignment
// handling.
func inferRangeVarType(ranged ast.Expr, index int) (ast.Expr, bool) {
	switch ranged := ranged.(type) {
	case *ast.ArrayType:
		if index == 0 {
			return ast.NewIdent("int"), true
		}
		if index == 1 {
			return inferExprType(ranged.Elt), true
		}
	case *ast.MapType:
		if index == 0 {
			return inferExprType(ranged.Key), true
		}
		if index == 1 {
			return inferExprType(ranged.Value), true
		}
	case *ast.Ident:
		if ranged.Name == "string" {
			if index == 0 {
				return ast.NewIdent("int"), true
			}
			if index == 1 {
				return ast.NewIdent("rune"), true
			}
		}
	case *ast.ChanType:
		if index == 0 {
			return inferExprType(ranged.Value), true
		}
	}
	return nil, false
}
// inferAssignMultiType resolves the type of the value at position index of a
// multi-value right-hand side: a type assertion with ok (v, ok := x.(T)), a
// multi-result call, a map index with ok, or a channel receive with ok.
// Returns nil when the position cannot be resolved.
func inferAssignMultiType(rhs ast.Expr, index int) ast.Expr {
	switch rhs := rhs.(type) {
	case *ast.TypeAssertExpr:
		switch index {
		case 0:
			return inferExprType(rhs.Type)
		case 1:
			return ast.NewIdent("bool")
		}
	case *ast.CallExpr:
		fun, ok := inferExprType(rhs.Fun).(*ast.FuncType)
		if !ok || fun.Results == nil {
			// Fix: a function type with no results has a nil Results list;
			// dereferencing it unconditionally panicked.
			break
		}
		for _, res := range fun.Results.List {
			// Fix: an unnamed result field (e.g. each field of
			// `func f() (int, error)`) has an empty Names list but still
			// occupies one result slot; ranging over res.Names skipped such
			// fields entirely, so multi-value calls with unnamed results
			// always resolved to nil.
			n := len(res.Names)
			if n == 0 {
				n = 1
			}
			if index < n {
				return inferExprType(res.Type)
			}
			index -= n
		}
	case *ast.IndexExpr:
		if mt, ok := inferExprType(rhs.X).(*ast.MapType); ok {
			switch index {
			case 0:
				return inferExprType(mt.Value)
			case 1:
				return ast.NewIdent("bool")
			}
		}
	case *ast.UnaryExpr:
		if ct, ok := inferExprType(rhs.X).(*ast.ChanType); ok {
			switch index {
			case 0:
				return inferExprType(ct.Value)
			case 1:
				return ast.NewIdent("bool")
			}
		}
	}
	return nil
}
+80 -32
View File
@@ -19,15 +19,15 @@ var (
// set (regardless of its value). This is a global option and affects all
// colors. For more control over each color block use the methods
// DisableColor() individually.
NoColor = noColorIsSet() || os.Getenv("TERM") == "dumb" ||
(!isatty.IsTerminal(os.Stdout.Fd()) && !isatty.IsCygwinTerminal(os.Stdout.Fd()))
NoColor = noColorIsSet() || os.Getenv("TERM") == "dumb" || !stdoutIsTerminal()
// Output defines the standard output of the print functions. By default,
// os.Stdout is used.
Output = colorable.NewColorableStdout()
// stdOut() is used.
Output = stdOut()
// Error defines a color supporting writer for os.Stderr.
Error = colorable.NewColorableStderr()
// Error defines the standard error of the print functions. By default,
// stdErr() is used.
Error = stdErr()
// colorsCache is used to reduce the count of created Color objects and
// allows to reuse already created objects with required Attribute.
@@ -40,6 +40,33 @@ func noColorIsSet() bool {
return os.Getenv("NO_COLOR") != ""
}
// stdoutIsTerminal returns true if os.Stdout is a terminal.
// Returns false if os.Stdout is nil (e.g., when running as a Windows service).
func stdoutIsTerminal() bool {
if os.Stdout == nil {
return false
}
return isatty.IsTerminal(os.Stdout.Fd()) || isatty.IsCygwinTerminal(os.Stdout.Fd())
}
// stdOut returns a writer for color output.
// Returns io.Discard if os.Stdout is nil (e.g., when running as a Windows service).
func stdOut() io.Writer {
if os.Stdout == nil {
return io.Discard
}
return colorable.NewColorableStdout()
}
// stdErr returns a writer for color error output.
// Returns io.Discard if os.Stderr is nil (e.g., when running as a Windows service).
func stdErr() io.Writer {
if os.Stderr == nil {
return io.Discard
}
return colorable.NewColorableStderr()
}
// Color defines a custom color object which is defined by SGR parameters.
type Color struct {
params []Attribute
@@ -220,26 +247,30 @@ func (c *Color) unset() {
// a low-level function, and users should use the higher-level functions, such
// as color.Fprint, color.Print, etc.
func (c *Color) SetWriter(w io.Writer) *Color {
_, _ = c.setWriter(w)
return c
}
func (c *Color) setWriter(w io.Writer) (int, error) {
if c.isNoColorSet() {
return c
return 0, nil
}
fmt.Fprint(w, c.format())
return c
return fmt.Fprint(w, c.format())
}
// UnsetWriter resets all escape attributes and clears the output with the give
// io.Writer. Usually should be called after SetWriter().
func (c *Color) UnsetWriter(w io.Writer) {
_, _ = c.unsetWriter(w)
}
func (c *Color) unsetWriter(w io.Writer) (int, error) {
if c.isNoColorSet() {
return
return 0, nil
}
if NoColor {
return
}
fmt.Fprintf(w, "%s[%dm", escape, Reset)
return fmt.Fprintf(w, "%s[%dm", escape, Reset)
}
// Add is used to chain SGR parameters. Use as many as parameters to combine
@@ -255,10 +286,20 @@ func (c *Color) Add(value ...Attribute) *Color {
// On Windows, users should wrap w with colorable.NewColorable() if w is of
// type *os.File.
func (c *Color) Fprint(w io.Writer, a ...interface{}) (n int, err error) {
c.SetWriter(w)
defer c.UnsetWriter(w)
n, err = c.setWriter(w)
if err != nil {
return n, err
}
return fmt.Fprint(w, a...)
nn, err := fmt.Fprint(w, a...)
n += nn
if err != nil {
return
}
nn, err = c.unsetWriter(w)
n += nn
return n, err
}
// Print formats using the default formats for its operands and writes to
@@ -278,10 +319,20 @@ func (c *Color) Print(a ...interface{}) (n int, err error) {
// On Windows, users should wrap w with colorable.NewColorable() if w is of
// type *os.File.
func (c *Color) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) {
c.SetWriter(w)
defer c.UnsetWriter(w)
n, err = c.setWriter(w)
if err != nil {
return n, err
}
return fmt.Fprintf(w, format, a...)
nn, err := fmt.Fprintf(w, format, a...)
n += nn
if err != nil {
return
}
nn, err = c.unsetWriter(w)
n += nn
return n, err
}
// Printf formats according to a format specifier and writes to standard output.
@@ -475,29 +526,26 @@ func (c *Color) Equals(c2 *Color) bool {
if c == nil || c2 == nil {
return false
}
if len(c.params) != len(c2.params) {
return false
}
counts := make(map[Attribute]int, len(c.params))
for _, attr := range c.params {
if !c2.attrExists(attr) {
counts[attr]++
}
for _, attr := range c2.params {
if counts[attr] == 0 {
return false
}
counts[attr]--
}
return true
}
func (c *Color) attrExists(a Attribute) bool {
for _, attr := range c.params {
if attr == a {
return true
}
}
return false
}
func boolPtr(v bool) *bool {
return &v
}
+3
View File
@@ -9,6 +9,9 @@ import (
func init() {
// Opt-in for ansi color support for current process.
// https://learn.microsoft.com/en-us/windows/console/console-virtual-terminal-sequences#output-sequences
if os.Stdout == nil {
return
}
var outMode uint32
out := windows.Handle(os.Stdout.Fd())
if err := windows.GetConsoleMode(out, &outMode); err != nil {
@@ -9,9 +9,10 @@ import (
"strings"
"unicode/utf8"
"golang.org/x/tools/go/analysis"
"github.com/godoc-lint/godoc-lint/pkg/model"
"github.com/godoc-lint/godoc-lint/pkg/util"
"golang.org/x/tools/go/analysis"
)
const maxLenRule = model.MaxLenRule
@@ -43,8 +43,10 @@ func (r *PkgDocChecker) Apply(actx *model.AnalysisContext) error {
return nil
}
const commandPkgName = "main"
const commandTestPkgName = "main_test"
const (
commandPkgName = "main"
commandTestPkgName = "main_test"
)
func checkPkgDocRule(actx *model.AnalysisContext) {
if !actx.Config.IsAnyRuleApplicable(model.RuleSet{}.Add(pkgDocRule)) {
@@ -115,7 +117,7 @@ func checkPkgDocRule(actx *model.AnalysisContext) {
}
}
func checkPkgDocPrefix(text string, packageName string) (string, bool) {
func checkPkgDocPrefix(text, packageName string) (string, bool) {
expectedPrefix := "Package " + packageName
if !strings.HasPrefix(text, expectedPrefix) {
return expectedPrefix, false
@@ -117,10 +117,12 @@ func (r *StartWithNameChecker) Apply(actx *model.AnalysisContext) error {
return nil
}
var startPattern = regexp.MustCompile(`^(?:(A|a|AN|An|an|THE|The|the) )?(?P<symbol_name>.+?)\b`)
var startPatternSymbolNameIndex = startPattern.SubexpIndex("symbol_name")
var (
startPattern = regexp.MustCompile(`^(?:(A|a|AN|An|an|THE|The|the) )?(?P<symbol_name>.+?)\b`)
startPatternSymbolNameIndex = startPattern.SubexpIndex("symbol_name")
)
func matchSymbolName(text string, symbol string) bool {
func matchSymbolName(text, symbol string) bool {
head := strings.SplitN(text, "\n", 2)[0]
head, _ = strings.CutPrefix(head, "\r")
head = strings.SplitN(head, " ", 2)[0]
File diff suppressed because one or more lines are too long
@@ -5,6 +5,7 @@ package stdlib_doclink
import (
"fmt"
gdc "go/doc/comment"
"maps"
"regexp"
"slices"
"strconv"
@@ -190,7 +191,7 @@ type potentialDoclink struct {
var potentialDoclinkRE = regexp.MustCompile(`(?m)(?:^|\s)(\*?)([a-zA-Z_][a-zA-Z0-9_]*(?:/[a-zA-Z_][a-zA-Z0-9_]*)*)\.([a-zA-Z0-9_]+)(?:\.([a-zA-Z0-9_]+))?\b`)
func findPotentialDoclinks(pi *packageImports, text string) []potentialDoclink {
func findPotentialDoclinks(pi *packageImports, text string) []*potentialDoclink {
stdlib := stdlib()
m := make(map[string]*potentialDoclink, 5)
@@ -230,7 +231,7 @@ func findPotentialDoclinks(pi *packageImports, text string) []potentialDoclink {
kind: kind,
}
}
m[originalNoStar].count = m[originalNoStar].count + 1
m[originalNoStar].count++
} else if pkg != "" && name1 != "" && name2 == "" {
// pkg.name (= pkg.name1)
@@ -259,7 +260,7 @@ func findPotentialDoclinks(pi *packageImports, text string) []potentialDoclink {
kind: kind,
}
}
m[originalNoStar].count = m[originalNoStar].count + 1
m[originalNoStar].count++
}
}
@@ -267,14 +268,9 @@ func findPotentialDoclinks(pi *packageImports, text string) []potentialDoclink {
return nil
}
result := make([]potentialDoclink, 0, len(m))
for _, v := range m {
result = append(result, *v)
}
slices.SortFunc(result, func(a, b potentialDoclink) int {
return slices.SortedFunc(maps.Values(m), func(a, b *potentialDoclink) int {
return strings.Compare(a.originalNoStar, b.originalNoStar)
})
return result
}
// tryResolveImportPath tries to resolve the given package alias/name to its
+1 -1
View File
@@ -319,7 +319,7 @@ func transferPrimitiveOptions(target *model.RuleOptions, source *PlainRuleOption
transferIfNotNil(&target.NoUnusedLinkIncludeTests, source.NoUnusedLinkIncludeTests)
}
func transferIfNotNil[T any](dst *T, src *T) {
func transferIfNotNil[T any](dst, src *T) {
if src == nil {
return
}
+4 -2
View File
@@ -54,8 +54,10 @@ func (i *Inspector) GetAnalyzer() *analysis.Analyzer {
return i.analyzer
}
var topLevelOrphanCommentGroupPattern = regexp.MustCompile(`(?m)(?:^//.*\r?\n)+(?:\r?\n|\z)`)
var disableDirectivePattern = regexp.MustCompile(`(?m)//godoclint:disable(?: *([^\r\n]+))?\r?$`)
var (
topLevelOrphanCommentGroupPattern = regexp.MustCompile(`(?m)(?:^//.*\r?\n)+(?:\r?\n|\z)`)
disableDirectivePattern = regexp.MustCompile(`(?m)//godoclint:disable(?: *([^\r\n]+))?\r?$`)
)
func (i *Inspector) run(pass *analysis.Pass) (any, error) {
if len(pass.Files) == 0 {
@@ -1,11 +0,0 @@
# analysisflags
Extracted from `/go/analysis/internal/analysisflags` (related to `checker`).
This is just a copy of the code without any changes.
## History
- https://github.com/golangci/golangci-lint/pull/6076
- sync with https://github.com/golang/tools/blob/v0.37.0/go/analysis/internal/analysisflags
- https://github.com/golangci/golangci-lint/pull/5576
- sync with https://github.com/golang/tools/blob/v0.28.0/go/analysis/internal/analysisflags
@@ -1,11 +0,0 @@
# analysisinternal
Extracted from `/internal/analysisinternal/` (related to `checker`).
This is just a copy of the code without any changes.
## History
- https://github.com/golangci/golangci-lint/pull/6076
- sync with https://github.com/golang/tools/blob/v0.37.0/internal/analysisinternal/
- https://github.com/golangci/golangci-lint/pull/5576
- sync with https://github.com/golang/tools/blob/v0.28.0/internal/analysisinternal/
@@ -2,9 +2,9 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package analysisinternal provides gopls' internal analyses with a
// number of helper functions that operate on typed syntax trees.
package analysisinternal
package driverutil
// This file defines helpers for implementing [analysis.Pass.ReadFile].
import (
"fmt"
@@ -0,0 +1,25 @@
# driverutil
Extracted from `/internal/analysis/driverutil/` (related to `checker`).
This is just a copy of `readfile.go` and `url.go` without any changes.
Previously, it was `analysisinternal` and `analysisflags` packages.
## History
- https://github.com/golangci/golangci-lint/pull/6434
- sync with https://github.com/golang/tools/blob/v0.43.0/internal/analysis/driverutil/readfile.go
## analysisinternal History
- https://github.com/golangci/golangci-lint/pull/6076
- sync with https://github.com/golang/tools/blob/v0.37.0/internal/analysisinternal/
- https://github.com/golangci/golangci-lint/pull/5576
- sync with https://github.com/golang/tools/blob/v0.28.0/internal/analysisinternal/
## analysisflags History
- https://github.com/golangci/golangci-lint/pull/6076
- sync with https://github.com/golang/tools/blob/v0.37.0/go/analysis/internal/analysisflags
- https://github.com/golangci/golangci-lint/pull/5576
- sync with https://github.com/golang/tools/blob/v0.28.0/go/analysis/internal/analysisflags
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package analysisflags
package driverutil
import (
"fmt"
@@ -270,13 +270,9 @@ func (r *runner) analyze(pkgs []*packages.Package, analyzers []*analysis.Analyze
for _, lp := range loadingPackages {
if lp.isInitial {
wg.Add(1)
go func(lp *loadingPackage) {
wg.Go(func() {
lp.analyzeRecursive(ctx, cancel, r.loadMode, loadSem)
wg.Done()
}(lp)
})
}
}
@@ -63,9 +63,10 @@ func (act *action) markDepsForAnalyzingSource() {
// Horizontal deps (analyzer.Requires) must be loaded from source and analyzed before analyzing
// this action.
for _, dep := range act.Deps {
if dep.Package == act.Package {
if dep.Package == act.Package && !dep.needAnalyzeSource {
// Analyze source only for horizontal dependencies, e.g. from "buildssa".
dep.needAnalyzeSource = true // can't be set in parallel
dep.markDepsForAnalyzingSource()
}
}
}
@@ -96,23 +96,22 @@ func (act *action) loadPersistedFacts() bool {
for _, f := range facts {
if f.Path == "" { // this is a package fact
key := packageFactKey{act.Package.Types, act.factType(f.Fact)}
key := packageFactKey{pkg: act.Package.Types, typ: act.factType(f.Fact)}
act.packageFacts[key] = f.Fact
continue
}
obj, err := objectpath.Object(act.Package.Types, objectpath.Path(f.Path))
if err != nil {
// Be lenient about these errors. For example, when
// analyzing io/ioutil from source, we may get a fact
// for methods on the devNull type, and objectpath
// will happily create a path for them. However, when
// we later load io/ioutil from export data, the path
// no longer resolves.
// Be lenient about these errors.
// For example, when analyzing io/ioutil from source,
// we may get a fact for methods on the devNull type,
// and objectpath will happily create a path for them.
// However,
// when we later load io/ioutil from export data,
// the path no longer resolves.
//
// If an exported type embeds the unexported type,
// then (part of) the unexported type will become part
// of the type information and our path will resolve
// again.
// then (part of) the unexported type will become part of the type information and our path will resolve again.
continue
}
factKey := objectFactKey{obj, act.factType(f.Fact)}
@@ -2,7 +2,7 @@
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//
// Altered copy of https://github.com/golang/tools/blob/v0.28.0/go/analysis/internal/checker/checker.go
// Altered copy of https://github.com/golang/tools/blob/v0.43.0/go/analysis/checker/checker.go
package goanalysis
@@ -19,8 +19,7 @@ import (
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
"github.com/golangci/golangci-lint/v2/internal/x/tools/analysisflags"
"github.com/golangci/golangci-lint/v2/internal/x/tools/analysisinternal"
"github.com/golangci/golangci-lint/v2/internal/x/tools/driverutil"
"github.com/golangci/golangci-lint/v2/pkg/goanalysis/pkgerrors"
)
@@ -134,9 +133,7 @@ func (act *action) analyze() {
module := &analysis.Module{} // possibly empty (non nil) in go/analysis drivers.
if mod := act.Package.Module; mod != nil {
module.Path = mod.Path
module.Version = mod.Version
module.GoVersion = mod.GoVersion
module = analysisModuleFromPackagesModule(mod)
}
// Run the analysis.
@@ -161,7 +158,7 @@ func (act *action) analyze() {
AllObjectFacts: act.AllObjectFacts,
AllPackageFacts: act.AllPackageFacts,
}
pass.ReadFile = analysisinternal.CheckedReadFile(pass, os.ReadFile)
pass.ReadFile = driverutil.CheckedReadFile(pass, os.ReadFile)
act.pass = pass
act.runner.passToPkgGuard.Lock()
@@ -199,7 +196,7 @@ func (act *action) analyze() {
// resolve diagnostic URLs
for i := range act.Diagnostics {
url, err := analysisflags.ResolveURL(act.Analyzer, act.Diagnostics[i])
url, err := driverutil.ResolveURL(act.Analyzer, act.Diagnostics[i])
if err != nil {
return nil, err
}
@@ -324,7 +321,7 @@ func exportedFrom(obj types.Object, pkg *types.Package) bool {
switch obj := obj.(type) {
case *types.Func:
return obj.Exported() && obj.Pkg() == pkg ||
obj.Type().(*types.Signature).Recv() != nil
obj.Signature().Recv() != nil
case *types.Var:
if obj.IsField() {
return true
@@ -387,8 +384,8 @@ func (act *action) exportObjectFact(obj types.Object, fact analysis.Fact) {
// See documentation at AllObjectFacts field of [analysis.Pass].
func (act *action) AllObjectFacts() []analysis.ObjectFact {
facts := make([]analysis.ObjectFact, 0, len(act.objectFacts))
for k := range act.objectFacts {
facts = append(facts, analysis.ObjectFact{Object: k.obj, Fact: act.objectFacts[k]})
for k, fact := range act.objectFacts {
facts = append(facts, analysis.ObjectFact{Object: k.obj, Fact: fact})
}
return facts
}
@@ -427,7 +424,7 @@ func (act *action) exportPackageFact(fact analysis.Fact) {
// NOTE(ldez) altered: add receiver to handle logs.
func (act *action) factType(fact analysis.Fact) reflect.Type {
t := reflect.TypeOf(fact)
if t.Kind() != reflect.Ptr {
if t.Kind() != reflect.Pointer {
act.runner.log.Fatalf("invalid Fact type: got %T, want pointer", fact)
}
return t
@@ -445,3 +442,30 @@ func (act *action) AllPackageFacts() []analysis.PackageFact {
}
return facts
}
// NOTE(ldez) no alteration.
func analysisModuleFromPackagesModule(mod *packages.Module) *analysis.Module {
if mod == nil {
return nil
}
var modErr *analysis.ModuleError
if mod.Error != nil {
modErr = &analysis.ModuleError{
Err: mod.Error.Err,
}
}
return &analysis.Module{
Path: mod.Path,
Version: mod.Version,
Replace: analysisModuleFromPackagesModule(mod.Replace),
Time: mod.Time,
Main: mod.Main,
Indirect: mod.Indirect,
Dir: mod.Dir,
GoMod: mod.GoMod,
GoVersion: mod.GoVersion,
Error: modErr,
}
}
@@ -46,14 +46,10 @@ func (lp *loadingPackage) analyzeRecursive(ctx context.Context, cancel context.C
// Load the direct dependencies, in parallel.
var wg sync.WaitGroup
wg.Add(len(lp.imports))
for _, imp := range lp.imports {
go func(imp *loadingPackage) {
wg.Go(func() {
imp.analyzeRecursive(ctx, cancel, loadMode, loadSem)
wg.Done()
}(imp)
})
}
wg.Wait()
@@ -517,7 +513,7 @@ func sizeOfValueTreeBytes(v any) int {
func sizeOfReflectValueTreeBytes(rv reflect.Value, visitedPtrs map[uintptr]struct{}) int {
switch rv.Kind() {
case reflect.Ptr:
case reflect.Pointer:
ptrSize := int(rv.Type().Size())
if rv.IsNil() {
return ptrSize
@@ -25,17 +25,15 @@ func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.
perPkgIssues[issue.Pkg] = append(perPkgIssues[issue.Pkg], issue)
}
var savedIssuesCount int64 = 0
var savedIssuesCount int64
lintResKey := getIssuesCacheKey(analyzers)
workerCount := runtime.GOMAXPROCS(-1)
var wg sync.WaitGroup
wg.Add(workerCount)
pkgCh := make(chan *packages.Package, len(allPkgs))
for range workerCount {
go func() {
defer wg.Done()
wg.Go(func() {
for pkg := range pkgCh {
pkgIssues := perPkgIssues[pkg]
encodedIssues := make([]EncodingIssue, 0, len(pkgIssues))
@@ -59,7 +57,7 @@ func saveIssuesToCache(allPkgs []*packages.Package, pkgsFromCache map[*packages.
issuesCacheDebugf("Saved package %s issues (%d) to cache", pkg, len(pkgIssues))
}
}
}()
})
}
for _, pkg := range allPkgs {
@@ -94,12 +92,10 @@ func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context,
workerCount := runtime.GOMAXPROCS(-1)
var wg sync.WaitGroup
wg.Add(workerCount)
pkgCh := make(chan *packages.Package, len(pkgs))
for range workerCount {
go func() {
defer wg.Done()
wg.Go(func() {
for pkg := range pkgCh {
var pkgIssues []*EncodingIssue
err := lintCtx.PkgCache.Get(pkg, cache.HashModeNeedAllDeps, lintResKey, &pkgIssues)
@@ -128,7 +124,7 @@ func loadIssuesFromCache(pkgs []*packages.Package, lintCtx *linter.Context,
}
cacheRes.issues = issues
}
}()
})
}
for _, pkg := range pkgs {
@@ -97,6 +97,9 @@ func (c *Runner) walk(root string, stdout *os.File) error {
return err
}
//nolint:gosec // See explanation below.
// `path` contains the `root` but when using `r, err := os.OpenRoot(root)`, this part is not inside the file tree of `r`.
// `filepath.Rel()` can be used but it seems overkill in the context and doesn't work well with a file.
in, err := os.Open(path)
if err != nil {
return err
@@ -14,18 +14,10 @@ func New(settings *config.PreallocSettings) *goanalysis.Linter {
Name: "prealloc",
Doc: "Find slice declarations that could potentially be pre-allocated",
Run: func(pass *analysis.Pass) (any, error) {
runPreAlloc(pass, settings)
pkg.Check(pass, settings.Simple, settings.RangeLoops, settings.ForLoops)
return nil, nil
},
}).
WithLoadMode(goanalysis.LoadModeSyntax)
}
func runPreAlloc(pass *analysis.Pass, settings *config.PreallocSettings) {
hints := pkg.Check(pass.Files, settings.Simple, settings.RangeLoops, settings.ForLoops)
for _, hint := range hints {
pass.Report(hint)
}
WithLoadMode(goanalysis.LoadModeTypesInfo)
}
@@ -272,7 +272,7 @@ func safeTomlSlice(r []any) []any {
}
// This element is not exported by revive, so we need copy the code.
// Extracted from https://github.com/mgechev/revive/blob/v1.13.0/config/config.go#L16
// Extracted from https://github.com/mgechev/revive/blob/v1.15.0/config/config.go#L16
var defaultRules = []lint.Rule{
&rule.VarDeclarationsRule{},
&rule.PackageCommentsRule{},
@@ -324,6 +324,7 @@ var allRules = append([]lint.Rule{
&rule.EnforceRepeatedArgTypeStyleRule{},
&rule.EnforceSliceStyleRule{},
&rule.EnforceSwitchStyleRule{},
&rule.EpochNamingRule{},
&rule.FileHeaderRule{},
&rule.FileLengthLimitRule{},
&rule.FilenameFormatRule{},
@@ -350,6 +351,7 @@ var allRules = append([]lint.Rule{
&rule.NestedStructs{},
&rule.OptimizeOperandsOrderRule{},
&rule.PackageDirectoryMismatchRule{},
&rule.PackageNamingRule{},
&rule.RangeValAddress{},
&rule.RangeValInClosureRule{},
&rule.RedundantBuildTagRule{},
@@ -374,6 +376,7 @@ var allRules = append([]lint.Rule{
&rule.UseFmtPrintRule{},
&rule.UselessBreak{},
&rule.UselessFallthroughRule{},
&rule.UseSlicesSort{},
&rule.UseWaitGroupGoRule{},
&rule.WaitGroupByValueRule{},
}, defaultRules...)
@@ -8,6 +8,6 @@ import (
func New() *goanalysis.Linter {
return goanalysis.
NewLinterFromAnalyzer(analyzer.NewAnalyzer()).
NewLinterFromAnalyzer(analyzer.NewDeferOnlyAnalyzer()).
WithLoadMode(goanalysis.LoadModeTypesInfo)
}
@@ -132,7 +132,7 @@ func NewLinterBuilder() *LinterBuilder {
}
// Build loads all the "internal" linters.
// The configuration is use for the linter settings.
// The configuration is used for the linter settings.
func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
if cfg == nil {
return nil, nil
@@ -557,6 +557,7 @@ func (LinterBuilder) Build(cfg *config.Config) ([]*linter.Config, error) {
linter.NewConfig(prealloc.New(&cfg.Linters.Settings.Prealloc)).
WithSince("v1.19.0").
WithLoadForGoAnalysis().
WithURL("https://github.com/alexkohler/prealloc"),
linter.NewConfig(predeclared.New(&cfg.Linters.Settings.Predeclared)).
@@ -32,7 +32,7 @@ func NewExclusionRules(log logutils.Log, lines *fsutils.LineCache, cfg *config.L
skippedCounter: map[string]int{},
}
excludeRules := slices.Concat(slices.Clone(cfg.Rules), getLinterExclusionPresets(cfg.Presets))
excludeRules := slices.Concat(cfg.Rules, getLinterExclusionPresets(cfg.Presets))
p.rules = parseRules(excludeRules, "", newExcludeRule)
@@ -3,6 +3,7 @@ package processors
import (
"go/parser"
"go/token"
"slices"
"strings"
"sync"
"time"
@@ -41,14 +42,14 @@ func NewFilenameUnadjuster(pkgs []*packages.Package, log logutils.Log) *Filename
startedAt := time.Now()
var wg sync.WaitGroup
wg.Add(len(pkgs))
for _, pkg := range pkgs {
go func(pkg *packages.Package) {
// It's important to call func here to run GC
processUnadjusterPkg(&m, pkg, log)
wg.Done()
}(pkg)
for chunk := range slices.Chunk(pkgs, len(pkgs)/2000+1) {
wg.Go(func() {
for _, pkg := range chunk {
// It's important to call func here to run GC
processUnadjusterPkg(&m, pkg, log)
}
})
}
wg.Wait()
+3
View File
@@ -17,6 +17,9 @@ var DefaultExcludedSymbols = []string{
"(*bytes.Buffer).WriteRune",
"(*bytes.Buffer).WriteString",
// crypto
"crypto/rand.Read", // https://github.com/golang/go/issues/66821
// fmt
"fmt.Print",
"fmt.Printf",
+13 -1
View File
@@ -1,4 +1,4 @@
// Package config implements revive's configuration data structures and related methods
// Package config implements revive's configuration data structures and related methods.
package config
import (
@@ -118,6 +118,7 @@ var allRules = append([]lint.Rule{
&rule.UnnecessaryIfRule{},
&rule.EpochNamingRule{},
&rule.UseSlicesSort{},
&rule.PackageNamingRule{},
}, defaultRules...)
// allFormatters is a list of all available formatters to output the linting results.
@@ -204,6 +205,13 @@ func parseConfig(data []byte, config *lint.Config) error {
return nil
}
func validateConfig(config *lint.Config) error {
if config.EnableAllRules && config.EnableDefaultRules {
return errors.New("config options enableAllRules and enableDefaultRules cannot be combined")
}
return nil
}
func normalizeConfig(config *lint.Config) {
if len(config.Rules) == 0 {
config.Rules = map[string]lint.RuleConfig{}
@@ -262,6 +270,10 @@ func GetConfig(configPath string) (*lint.Config, error) {
config = defaultConfig()
}
if err := validateConfig(config); err != nil {
return nil, err
}
normalizeConfig(config)
return config, nil
}
+1 -1
View File
@@ -8,7 +8,7 @@ import (
"github.com/mgechev/revive/lint"
)
// Checkstyle is an implementation of the Formatter interface
// Checkstyle is an implementation of the [lint.Formatter] interface
// which formats the errors to Checkstyle-like format.
type Checkstyle struct {
Metadata lint.FormatterMetadata
+5 -2
View File
@@ -7,7 +7,7 @@ import (
"github.com/mgechev/revive/lint"
)
// Default is an implementation of the Formatter interface
// Default is an implementation of the [lint.Formatter] interface
// which formats the errors to text.
type Default struct {
Metadata lint.FormatterMetadata
@@ -23,7 +23,10 @@ func (*Default) Format(failures <-chan lint.Failure, _ lint.Config) (string, err
var buf bytes.Buffer
prefix := ""
for failure := range failures {
fmt.Fprintf(&buf, "%s%v: %s", prefix, failure.Position.Start, failure.Failure)
_, err := fmt.Fprintf(&buf, "%s%v: %s", prefix, failure.Position.Start, failure.Failure)
if err != nil {
return "", err
}
prefix = "\n"
}
return buf.String(), nil
+52 -37
View File
@@ -14,8 +14,8 @@ import (
"github.com/mgechev/revive/lint"
)
// Friendly is an implementation of the Formatter interface
// which formats the errors to JSON.
// Friendly is an implementation of the [lint.Formatter] interface
// which formats the errors to a friendly, human-readable format.
type Friendly struct {
Metadata lint.FormatterMetadata
}
@@ -32,9 +32,17 @@ func (f *Friendly) Format(failures <-chan lint.Failure, config lint.Config) (str
warningMap := map[string]int{}
totalErrors := 0
totalWarnings := 0
warningEmoji := color.YellowString("⚠")
errorEmoji := color.RedString("✘")
for failure := range failures {
sev := severity(config, failure)
f.printFriendlyFailure(&buf, failure, sev)
firstCol := warningEmoji
if sev == lint.SeverityError {
firstCol = errorEmoji
}
if err := f.printFriendlyFailure(&buf, firstCol, failure); err != nil {
return "", err
}
switch sev {
case lint.SeverityWarning:
warningMap[failure.RuleName]++
@@ -45,33 +53,38 @@ func (f *Friendly) Format(failures <-chan lint.Failure, config lint.Config) (str
}
}
f.printSummary(&buf, totalErrors, totalWarnings)
f.printStatistics(&buf, color.RedString("Errors:"), errorMap)
f.printStatistics(&buf, color.YellowString("Warnings:"), warningMap)
emoji := warningEmoji
if totalErrors > 0 {
emoji = errorEmoji
}
if err := f.printSummary(&buf, emoji, totalErrors, totalWarnings); err != nil {
return "", err
}
if err := f.printStatistics(&buf, color.RedString("Errors:"), errorMap); err != nil {
return "", err
}
if err := f.printStatistics(&buf, color.YellowString("Warnings:"), warningMap); err != nil {
return "", err
}
return buf.String(), nil
}
func (f *Friendly) printFriendlyFailure(sb *strings.Builder, failure lint.Failure, severity lint.Severity) {
f.printHeaderRow(sb, failure, severity)
f.printFilePosition(sb, failure)
sb.WriteString("\n\n")
}
var (
errorEmoji = color.RedString("✘")
warningEmoji = color.YellowString("⚠")
)
func (*Friendly) printHeaderRow(sb *strings.Builder, failure lint.Failure, severity lint.Severity) {
emoji := warningEmoji
if severity == lint.SeverityError {
emoji = errorEmoji
func (f *Friendly) printFriendlyFailure(sb *strings.Builder, firstColumn string, failure lint.Failure) error {
f.printHeaderRow(sb, firstColumn, failure)
if err := f.printFilePosition(sb, failure); err != nil {
return err
}
sb.WriteString(table([][]string{{emoji, ruleDescriptionURL(failure.RuleName), color.GreenString(failure.Failure)}}))
_, err := sb.WriteString("\n\n")
return err
}
func (*Friendly) printFilePosition(sb *strings.Builder, failure lint.Failure) {
fmt.Fprintf(sb, " %s:%d:%d", failure.Filename(), failure.Position.Start.Line, failure.Position.Start.Column)
func (*Friendly) printHeaderRow(sb *strings.Builder, firstColumn string, failure lint.Failure) {
sb.WriteString(table([][]string{{firstColumn, ruleDescriptionURL(failure.RuleName), color.GreenString(failure.Failure)}}))
}
func (*Friendly) printFilePosition(sb *strings.Builder, failure lint.Failure) error {
_, err := fmt.Fprintf(sb, " %s:%d:%d", failure.Filename(), failure.Position.Start.Line, failure.Position.Start.Column)
return err
}
type statEntry struct {
@@ -79,11 +92,7 @@ type statEntry struct {
failures int
}
func (*Friendly) printSummary(w io.Writer, errors, warnings int) {
emoji := warningEmoji
if errors > 0 {
emoji = errorEmoji
}
func (*Friendly) printSummary(w io.Writer, firstColumn string, errors, warnings int) error {
problemsLabel := "problems"
if errors+warnings == 1 {
problemsLabel = "problem"
@@ -98,18 +107,19 @@ func (*Friendly) printSummary(w io.Writer, errors, warnings int) {
}
str := fmt.Sprintf("%d %s (%d %s, %d %s)", errors+warnings, problemsLabel, errors, errorsLabel, warnings, warningsLabel)
if errors > 0 {
fmt.Fprintf(w, "%s %s\n\n", emoji, color.RedString(str))
return
_, err := fmt.Fprintf(w, "%s %s\n\n", firstColumn, color.RedString(str))
return err
}
if warnings > 0 {
fmt.Fprintf(w, "%s %s\n\n", emoji, color.YellowString(str))
return
_, err := fmt.Fprintf(w, "%s %s\n\n", firstColumn, color.YellowString(str))
return err
}
return nil
}
func (*Friendly) printStatistics(w io.Writer, header string, stats map[string]int) {
func (*Friendly) printStatistics(w io.Writer, header string, stats map[string]int) error {
if len(stats) == 0 {
return
return nil
}
data := make([]statEntry, 0, len(stats))
for name, total := range stats {
@@ -122,8 +132,13 @@ func (*Friendly) printStatistics(w io.Writer, header string, stats map[string]in
for _, entry := range data {
formatted = append(formatted, []string{color.GreenString(fmt.Sprintf("%d", entry.failures)), entry.name})
}
fmt.Fprintln(w, header)
fmt.Fprintln(w, table(formatted))
if _, err := fmt.Fprintln(w, header); err != nil {
return err
}
if _, err := fmt.Fprintln(w, table(formatted)); err != nil {
return err
}
return nil
}
func table(rows [][]string) string {
+1 -1
View File
@@ -6,7 +6,7 @@ import (
"github.com/mgechev/revive/lint"
)
// JSON is an implementation of the Formatter interface
// JSON is an implementation of the [lint.Formatter] interface
// which formats the errors to JSON.
type JSON struct {
Metadata lint.FormatterMetadata
+1 -1
View File
@@ -7,7 +7,7 @@ import (
"github.com/mgechev/revive/lint"
)
// NDJSON is an implementation of the Formatter interface
// NDJSON is an implementation of the [lint.Formatter] interface
// which formats the errors to NDJSON stream.
type NDJSON struct {
Metadata lint.FormatterMetadata
+6 -3
View File
@@ -7,8 +7,8 @@ import (
"github.com/mgechev/revive/lint"
)
// Plain is an implementation of the Formatter interface
// which formats the errors to JSON.
// Plain is an implementation of the [lint.Formatter] interface
// which formats the errors to plain text.
type Plain struct {
Metadata lint.FormatterMetadata
}
@@ -22,7 +22,10 @@ func (*Plain) Name() string {
func (*Plain) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) {
var sb strings.Builder
for failure := range failures {
sb.WriteString(fmt.Sprintf("%v: %s %s\n", failure.Position.Start, failure.Failure, ruleDescriptionURL(failure.RuleName)))
_, err := fmt.Fprintf(&sb, "%v: %s %s\n", failure.Position.Start, failure.Failure, ruleDescriptionURL(failure.RuleName))
if err != nil {
return "", err
}
}
return sb.String(), nil
}
+1 -1
View File
@@ -10,7 +10,7 @@ import (
"github.com/mgechev/revive/lint"
)
// Sarif is an implementation of the Formatter interface
// Sarif is an implementation of the [lint.Formatter] interface
// which formats revive failures into SARIF format.
type Sarif struct {
Metadata lint.FormatterMetadata
+4 -2
View File
@@ -9,12 +9,14 @@ import (
"github.com/mgechev/revive/lint"
)
// Stylish is an implementation of the Formatter interface
// which formats the errors to JSON.
// Stylish is an implementation of the [lint.Formatter] interface
// which formats the errors to a stylish, human-readable format.
type Stylish struct {
Metadata lint.FormatterMetadata
}
var _ lint.Formatter = (*Stylish)(nil)
// Name returns the name of the formatter.
func (*Stylish) Name() string {
return "stylish"
+6 -3
View File
@@ -7,8 +7,8 @@ import (
"github.com/mgechev/revive/lint"
)
// Unix is an implementation of the Formatter interface
// which formats the errors to a simple line based error format
// Unix is an implementation of the [lint.Formatter] interface
// which formats the errors to a simple line based error format:
//
// main.go:24:9: [errorf] should replace errors.New(fmt.Sprintf(...)) with fmt.Errorf(...)
type Unix struct {
@@ -24,7 +24,10 @@ func (*Unix) Name() string {
func (*Unix) Format(failures <-chan lint.Failure, _ lint.Config) (string, error) {
var sb strings.Builder
for failure := range failures {
sb.WriteString(fmt.Sprintf("%v: [%s] %s\n", failure.Position.Start, failure.RuleName, failure.Failure))
_, err := fmt.Fprintf(&sb, "%v: [%s] %s\n", failure.Position.Start, failure.RuleName, failure.Failure)
if err != nil {
return "", err
}
}
return sb.String(), nil
}
+8 -3
View File
@@ -1,4 +1,4 @@
// Package astutils provides utility functions for working with AST nodes
// Package astutils provides utility functions for working with AST nodes.
package astutils
import (
@@ -16,8 +16,13 @@ import (
// FuncSignatureIs returns true if the given func decl satisfies a signature characterized
// by the given name, parameters types and return types; false otherwise.
//
// Example: to check if a function declaration has the signature Foo(int, string) (bool,error)
// call to FuncSignatureIs(funcDecl,"Foo",[]string{"int","string"},[]string{"bool","error"}).
// Example: To check if a function declaration has the signature
//
// Foo(int, string) (bool, error)
//
// call to
//
// FuncSignatureIs(funcDecl, "Foo", []string{"int", "string"}, []string{"bool", "error"})
func FuncSignatureIs(funcDecl *ast.FuncDecl, wantName string, wantParametersTypes, wantResultsTypes []string) bool {
if wantName != funcDecl.Name.String() {
return false // func name doesn't match expected one
+3 -3
View File
@@ -1,6 +1,6 @@
// Package ifelse provides helpers for analyzing the control flow in if-else chains,
// presently used by the following rules:
// - early-return
// - indent-error-flow
// - superfluous-else
// - early-return
// - indent-error-flow
// - superfluous-else
package ifelse
+1 -1
View File
@@ -23,7 +23,7 @@ var DeviatingFuncs = map[Call]BranchKind{
{"log", "Panicln"}: Panic,
}
// ExprCall gets the Call of an ExprStmt, if any.
// ExprCall gets the [Call] of an [ast.ExprStmt], if any.
func ExprCall(expr *ast.ExprStmt) (Call, bool) {
call, ok := expr.X.(*ast.CallExpr)
if !ok {
+1 -1
View File
@@ -15,7 +15,7 @@ type CheckFunc func(Chain) (string, bool)
// Apply evaluates the given Rule on if-else chains found within the given AST,
// and returns the failures.
//
// Note that in if-else chain with multiple "if" blocks, only the *last* one is checked,
// Note that in if-else chain with multiple "if" blocks, only the "last" one is checked,
// that is to say, given:
//
// if foo {
+44
View File
@@ -0,0 +1,44 @@
// Package syncset provides a simple, mutex-protected set for strings.
package syncset
import (
"maps"
"slices"
"sync"
)
// Set is a concurrency-safe set of strings.
type Set struct {
mu sync.Mutex
elements map[string]struct{}
}
// New returns an initialized, empty Set.
func New() *Set {
return &Set{elements: map[string]struct{}{}}
}
// AddIfAbsent adds str to the set if it is not already present, and reports whether it was added.
func (s *Set) AddIfAbsent(str string) bool {
s.mu.Lock()
defer s.mu.Unlock()
if s.elements == nil {
s.elements = map[string]struct{}{str: {}}
return true
}
_, exists := s.elements[str]
if !exists {
s.elements[str] = struct{}{}
}
return !exists
}
// Elements returns a slice of all elements in the set.
func (s *Set) Elements() []string {
s.mu.Lock()
defer s.mu.Unlock()
return slices.Collect(maps.Keys(s.elements))
}
+2 -2
View File
@@ -15,9 +15,9 @@ type RuleConfig struct {
Arguments Arguments
Severity Severity
Disabled bool
// Exclude - rule-level file excludes, TOML related (strings)
// Exclude is rule-level file excludes, TOML related (strings).
Exclude []string
// excludeFilters - regex-based file filters, initialized from Exclude
// excludeFilters is regex-based file filters, initialized from Exclude.
excludeFilters []*FileFilter
}
+8 -9
View File
@@ -72,19 +72,18 @@ type FailurePosition struct {
// Failure defines a struct for a linting failure.
type Failure struct {
Failure string `json:"Failure"`
RuleName string `json:"RuleName"`
Category FailureCategory `json:"Category"`
Position FailurePosition `json:"Position"`
Node ast.Node `json:"-"`
Confidence float64 `json:"Confidence"`
// For future use
ReplacementLine string `json:"ReplacementLine"`
Failure string `json:"Failure"`
RuleName string `json:"RuleName"`
Category FailureCategory `json:"Category"`
Position FailurePosition `json:"Position"`
Node ast.Node `json:"-"`
Confidence float64 `json:"Confidence"`
ReplacementLine string `json:"ReplacementLine"`
}
// GetFilename returns the filename.
//
// Deprecated: Use [Filename].
// Deprecated: Use [Failure.Filename] instead.
func (f *Failure) GetFilename() string {
return f.Filename()
}
+36 -35
View File
@@ -5,45 +5,46 @@ import (
"io"
"log/slog"
"os"
"sync"
"testing"
)
const logFile = "revive.log"
var (
logger *slog.Logger
loggerFile *os.File
)
// GetLogger retrieves an instance of an application logger which outputs
// to a file if the debug flag is enabled.
// GetLogger retrieves an instance of an application logger.
// The log level can be configured via the REVIVE_LOG_LEVEL environment variable.
// If REVIVE_LOG_LEVEL is unset or empty, logging is disabled.
// If it is set to an invalid value, the log level defaults to WARN.
//
//nolint:unparam // err is always nil, but is included in the signature for future extensibility.
func GetLogger() (*slog.Logger, error) {
if logger != nil {
return logger, nil
}
debugModeEnabled := os.Getenv("DEBUG") != ""
if !debugModeEnabled {
// by default, suppress all logging output
return slog.New(slog.DiscardHandler), nil
}
var err error
loggerFile, err = os.Create(logFile)
if err != nil {
return nil, err
}
logger = slog.New(slog.NewTextHandler(io.MultiWriter(os.Stderr, loggerFile), nil))
logger.Info("Logger initialized", "logFile", logFile)
return logger, nil
return getLogger(), nil
}
// Close closes the logger file if it was opened.
func Close() error {
if loggerFile == nil {
return nil
var getLogger = sync.OnceValue(initLogger(os.Stderr))
func initLogger(out io.Writer) func() *slog.Logger {
return func() *slog.Logger {
logLevel := os.Getenv("REVIVE_LOG_LEVEL")
if logLevel == "" {
return slog.New(slog.DiscardHandler)
}
leveler := &slog.LevelVar{}
opts := &slog.HandlerOptions{Level: leveler}
level := slog.LevelWarn
_ = level.UnmarshalText([]byte(logLevel)) // Ignore error and default to WARN if invalid
leveler.Set(level)
logger := slog.New(slog.NewTextHandler(out, opts))
logger.Info("Logger initialized", "logLevel", logLevel)
return logger
}
return loggerFile.Close()
}
// InitForTesting initializes the logger singleton cache for testing purposes.
// This function should only be called in tests.
func InitForTesting(tb testing.TB, out io.Writer) {
tb.Helper()
getLogger = sync.OnceValue(initLogger(out))
}
+1 -1
View File
@@ -7,7 +7,7 @@ import (
"github.com/mgechev/revive/lint"
)
// BoolLiteralRule warns when logic expressions contains Boolean literals.
// BoolLiteralRule warns when logic expressions contain boolean literals.
type BoolLiteralRule struct{}
// Apply applies the rule to given file.
+2 -2
View File
@@ -7,8 +7,8 @@ import (
"github.com/mgechev/revive/lint"
)
// CommentSpacingsRule check whether there is a space between
// the comment symbol( // ) and the start of the comment text.
// CommentSpacingsRule checks whether there is a space between
// the comment symbol // and the start of the comment text.
type CommentSpacingsRule struct {
allowList []string
}
+1 -1
View File
@@ -9,7 +9,7 @@ import (
"github.com/mgechev/revive/lint"
)
// ContextAsArgumentRule suggests that `context.Context` should be the first argument of a function.
// ContextAsArgumentRule suggests that [context.Context] should be the first argument of a function.
type ContextAsArgumentRule struct {
allowTypes map[string]struct{}
}
+1 -1
View File
@@ -9,7 +9,7 @@ import (
"github.com/mgechev/revive/lint"
)
// ContextKeysType disallows the usage of basic types in `context.WithValue`.
// ContextKeysType disallows the usage of basic types in [context.WithValue].
type ContextKeysType struct{}
// Apply applies the rule to given file.
+2 -2
View File
@@ -11,7 +11,8 @@ import (
//nolint:staticcheck // TODO: ast.Object is deprecated
type nodeUID *ast.Object // type of the unique id for AST nodes
// DataRaceRule lints assignments to value method-receivers.
// DataRaceRule spots potential dataraces caused by goroutines capturing (by-reference)
// particular identifiers of the function from which goroutines are created.
type DataRaceRule struct{}
// Apply applies the rule to given file.
@@ -65,7 +66,6 @@ func (*DataRaceRule) extractReturnIDs(fields []*ast.Field) map[nodeUID]struct{}
}
type lintFunctionForDataRaces struct {
_ struct{}
onFailure func(failure lint.Failure)
returnIDs map[nodeUID]struct{}
rangeIDs map[nodeUID]struct{}
+1 -1
View File
@@ -11,7 +11,7 @@ import (
"github.com/mgechev/revive/lint"
)
// DeepExitRule lints program exit at functions other than main or init.
// DeepExitRule lints program exit in functions other than main or init.
type DeepExitRule struct{}
// Apply applies the rule to given file.
+1 -1
View File
@@ -8,7 +8,7 @@ import (
"github.com/mgechev/revive/lint"
)
// DotImportsRule forbids . imports.
// DotImportsRule forbids dot imports.
type DotImportsRule struct {
allowedPackages allowPackages
}
+2
View File
@@ -18,6 +18,8 @@ type EarlyReturnRule struct {
allowJump bool
}
var _ lint.ConfigurableRule = (*EarlyReturnRule)(nil)
// Configure validates the rule configuration, and configures the rule accordingly.
//
// Configuration implements the [lint.ConfigurableRule] interface.
+2 -2
View File
@@ -408,8 +408,8 @@ func (w *lintExported) checkGoDocStatus(comment *ast.CommentGroup, name string)
}
// firstCommentLine yields the first line of interest in comment group or "" if there is nothing of interest.
// An "interesting line" is a comment line that is neither a directive (e.g. //go:...) or a deprecation comment
// (lines from the first line with a prefix // Deprecated: to the end of the comment group)
// An "interesting line" is a comment line that is neither a directive (e.g. `//go:...`) or a deprecation comment
// (lines from the first line with a prefix `// Deprecated:` to the end of the comment group).
// Empty or spaces-only lines are discarded.
func (*lintExported) firstCommentLine(comment *ast.CommentGroup) (result string) {
if comment == nil {
+1 -1
View File
@@ -38,7 +38,7 @@ func (*FilenameFormatRule) getMsgForNonASCIIChars(str string) string {
continue
}
result.WriteString(fmt.Sprintf(" Non ASCII character %c (%U) found.", c, c))
fmt.Fprintf(&result, " Non ASCII character %c (%U) found.", c, c)
}
return result.String()
+1 -1
View File
@@ -9,7 +9,7 @@ import (
"github.com/mgechev/revive/lint"
)
// ForbiddenCallInWgGoRule spots calls to panic or wg.Done when using WaitGroup.Go.
// ForbiddenCallInWgGoRule spots calls to panic or wg.Done when using [sync.WaitGroup.Go].
type ForbiddenCallInWgGoRule struct{}
// Apply applies the rule to given file.
+1 -1
View File
@@ -8,7 +8,7 @@ import (
"github.com/mgechev/revive/lint"
)
// IncrementDecrementRule lints `i += 1` and `i -= 1` constructs.
// IncrementDecrementRule suggests replacing `i += 1` and `i -= 1` with `i++` and `i--`.
type IncrementDecrementRule struct{}
// Apply applies the rule to given file.
+2
View File
@@ -11,6 +11,8 @@ type IndentErrorFlowRule struct {
preserveScope bool
}
var _ lint.ConfigurableRule = (*IndentErrorFlowRule)(nil)
// Configure validates the rule configuration, and configures the rule accordingly.
//
// Configuration implements the [lint.ConfigurableRule] interface.
+1 -1
View File
@@ -12,7 +12,7 @@ import (
"github.com/mgechev/revive/lint"
)
// LineLengthLimitRule lints number of characters in a line.
// LineLengthLimitRule lints the number of characters in a line.
type LineLengthLimitRule struct {
max int
}
+315
View File
@@ -0,0 +1,315 @@
package rule
import (
"errors"
"fmt"
"go/ast"
"path/filepath"
"regexp"
"strings"
gopackages "golang.org/x/tools/go/packages"
"github.com/mgechev/revive/internal/syncset"
"github.com/mgechev/revive/lint"
)
// defaultBadNames is the list of "bad" package names from https://go.dev/blog/package-names#bad-package-names.
var defaultBadNames = map[string]struct{}{
"common": {},
"interface": {},
"interfaces": {},
"misc": {},
"type": {},
"types": {},
"util": {},
"utils": {},
}
// extraBadNames is the list of additional "bad" package names that are not recommended.
var extraBadNames = map[string]struct{}{
"api": {},
"helpers": {},
"miscellaneous": {},
"models": {},
"shared": {},
"utilities": {},
}
// commonStdNames is the list of standard library package names that are commonly used in Go programs.
// This list is based on the most popular standard library packages according to importedby tab in pkg.go.dev.
// For example, "http" imported by 1,705,800 times https://pkg.go.dev/net/http?tab=importedby
var commonStdNames = map[string]string{
"bytes": "bytes",
"bufio": "bufio",
"flag": "flag",
"context": "context",
"errors": "errors",
"filepath": "path/filepath",
"fmt": "fmt",
"http": "net/http",
"io": "io",
"ioutil": "io/ioutil",
"json": "encoding/json",
"log": "log",
"math": "math",
"net": "net",
"os": "os",
"strconv": "strconv",
"reflect": "reflect",
"regexp": "regexp",
"runtime": "runtime",
"sort": "sort",
"strings": "strings",
"sync": "sync",
"time": "time",
"url": "net/url",
}
// nonPublicPackageSegments are package path segments that indicate the std package is not public.
var nonPublicPackageSegments = map[string]struct{}{
"internal": {},
"vendor": {},
}
// forbiddenTopLevelNames is the set of forbidden top level package names.
var forbiddenTopLevelNames = map[string]struct{}{
"pkg": {},
}
// PackageNamingRule is a rule that checks package names.
type PackageNamingRule struct {
skipConventionNameCheck bool // if true - skip checks for package name conventions (e.g., no underscores, no MixedCaps etc.)
conventionNameCheckRegex *regexp.Regexp // the regex used to check package name conventions
skipTopLevelCheck bool // if true - skip checks for top level package names (e.g., "pkg")
skipDefaultBadNameCheck bool // if true - skip checks for default bad package names (e.g., "util", "misc" etc.)
checkExtraBadName bool // if true - enable check for extra bad package names (e.g., "helpers", "models" etc.)
userDefinedBadNames map[string]struct{} // set of user defined bad package names
skipCollisionWithCommonStd bool // if true - skip checks for collisions with common Go standard library package names (e.g., "http", "json", "rand" etc.)
checkCollisionWithAllStd bool // if true - enable checks for collisions with all Go standard library package names (including "version", "metrics" etc.)
// allStdNames holds name -> path of standard library packages excluding internal and vendor.
// Populated only if checkCollisionWithAllStd is true. `net/http` stored as `http`, `math/rand/v2` as `rand` etc.
allStdNames map[string]string
// alreadyCheckedNames is keyed by fileDir (package directory path) to track which package directories
// have already been checked and avoid duplicate checks across files in the same package.
alreadyCheckedNames *syncset.Set
}
// Configure validates the rule configuration, and configures the rule accordingly.
//
// Configuration implements the [lint.ConfigurableRule] interface.
func (r *PackageNamingRule) Configure(arguments lint.Arguments) error {
r.alreadyCheckedNames = syncset.New()
if len(arguments) == 0 {
return nil
}
if len(arguments) > 1 {
return fmt.Errorf("invalid arguments to the package-naming rule: expected at most 1 argument, but got %d", len(arguments))
}
args, ok := arguments[0].(map[string]any)
if !ok {
return fmt.Errorf("invalid argument to the package-naming rule: expecting a k,v map, but got %T", arguments[0])
}
for k, v := range args {
switch {
case isRuleOption(k, "skipConventionNameCheck"):
r.skipConventionNameCheck, ok = v.(bool)
if !ok {
return fmt.Errorf("invalid argument to the package-naming rule: expecting skipConventionNameCheck to be a boolean, but got %T", v)
}
case isRuleOption(k, "conventionNameCheckRegex"):
regexStr, ok := v.(string)
if !ok {
return fmt.Errorf("invalid argument to the package-naming rule: expecting conventionNameCheckRegex to be a string, but got %T", v)
}
if regexStr == "" {
return errors.New("invalid argument to the package-naming rule: conventionNameCheckRegex cannot be an empty string")
}
regex, err := regexp.Compile(regexStr)
if err != nil {
return fmt.Errorf("invalid argument to the package-naming rule: invalid regex for conventionNameCheckRegex: %w", err)
}
r.conventionNameCheckRegex = regex
case isRuleOption(k, "skipTopLevelCheck"):
r.skipTopLevelCheck, ok = v.(bool)
if !ok {
return fmt.Errorf("invalid argument to the package-naming rule: expecting skipTopLevelCheck to be a boolean, but got %T", v)
}
case isRuleOption(k, "skipDefaultBadNameCheck"):
r.skipDefaultBadNameCheck, ok = v.(bool)
if !ok {
return fmt.Errorf("invalid argument to the package-naming rule: expecting skipDefaultBadNameCheck to be a boolean, but got %T", v)
}
case isRuleOption(k, "checkExtraBadName"):
r.checkExtraBadName, ok = v.(bool)
if !ok {
return fmt.Errorf("invalid argument to the package-naming rule: expecting checkExtraBadName to be a boolean, but got %T", v)
}
case isRuleOption(k, "userDefinedBadNames"):
userDefinedBadNames, ok := v.([]any)
if !ok {
return fmt.Errorf("invalid argument to the package-naming rule: expecting userDefinedBadNames of type slice of strings, but got %T", v)
}
for i, name := range userDefinedBadNames {
if r.userDefinedBadNames == nil {
r.userDefinedBadNames = map[string]struct{}{}
}
n, ok := name.(string)
if !ok {
return fmt.Errorf("invalid argument to the package-naming rule: expecting element %d of userDefinedBadNames to be a string, but got %v(%T)", i, name, name)
}
if n == "" {
return fmt.Errorf("invalid argument to the package-naming rule: userDefinedBadNames cannot contain empty string (index %d)", i)
}
r.userDefinedBadNames[strings.ToLower(n)] = struct{}{}
}
case isRuleOption(k, "skipCollisionWithCommonStd"):
r.skipCollisionWithCommonStd, ok = v.(bool)
if !ok {
return fmt.Errorf("invalid argument to the package-naming rule: expecting skipCollisionWithCommonStd to be a boolean, but got %T", v)
}
case isRuleOption(k, "checkCollisionWithAllStd"):
r.checkCollisionWithAllStd, ok = v.(bool)
if !ok {
return fmt.Errorf("invalid argument to the package-naming rule: expecting checkCollisionWithAllStd to be a boolean, but got %T", v)
}
}
}
if r.skipConventionNameCheck && r.conventionNameCheckRegex != nil {
return errors.New("invalid configuration for package-naming rule: skipConventionNameCheck and conventionNameCheckRegex cannot be both set")
}
if r.skipCollisionWithCommonStd && r.checkCollisionWithAllStd {
return errors.New("invalid configuration for package-naming rule: skipCollisionWithCommonStd and checkCollisionWithAllStd cannot be both set")
}
if r.checkCollisionWithAllStd && r.allStdNames == nil {
pkgs, err := gopackages.Load(nil, "std")
if err != nil {
return fmt.Errorf("load std packages: %w", err)
}
r.allStdNames = map[string]string{}
for _, pkg := range pkgs {
if isNonPublicPackage(pkg.PkgPath) {
continue
}
if existingPath, ok := r.allStdNames[pkg.Name]; !ok || pkg.PkgPath < existingPath {
r.allStdNames[pkg.Name] = pkg.PkgPath
}
}
}
return nil
}
// isNonPublicPackage reports whether the path represents an internal or vendor directory.
func isNonPublicPackage(path string) bool {
for p := range strings.SplitSeq(path, "/") {
if _, ok := nonPublicPackageSegments[p]; ok {
return true
}
}
return false
}
// Apply applies the rule to given file.
func (r *PackageNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
onFailure := func(failure lint.Failure) {
failures = append(failures, failure)
}
fileDir := filepath.Dir(file.Name)
if !r.alreadyCheckedNames.AddIfAbsent(fileDir) {
return failures
}
node := file.AST.Name
pkgName := node.Name
pkgNameWithoutTestSuffix := strings.TrimSuffix(pkgName, "_test")
if r.conventionNameCheckRegex != nil {
if !r.conventionNameCheckRegex.MatchString(pkgNameWithoutTestSuffix) {
onFailure(r.pkgNameFailure(node, "package name %q doesn't match the convention defined by conventionNameCheckRegex", pkgName))
return failures
}
} else if !r.skipConventionNameCheck {
// Package names need slightly different handling than other names.
if strings.Contains(pkgNameWithoutTestSuffix, "_") {
onFailure(r.pkgNameFailure(node, "don't use package name %q that contains an underscore", pkgName))
return failures
}
if hasUpperCaseLetter(pkgNameWithoutTestSuffix) {
onFailure(r.pkgNameFailure(node, "don't use package name %q that contains MixedCaps", pkgName))
return failures
}
}
pkgNameLower := strings.ToLower(pkgName)
if !r.skipTopLevelCheck {
if _, ok := forbiddenTopLevelNames[pkgNameLower]; ok && filepath.Base(fileDir) != pkgName {
onFailure(r.pkgNameFailure(node, "don't use %q as a root level package name", pkgName))
return failures
}
}
if !r.skipDefaultBadNameCheck {
if _, ok := defaultBadNames[pkgNameLower]; ok {
onFailure(r.pkgNameFailure(node, "don't use %q because it is a bad package name according to https://go.dev/blog/package-names#bad-package-names", pkgName))
return failures
}
}
if r.checkExtraBadName {
if _, ok := extraBadNames[pkgNameLower]; ok {
onFailure(r.pkgNameFailure(node, "don't use %q because it is a bad package name (extra)", pkgName))
return failures
}
}
if r.userDefinedBadNames != nil {
if _, ok := r.userDefinedBadNames[pkgNameLower]; ok {
onFailure(r.pkgNameFailure(node, "don't use %q because it is a bad package name (user-defined)", pkgName))
return failures
}
}
if r.checkCollisionWithAllStd {
// all std names are also common std names, so no need to check separately
if std, ok := r.allStdNames[pkgNameLower]; ok {
onFailure(r.pkgNameFailure(node, "don't use %q because it conflicts with Go standard library package %q", pkgName, std))
}
} else if !r.skipCollisionWithCommonStd {
if std, ok := commonStdNames[pkgNameLower]; ok {
onFailure(r.pkgNameFailure(node, "don't use %q because it conflicts with common Go standard library package %q", pkgName, std))
}
}
return failures
}
// Name returns the rule name.
func (*PackageNamingRule) Name() string {
return "package-naming"
}
func (*PackageNamingRule) pkgNameFailure(node ast.Node, msg string, args ...any) lint.Failure {
return lint.Failure{
Failure: fmt.Sprintf(msg, args...),
Confidence: 1,
Node: node,
Category: lint.FailureCategoryNaming,
}
}
+1 -1
View File
@@ -7,7 +7,7 @@ import (
"github.com/mgechev/revive/lint"
)
// RedundantTestMainExitRule suggests removing Exit call in TestMain function for test files.
// RedundantTestMainExitRule suggests removing redundant [os.Exit] or [syscall.Exit] calls in TestMain function.
type RedundantTestMainExitRule struct{}
// Apply applies the rule to given file.
+2 -1
View File
@@ -11,7 +11,7 @@ import (
"github.com/mgechev/revive/lint"
)
// StringFormatRule lints strings and/or comments according to a set of regular expressions given as Arguments.
// StringFormatRule lints strings and/or comments according to a set of regular expressions given as arguments.
type StringFormatRule struct {
rules []stringFormatSubrule
}
@@ -87,6 +87,7 @@ const identRegex = "[_A-Za-z][_A-Za-z0-9]*"
var parseStringFormatScope = regexp.MustCompile(
fmt.Sprintf("^(%s(?:\\.%s)?)(?:\\[([0-9]+)\\](?:\\.(%s))?)?$", identRegex, identRegex, identRegex))
//revive:disable-next-line:function-result-limit
func (r *StringFormatRule) parseArgument(argument any, ruleNum int) (scopes stringFormatSubruleScopes, regex *regexp.Regexp, negated bool, errorMessage string, err error) {
g, ok := argument.([]any) // Cast to generic slice first
if !ok {
+1 -1
View File
@@ -7,7 +7,7 @@ import (
"github.com/mgechev/revive/lint"
)
// StringOfIntRule warns when logic expressions contains Boolean literals.
// StringOfIntRule warns when an integer is converted to a string using a string cast.
type StringOfIntRule struct{}
// Apply applies the rule to given file.
+2
View File
@@ -13,6 +13,8 @@ type SuperfluousElseRule struct {
preserveScope bool
}
var _ lint.ConfigurableRule = (*SuperfluousElseRule)(nil)
// Configure validates the rule configuration, and configures the rule accordingly.
//
// Configuration implements the [lint.ConfigurableRule] interface.
+1 -1
View File
@@ -9,7 +9,7 @@ import (
"github.com/mgechev/revive/lint"
)
// TimeEqualRule shows where "==" and "!=" used for equality check [time.Time].
// TimeEqualRule flags where "==" and "!=" are used for equality checks on [time.Time].
type TimeEqualRule struct{}
// Apply applies the rule to given file.
+1 -1
View File
@@ -79,7 +79,7 @@ type lintUnconditionalRecursionRule struct {
}
// Visit will traverse function's body we search for calls to the function itself.
// We do not search inside conditional control structures (if, for, switch, ...)
// We do not search inside conditional control structures (if, for, switch etc.)
// because any recursive call inside them is conditioned.
// We do search inside conditional control structures are statements
// that will take the control out of the function (return, exit, panic).
+1 -1
View File
@@ -9,7 +9,7 @@ import (
"github.com/mgechev/revive/lint"
)
// UnexportedReturnRule warns when a public return is from unexported type.
// UnexportedReturnRule warns when a public function returns an unexported type.
type UnexportedReturnRule struct{}
// Apply applies the rule to given file.
+1 -1
View File
@@ -11,7 +11,7 @@ import (
)
// UnsecureURLSchemeRule checks if a file contains string literals with unsecure URL schemes.
// For example: http://... in place of https://....
// For example: "http://" in place of "https://".
type UnsecureURLSchemeRule struct{}
// Apply applied the rule to the given file.
+2 -1
View File
@@ -9,7 +9,8 @@ import (
"github.com/mgechev/revive/lint"
)
// UseFmtPrintRule lints calls to print and println.
// UseFmtPrintRule proposes to replace calls to built-in `print` and `println`
// with their equivalents from [fmt] package.
type UseFmtPrintRule struct{}
// Apply applies the rule to given file.
+1 -1
View File
@@ -8,7 +8,7 @@ import (
"github.com/mgechev/revive/lint"
)
// UseSlicesSort spots calls to sort.* that can be replaced by slices package methods.
// UseSlicesSort spots calls to sort.* that can be replaced by [slices] package methods.
type UseSlicesSort struct{}
// Apply applies the rule to given file.
+1 -1
View File
@@ -7,7 +7,7 @@ import (
"github.com/mgechev/revive/lint"
)
// UseWaitGroupGoRule spots Go idioms that might be rewritten using WaitGroup.Go.
// UseWaitGroupGoRule spots Go idioms that might be rewritten using [sync.WaitGroup.Go].
type UseWaitGroupGoRule struct{}
// Apply applies the rule to given file.
+15 -152
View File
@@ -4,15 +4,12 @@ import (
"fmt"
"go/ast"
"go/token"
"path/filepath"
"strings"
"sync"
gopackages "golang.org/x/tools/go/packages"
"github.com/mgechev/revive/internal/astutils"
"github.com/mgechev/revive/internal/rule"
"github.com/mgechev/revive/lint"
"github.com/mgechev/revive/logging"
)
var knownNameExceptions = map[string]bool{
@@ -20,49 +17,19 @@ var knownNameExceptions = map[string]bool{
"kWh": true,
}
// defaultBadPackageNames is the list of "bad" package names from https://go.dev/wiki/CodeReviewComments#package-names
// and https://go.dev/blog/package-names#bad-package-names.
// The rule warns about the usage of any package name in this list if skipPackageNameChecks is false.
// Values in the list should be lowercased.
var defaultBadPackageNames = map[string]struct{}{
"api": {},
"common": {},
"interface": {},
"interfaces": {},
"misc": {},
"miscellaneous": {},
"shared": {},
"type": {},
"types": {},
"util": {},
"utilities": {},
"utils": {},
}
// VarNamingRule lints the name of a variable.
type VarNamingRule struct {
allowList []string
blockList []string
allowUpperCaseConst bool // if true - allows to use UPPER_SOME_NAMES for constants
skipInitialismNameChecks bool // if true - disable enforcing capitals for common initialisms
skipPackageNameChecks bool // if true - disable check for meaningless and user-defined bad package names
extraBadPackageNames map[string]struct{} // inactive if skipPackageNameChecks is false
pkgNameAlreadyChecked syncSet // set of packages names already checked
skipPackageNameCollisionWithGoStd bool // if true - disable checks for collisions with Go standard library package names
// stdPackageNames holds the names of standard library packages excluding internal and vendor.
// populated only if skipPackageNameCollisionWithGoStd is false.
// E.g., `net/http` stored as `http`, `math/rand/v2` - `rand` etc.
stdPackageNames map[string]struct{}
allowUpperCaseConst bool // if true - allows to use UPPER_SOME_NAMES for constants
skipInitialismNameChecks bool // if true - disable enforcing capitals for common initialisms
}
// Configure validates the rule configuration, and configures the rule accordingly.
//
// Configuration implements the [lint.ConfigurableRule] interface.
func (r *VarNamingRule) Configure(arguments lint.Arguments) error {
r.pkgNameAlreadyChecked = syncSet{elements: map[string]struct{}{}}
if len(arguments) >= 1 {
list, err := getList(arguments[0], "allowlist")
if err != nil {
@@ -100,58 +67,28 @@ func (r *VarNamingRule) Configure(arguments lint.Arguments) error {
case isRuleOption(k, "upperCaseConst"):
r.allowUpperCaseConst = fmt.Sprint(v) == "true"
case isRuleOption(k, "skipPackageNameChecks"):
r.skipPackageNameChecks = fmt.Sprint(v) == "true"
case isRuleOption(k, "extraBadPackageNames"):
extraBadPackageNames, ok := v.([]any)
if !ok {
return fmt.Errorf("invalid third argument to the var-naming rule. Expecting extraBadPackageNames of type slice of strings, but %T", v)
logger, err := logging.GetLogger()
if err == nil {
logger.Warn("The option var-naming.skipPackageNameChecks is no longer supported and will be ignored; use package-naming rule instead")
}
for i, name := range extraBadPackageNames {
if r.extraBadPackageNames == nil {
r.extraBadPackageNames = map[string]struct{}{}
}
n, ok := name.(string)
if !ok {
return fmt.Errorf("invalid third argument to the var-naming rule: expected element %d of extraBadPackageNames to be a string, but got %v(%T)", i, name, name)
}
r.extraBadPackageNames[strings.ToLower(n)] = struct{}{}
case isRuleOption(k, "extraBadPackageNames"):
logger, err := logging.GetLogger()
if err == nil {
logger.Warn("The option var-naming.extraBadPackageNames is no longer supported and will be ignored; use package-naming.userDefinedBadNames instead")
}
case isRuleOption(k, "skipPackageNameCollisionWithGoStd"):
r.skipPackageNameCollisionWithGoStd = fmt.Sprint(v) == "true"
logger, err := logging.GetLogger()
if err == nil {
logger.Warn("The option var-naming.skipPackageNameCollisionWithGoStd is no longer supported and will be ignored; " +
"use package-naming.skipCollisionWithCommonStd instead (or package-naming.checkCollisionWithAllStd for the old 'all std' behavior)")
}
}
}
}
if !r.skipPackageNameCollisionWithGoStd && r.stdPackageNames == nil {
pkgs, err := gopackages.Load(nil, "std")
if err != nil {
return fmt.Errorf("load std packages: %w", err)
}
r.stdPackageNames = map[string]struct{}{}
for _, pkg := range pkgs {
if isInternalOrVendorPackage(pkg.PkgPath) {
continue
}
r.stdPackageNames[pkg.Name] = struct{}{}
}
}
return nil
}
// isInternalOrVendorPackage reports whether the path represents an internal or vendor directory.
//
// Borrowed and modified from
// https://github.com/golang/pkgsite/blob/84333735ffe124f7bd904805fd488b93841de49f/internal/postgres/search.go#L1009-L1016
func isInternalOrVendorPackage(path string) bool {
for p := range strings.SplitSeq(path, "/") {
if p == "internal" || p == "vendor" {
return true
}
}
return false
}
// Apply applies the rule to given file.
func (r *VarNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure {
var failures []lint.Failure
@@ -159,10 +96,6 @@ func (r *VarNamingRule) Apply(file *lint.File, _ lint.Arguments) []lint.Failure
failures = append(failures, failure)
}
if !r.skipPackageNameChecks {
r.applyPackageCheckRules(file, onFailure)
}
fileAst := file.AST
walker := lintNames{
file: file,
@@ -184,61 +117,6 @@ func (*VarNamingRule) Name() string {
return "var-naming"
}
func (r *VarNamingRule) applyPackageCheckRules(file *lint.File, onFailure func(failure lint.Failure)) {
fileDir := filepath.Dir(file.Name)
// Protect pkgsWithNameFailure from concurrent modifications
r.pkgNameAlreadyChecked.Lock()
defer r.pkgNameAlreadyChecked.Unlock()
if r.pkgNameAlreadyChecked.has(fileDir) {
return
}
r.pkgNameAlreadyChecked.add(fileDir) // mark this package as already checked
pkgNameNode := file.AST.Name
pkgName := pkgNameNode.Name
pkgNameLower := strings.ToLower(pkgName)
// Check if top level package
if pkgNameLower == "pkg" && filepath.Base(fileDir) != pkgName {
onFailure(r.pkgNameFailure(pkgNameNode, "should not have a root level package called pkg"))
return
}
if _, ok := r.extraBadPackageNames[pkgNameLower]; ok {
onFailure(r.pkgNameFailure(pkgNameNode, "avoid bad package names"))
return
}
if _, ok := defaultBadPackageNames[pkgNameLower]; ok {
onFailure(r.pkgNameFailure(pkgNameNode, "avoid meaningless package names"))
return
}
if !r.skipPackageNameCollisionWithGoStd {
if _, ok := r.stdPackageNames[pkgNameLower]; ok {
onFailure(r.pkgNameFailure(pkgNameNode, "avoid package names that conflict with Go standard library package names"))
}
}
// Package names need slightly different handling than other names.
if strings.Contains(pkgName, "_") && !strings.HasSuffix(pkgName, "_test") {
onFailure(r.pkgNameFailure(pkgNameNode, "don't use an underscore in package name"))
}
if hasUpperCaseLetter(pkgName) {
onFailure(r.pkgNameFailure(pkgNameNode, "don't use MixedCaps in package names; %s should be %s", pkgName, pkgNameLower))
}
}
func (*VarNamingRule) pkgNameFailure(node ast.Node, msg string, args ...any) lint.Failure {
return lint.Failure{
Failure: fmt.Sprintf(msg, args...),
Confidence: 1,
Node: node,
Category: lint.FailureCategoryNaming,
}
}
type lintNames struct {
file *lint.File
fileAst *ast.File
@@ -485,18 +363,3 @@ func getList(arg any, argName string) ([]string, error) {
}
return list, nil
}
type syncSet struct {
sync.Mutex
elements map[string]struct{}
}
func (sm *syncSet) has(s string) bool {
_, result := sm.elements[s]
return result
}
func (sm *syncSet) add(s string) {
sm.elements[s] = struct{}{}
}
@@ -29,6 +29,7 @@ const (
FuncSigArgType
ErrFuncActualArgType
GomegaParamArgType
TBParamArgType
MultiRetsArgType
ErrorMethodArgType
@@ -17,8 +17,13 @@ func getAsyncFuncArg(sig *gotypes.Signature) ArgPayload {
if sig.Params().Len() > 0 {
arg := sig.Params().At(0).Type()
if gomegainfo.IsGomegaType(arg) && sig.Results().Len() == 0 {
argType |= FuncSigArgType | GomegaParamArgType
if sig.Results().Len() == 0 {
if gomegainfo.IsGomegaType(arg) {
argType |= FuncSigArgType | GomegaParamArgType
}
if typecheck.ImplementsTB(arg) {
argType |= FuncSigArgType | TBParamArgType
}
}
}
@@ -17,7 +17,7 @@ func (AsyncSucceedRule) isApply(gexp *expression.GomegaExpression) bool {
return gexp.IsAsync() &&
gexp.MatcherTypeIs(matcher.SucceedMatcherType) &&
gexp.ActualArgTypeIs(actual.FuncSigArgType) &&
!gexp.ActualArgTypeIs(actual.ErrorTypeArgType|actual.GomegaParamArgType)
!gexp.ActualArgTypeIs(actual.ErrorTypeArgType|actual.GomegaParamArgType|actual.TBParamArgType)
}
func (r AsyncSucceedRule) Apply(gexp *expression.GomegaExpression, _ config.Config, reportBuilder *reports.Builder) bool {
@@ -25,6 +25,9 @@ func (r AsyncSucceedRule) Apply(gexp *expression.GomegaExpression, _ config.Conf
if gexp.ActualArgTypeIs(actual.MultiRetsArgType) {
reportBuilder.AddIssue(false, "Success matcher does not support multiple values")
} else {
// The message intentionally does not call out "function with a TB implementation" as another alternative because
// that alternative is not valid for generic Gomega - it would be confusing for many users. Users
// of a Gomega wrapper which supports such functions must figure that out themselves.
reportBuilder.AddIssue(false, "Success matcher only support a single error value, or function with Gomega as its first parameter")
}
}
@@ -8,6 +8,16 @@ import (
var (
errorType *gotypes.Interface
gomegaMatcherType *gotypes.Interface
tbTypes = []*gotypes.Interface{
// In practice, interfaces which mimick testing.TB probably implement
// more than one of these at the same time. But for ImplementsTB
// it's sufficient to have just one method which can be used
// to report a test failure.
tbInterface("Error", false),
tbInterface("Errorf", true),
tbInterface("Fatal", false),
tbInterface("Fatalf", true),
}
)
func init() {
@@ -76,3 +86,32 @@ func ImplementsError(t gotypes.Type) bool {
func ImplementsGomegaMatcher(t gotypes.Type) bool {
return t != nil && gotypes.Implements(t, gomegaMatcherType)
}
// ImplementsTB checks if the argument type implements any of the methods in testing.TB which
// can be used to report test failures. Such a type is a potential alternative to a Gomega
// parameter in some Gomega wrappers.
//
// A nil type never matches. The guard keeps this helper consistent with
// ImplementsError and ImplementsGomegaMatcher, which also tolerate nil,
// and avoids passing nil into gotypes.Implements.
func ImplementsTB(t gotypes.Type) bool {
	if t == nil {
		return false
	}
	for _, tbType := range tbTypes {
		if gotypes.Implements(t, tbType) {
			return true
		}
	}
	return false
}
// tbInterface generates an interface type with exactly one method
// which has the given name and Printf or Println signature.
func tbInterface(name string, printf bool) *gotypes.Interface {
var params []*gotypes.Var
if printf {
params = append(params, gotypes.NewVar(0, nil, "", gotypes.Typ[gotypes.String]))
}
params = append(params, gotypes.NewVar(0, nil, "", gotypes.NewSlice(gotypes.Universe.Lookup("any").Type())))
signature := gotypes.NewSignatureType(nil, nil, nil,
gotypes.NewTuple(params...),
gotypes.NewTuple(),
true,
)
method := gotypes.NewFunc(0, nil, name, signature)
return gotypes.NewInterfaceType([]*gotypes.Func{method}, nil)
}
+18 -388
View File
@@ -1,405 +1,35 @@
package analyzer
import (
"go/types"
"flag"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/buildssa"
"golang.org/x/tools/go/ssa"
)
const (
rowsName = "Rows"
stmtName = "Stmt"
namedStmtName = "NamedStmt"
closeMethod = "Close"
)
type action uint8
const (
actionUnhandled action = iota
actionHandled
actionReturned
actionPassed
actionClosed
actionUnvaluedCall
actionUnvaluedDefer
actionNoOp
)
var (
sqlPackages = []string{
"database/sql",
"github.com/jmoiron/sqlx",
"github.com/jackc/pgx/v5",
"github.com/jackc/pgx/v5/pgxpool",
}
)
// NewAnalyzer returns a non-configurable analyzer that defaults to the defer-only mode.
// Deprecated, this will be removed in v1.0.0.
func NewAnalyzer() *analysis.Analyzer {
flags := flag.NewFlagSet("analyzer", flag.ExitOnError)
return newAnalyzer(run, flags)
}
func run(pass *analysis.Pass) (interface{}, error) {
opinionatedAnalyzer := &deferOnlyAnalyzer{}
return opinionatedAnalyzer.Run(pass)
}
// newAnalyzer returns a new analyzer with the given run function, should be used by all analyzers.
func newAnalyzer(
r func(pass *analysis.Pass) (interface{}, error),
flags *flag.FlagSet,
) *analysis.Analyzer {
return &analysis.Analyzer{
Name: "sqlclosecheck",
Doc: "Checks that sql.Rows, sql.Stmt, sqlx.NamedStmt, pgx.Query are closed.",
Run: run,
Run: r,
Requires: []*analysis.Analyzer{
buildssa.Analyzer,
},
}
}
func run(pass *analysis.Pass) (interface{}, error) {
pssa, ok := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
if !ok {
return nil, nil
}
// Build list of types we are looking for
targetTypes := getTargetTypes(pssa, sqlPackages)
// If non of the types are found, skip
if len(targetTypes) == 0 {
return nil, nil
}
funcs := pssa.SrcFuncs
for _, f := range funcs {
for _, b := range f.Blocks {
for i := range b.Instrs {
// Check if instruction is call that returns a target pointer type
targetValues := getTargetTypesValues(b, i, targetTypes)
if len(targetValues) == 0 {
continue
}
// For each found target check if they are closed and deferred
for _, targetValue := range targetValues {
refs := (*targetValue.value).Referrers()
isClosed := checkClosed(refs, targetTypes)
if !isClosed {
pass.Reportf((targetValue.instr).Pos(), "Rows/Stmt/NamedStmt was not closed")
}
checkDeferred(pass, refs, targetTypes, false)
}
}
}
}
return nil, nil
}
func getTargetTypes(pssa *buildssa.SSA, targetPackages []string) []any {
targets := []any{}
for _, sqlPkg := range targetPackages {
pkg := pssa.Pkg.Prog.ImportedPackage(sqlPkg)
if pkg == nil {
// the SQL package being checked isn't imported
continue
}
rowsPtrType := getTypePointerFromName(pkg, rowsName)
if rowsPtrType != nil {
targets = append(targets, rowsPtrType)
}
rowsType := getTypeFromName(pkg, rowsName)
if rowsType != nil {
targets = append(targets, rowsType)
}
stmtType := getTypePointerFromName(pkg, stmtName)
if stmtType != nil {
targets = append(targets, stmtType)
}
namedStmtType := getTypePointerFromName(pkg, namedStmtName)
if namedStmtType != nil {
targets = append(targets, namedStmtType)
}
}
return targets
}
func getTypePointerFromName(pkg *ssa.Package, name string) *types.Pointer {
pkgType := pkg.Type(name)
if pkgType == nil {
// this package does not use Rows/Stmt/NamedStmt
return nil
}
obj := pkgType.Object()
named, ok := obj.Type().(*types.Named)
if !ok {
return nil
}
return types.NewPointer(named)
}
func getTypeFromName(pkg *ssa.Package, name string) *types.Named {
pkgType := pkg.Type(name)
if pkgType == nil {
// this package does not use Rows/Stmt
return nil
}
obj := pkgType.Object()
named, ok := obj.Type().(*types.Named)
if !ok {
return nil
}
return named
}
type targetValue struct {
value *ssa.Value
instr ssa.Instruction
}
func getTargetTypesValues(b *ssa.BasicBlock, i int, targetTypes []any) []targetValue {
targetValues := []targetValue{}
instr := b.Instrs[i]
call, ok := instr.(*ssa.Call)
if !ok {
return targetValues
}
signature := call.Call.Signature()
results := signature.Results()
for i := 0; i < results.Len(); i++ {
v := results.At(i)
varType := v.Type()
for _, targetType := range targetTypes {
var tt types.Type
switch t := targetType.(type) {
case *types.Pointer:
tt = t
case *types.Named:
tt = t
default:
continue
}
if !types.Identical(varType, tt) {
continue
}
for _, cRef := range *call.Referrers() {
switch instr := cRef.(type) {
case *ssa.Call:
if len(instr.Call.Args) >= 1 && types.Identical(instr.Call.Args[0].Type(), tt) {
targetValues = append(targetValues, targetValue{
value: &instr.Call.Args[0],
instr: call,
})
}
case ssa.Value:
if types.Identical(instr.Type(), tt) {
targetValues = append(targetValues, targetValue{
value: &instr,
instr: call,
})
}
}
}
}
}
return targetValues
}
func checkClosed(refs *[]ssa.Instruction, targetTypes []any) bool {
numInstrs := len(*refs)
for idx, ref := range *refs {
action := getAction(ref, targetTypes)
switch action {
case actionClosed, actionReturned, actionHandled:
return true
case actionPassed:
// Passed and not used after
if numInstrs == idx+1 {
return true
}
}
}
return false
}
func getAction(instr ssa.Instruction, targetTypes []any) action {
switch instr := instr.(type) {
case *ssa.Defer:
if instr.Call.Value != nil {
name := instr.Call.Value.Name()
if name == closeMethod {
return actionClosed
}
}
if instr.Call.Method != nil {
name := instr.Call.Method.Name()
if name == closeMethod {
return actionClosed
}
}
return actionUnvaluedDefer
case *ssa.Call:
if instr.Call.Value == nil {
return actionUnvaluedCall
}
isTarget := false
staticCallee := instr.Call.StaticCallee()
if staticCallee != nil {
receiver := instr.Call.StaticCallee().Signature.Recv()
if receiver != nil {
isTarget = isTargetType(receiver.Type(), targetTypes)
}
}
name := instr.Call.Value.Name()
if isTarget && name == closeMethod {
return actionClosed
}
if !isTarget {
return actionPassed
}
case *ssa.Phi:
return actionPassed
case *ssa.MakeInterface:
return actionPassed
case *ssa.Store:
// A Row/Stmt is stored in a struct, which may be closed later
// by a different flow.
if _, ok := instr.Addr.(*ssa.FieldAddr); ok {
return actionReturned
}
if len(*instr.Addr.Referrers()) == 0 {
return actionNoOp
}
for _, aRef := range *instr.Addr.Referrers() {
if c, ok := aRef.(*ssa.MakeClosure); ok {
if f, ok := c.Fn.(*ssa.Function); ok {
for _, b := range f.Blocks {
if checkClosed(&b.Instrs, targetTypes) {
return actionHandled
}
}
}
}
}
case *ssa.UnOp:
instrType := instr.Type()
for _, targetType := range targetTypes {
var tt types.Type
switch t := targetType.(type) {
case *types.Pointer:
tt = t
case *types.Named:
tt = t
default:
continue
}
if types.Identical(instrType, tt) {
if checkClosed(instr.Referrers(), targetTypes) {
return actionHandled
}
}
}
case *ssa.FieldAddr:
if checkClosed(instr.Referrers(), targetTypes) {
return actionHandled
}
case *ssa.Return:
return actionReturned
}
return actionUnhandled
}
func checkDeferred(pass *analysis.Pass, instrs *[]ssa.Instruction, targetTypes []any, inDefer bool) {
for _, instr := range *instrs {
switch instr := instr.(type) {
case *ssa.Defer:
if instr.Call.Value != nil && instr.Call.Value.Name() == closeMethod {
return
}
if instr.Call.Method != nil && instr.Call.Method.Name() == closeMethod {
return
}
case *ssa.Call:
if instr.Call.Value != nil && instr.Call.Value.Name() == closeMethod {
if !inDefer {
pass.Reportf(instr.Pos(), "Close should use defer")
}
return
}
case *ssa.Store:
if len(*instr.Addr.Referrers()) == 0 {
return
}
for _, aRef := range *instr.Addr.Referrers() {
if c, ok := aRef.(*ssa.MakeClosure); ok {
if f, ok := c.Fn.(*ssa.Function); ok {
for _, b := range f.Blocks {
checkDeferred(pass, &b.Instrs, targetTypes, true)
}
}
}
}
case *ssa.UnOp:
instrType := instr.Type()
for _, targetType := range targetTypes {
var tt types.Type
switch t := targetType.(type) {
case *types.Pointer:
tt = t
case *types.Named:
tt = t
default:
continue
}
if types.Identical(instrType, tt) {
checkDeferred(pass, instr.Referrers(), targetTypes, inDefer)
}
}
case *ssa.FieldAddr:
checkDeferred(pass, instr.Referrers(), targetTypes, inDefer)
}
}
}
func isTargetType(t types.Type, targetTypes []any) bool {
for _, targetType := range targetTypes {
switch tt := targetType.(type) {
case *types.Pointer:
if types.Identical(t, tt) {
return true
}
case *types.Named:
if types.Identical(t, tt) {
return true
}
}
}
return false
}
+25
View File
@@ -0,0 +1,25 @@
package analyzer
import (
"flag"
"golang.org/x/tools/go/analysis"
)
// closedAnalyzer is the "closed" analysis mode. Its Run method is currently a
// stub that reports nothing; the commented-out code sketches the intended
// retrieval of the buildssa result.
type closedAnalyzer struct{}

// NewClosedAnalyzer constructs an analysis.Analyzer wired to the (stub)
// closedAnalyzer.Run pass via the shared newAnalyzer helper.
func NewClosedAnalyzer() *analysis.Analyzer {
	analyzer := &closedAnalyzer{}
	flags := flag.NewFlagSet("closedAnalyzer", flag.ExitOnError)
	return newAnalyzer(analyzer.Run, flags)
}

// Run implements the main analysis pass.
// NOTE(review): this is a placeholder — it always returns (nil, nil) and
// performs no analysis yet.
func (a *closedAnalyzer) Run(pass *analysis.Pass) (interface{}, error) {
	// pssa, ok := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
	// if !ok {
	//	return nil, nil
	// }
	return nil, nil
}
@@ -0,0 +1,40 @@
package analyzer
import (
"flag"
"fmt"
"golang.org/x/tools/go/analysis"
)
// ConfigurableModeType selects which analysis pass the configurable
// analyzer dispatches to.
type ConfigurableModeType string

const (
	// ConfigurableAnalyzerDeferOnly runs the defer-only pass (deferOnlyAnalyzer).
	ConfigurableAnalyzerDeferOnly ConfigurableModeType = "defer-only"
	// ConfigurableAnalyzerClosed runs the closed pass (closedAnalyzer).
	ConfigurableAnalyzerClosed ConfigurableModeType = "closed"
)

// ConifgurableAnalyzer holds the mode chosen via the -mode flag.
// NOTE(review): the exported type name is misspelled ("Conifgurable");
// renaming it would break external callers, so it is kept as-is.
type ConifgurableAnalyzer struct {
	Mode string
}

// NewConfigurableAnalyzer builds an analysis.Analyzer whose mode defaults to
// the given value and can be overridden with the "mode" flag.
func NewConfigurableAnalyzer(mode ConfigurableModeType) *analysis.Analyzer {
	cfgAnalyzer := &ConifgurableAnalyzer{}
	flags := flag.NewFlagSet("cfgAnalyzer", flag.ExitOnError)
	flags.StringVar(&cfgAnalyzer.Mode, "mode", string(mode),
		"Mode to run the analyzer in. (defer-only, closed)")
	return newAnalyzer(cfgAnalyzer.run, flags)
}

// run dispatches to the concrete analyzer selected by c.Mode, or returns an
// error for an unknown mode.
func (c *ConifgurableAnalyzer) run(pass *analysis.Pass) (interface{}, error) {
	switch c.Mode {
	case string(ConfigurableAnalyzerDeferOnly):
		analyzer := &deferOnlyAnalyzer{}
		return analyzer.Run(pass)
	case string(ConfigurableAnalyzerClosed):
		analyzer := &closedAnalyzer{}
		return analyzer.Run(pass)
	default:
		return nil, fmt.Errorf("invalid mode: %s", c.Mode)
	}
}
@@ -0,0 +1,441 @@
package analyzer
import (
"flag"
"go/types"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/buildssa"
"golang.org/x/tools/go/ssa"
)
// Names of the SQL result types and the close method the analyzer tracks.
const (
	rowsName      = "Rows"
	stmtName      = "Stmt"
	namedStmtName = "NamedStmt"
	closeMethod   = "Close"
)

// action classifies what a single SSA instruction does with a tracked value.
type action uint8

const (
	actionUnhandled     action = iota // instruction does not affect the value's close state
	actionHandled                     // closed indirectly (e.g. inside a closure or via a dereference)
	actionReturned                    // value escapes via return or struct-field store; caller's responsibility
	actionPassed                      // passed along (call arg, phi, interface conversion)
	actionClosed                      // Close was called (directly or deferred)
	actionUnvaluedCall                // call instruction with no callee value
	actionUnvaluedDefer               // defer that is not a recognizable Close
	actionNoOp                        // store whose address is never referenced
)

var (
	// sqlPackages lists the import paths whose Rows/Stmt/NamedStmt types
	// are checked for missing Close calls.
	sqlPackages = []string{
		"database/sql",
		"github.com/jmoiron/sqlx",
		"github.com/jackc/pgx/v5",
		"github.com/jackc/pgx/v5/pgxpool",
	}
)
// deferOnlyAnalyzer reports tracked SQL values (Rows/Stmt/NamedStmt) that are
// never closed, and Close calls that are not deferred.
type deferOnlyAnalyzer struct{}

// NewDeferOnlyAnalyzer constructs an analysis.Analyzer wired to
// deferOnlyAnalyzer.Run via the shared newAnalyzer helper.
func NewDeferOnlyAnalyzer() *analysis.Analyzer {
	analyzer := &deferOnlyAnalyzer{}
	flags := flag.NewFlagSet("deferOnlyAnalyzer", flag.ExitOnError)
	return newAnalyzer(analyzer.Run, flags)
}
// Run implements the main analysis pass: it walks the SSA form of every
// source function, finds call instructions that produce one of the tracked
// SQL types, and reports values that are never closed as well as Close calls
// that are not deferred.
func (a *deferOnlyAnalyzer) Run(pass *analysis.Pass) (interface{}, error) {
	pssa, ok := pass.ResultOf[buildssa.Analyzer].(*buildssa.SSA)
	if !ok {
		// buildssa result missing or of an unexpected type; nothing to analyze.
		return nil, nil
	}

	// Build list of types we are looking for
	targetTypes := getTargetTypes(pssa, sqlPackages)

	// If none of the types are found, skip
	if len(targetTypes) == 0 {
		return nil, nil
	}

	funcs := pssa.SrcFuncs
	for _, f := range funcs {
		for _, b := range f.Blocks {
			for i := range b.Instrs {
				// Check if instruction is call that returns a target pointer type
				targetValues := getTargetTypesValues(b, i, targetTypes)
				if len(targetValues) == 0 {
					continue
				}

				// For each found target check if they are closed and deferred
				for _, targetValue := range targetValues {
					refs := (*targetValue.value).Referrers()
					isClosed := checkClosed(refs, targetTypes)
					if !isClosed {
						pass.Reportf((targetValue.instr).Pos(), "Rows/Stmt/NamedStmt was not closed")
					}

					checkDeferred(pass, refs, targetTypes, false)
				}
			}
		}
	}

	return nil, nil
}
// getTargetTypes collects the concrete types to track from every target SQL
// package that is actually imported by the analyzed program. For each package
// it gathers *Rows, Rows (value form), *Stmt and *NamedStmt, skipping any
// that the package does not declare. The result mixes *types.Pointer and
// *types.Named values, hence the []any element type.
func getTargetTypes(pssa *buildssa.SSA, targetPackages []string) []any {
	targets := []any{}
	for _, sqlPkg := range targetPackages {
		pkg := pssa.Pkg.Prog.ImportedPackage(sqlPkg)
		if pkg == nil {
			// the SQL package being checked isn't imported
			continue
		}

		rowsPtrType := getTypePointerFromName(pkg, rowsName)
		if rowsPtrType != nil {
			targets = append(targets, rowsPtrType)
		}

		rowsType := getTypeFromName(pkg, rowsName)
		if rowsType != nil {
			targets = append(targets, rowsType)
		}

		stmtType := getTypePointerFromName(pkg, stmtName)
		if stmtType != nil {
			targets = append(targets, stmtType)
		}

		namedStmtType := getTypePointerFromName(pkg, namedStmtName)
		if namedStmtType != nil {
			targets = append(targets, namedStmtType)
		}
	}

	return targets
}
// getTypePointerFromName looks up the named type `name` in pkg and returns a
// pointer type to it (*pkg.Name), or nil if the package does not declare it
// or it is not a named type.
func getTypePointerFromName(pkg *ssa.Package, name string) *types.Pointer {
	pkgType := pkg.Type(name)
	if pkgType == nil {
		// this package does not use Rows/Stmt/NamedStmt
		return nil
	}

	obj := pkgType.Object()
	named, ok := obj.Type().(*types.Named)
	if !ok {
		return nil
	}

	return types.NewPointer(named)
}
// getTypeFromName looks up the named type `name` in pkg and returns it as a
// *types.Named (the value form, without a pointer), or nil if the package
// does not declare it or it is not a named type.
func getTypeFromName(pkg *ssa.Package, name string) *types.Named {
	pkgType := pkg.Type(name)
	if pkgType == nil {
		// this package does not use Rows/Stmt
		return nil
	}

	obj := pkgType.Object()
	named, ok := obj.Type().(*types.Named)
	if !ok {
		return nil
	}

	return named
}
// targetValue pairs a tracked SSA value with the call instruction that
// produced it, so diagnostics can point at the originating call site.
type targetValue struct {
	value *ssa.Value
	instr ssa.Instruction
}

// getTargetTypesValues inspects the i-th instruction of block b. If it is a
// call whose signature returns one of the target types, the function collects
// every referrer of that call which either receives the value as a first call
// argument or is itself a value of the target type, and returns them paired
// with the originating call.
func getTargetTypesValues(b *ssa.BasicBlock, i int, targetTypes []any) []targetValue {
	targetValues := []targetValue{}

	instr := b.Instrs[i]
	call, ok := instr.(*ssa.Call)
	if !ok {
		return targetValues
	}

	signature := call.Call.Signature()
	results := signature.Results()
	for i := 0; i < results.Len(); i++ {
		v := results.At(i)
		varType := v.Type()

		for _, targetType := range targetTypes {
			// Narrow the heterogeneous []any entry down to a types.Type.
			var tt types.Type

			switch t := targetType.(type) {
			case *types.Pointer:
				tt = t
			case *types.Named:
				tt = t
			default:
				continue
			}

			if !types.Identical(varType, tt) {
				continue
			}

			for _, cRef := range *call.Referrers() {
				switch instr := cRef.(type) {
				case *ssa.Call:
					// The value is forwarded as the first argument of another call.
					if len(instr.Call.Args) >= 1 && types.Identical(instr.Call.Args[0].Type(), tt) {
						targetValues = append(targetValues, targetValue{
							value: &instr.Call.Args[0],
							instr: call,
						})
					}
				case ssa.Value:
					// The referrer is itself a value of the tracked type (e.g. an extract).
					if types.Identical(instr.Type(), tt) {
						targetValues = append(targetValues, targetValue{
							value: &instr,
							instr: call,
						})
					}
				}
			}
		}
	}

	return targetValues
}
// checkClosed reports whether the value whose referrers are given is
// considered closed: any referrer that closes, returns, or otherwise handles
// the value satisfies the check. actionPassed only satisfies it when it is
// the value's final referrer (passed away and never used afterwards).
func checkClosed(refs *[]ssa.Instruction, targetTypes []any) bool {
	numInstrs := len(*refs)
	for idx, ref := range *refs {
		action := getAction(ref, targetTypes)
		switch action {
		case actionClosed, actionReturned, actionHandled:
			return true
		case actionPassed:
			// Passed and not used after
			if numInstrs == idx+1 {
				return true
			}
		}
	}

	return false
}
// getAction classifies what a single SSA instruction does with a tracked
// value. The case order and early returns are significant: the first matching
// condition wins, and anything not explicitly classified falls through to
// actionUnhandled.
func getAction(instr ssa.Instruction, targetTypes []any) action {
	switch instr := instr.(type) {
	case *ssa.Defer:
		// defer v.Close() — via a direct value...
		if instr.Call.Value != nil {
			name := instr.Call.Value.Name()
			if name == closeMethod {
				return actionClosed
			}
		}

		// ...or via an interface method.
		if instr.Call.Method != nil {
			name := instr.Call.Method.Name()
			if name == closeMethod {
				return actionClosed
			}
		} else if instr.Call.Value != nil {
			// If it is a deferred function, go further down the call chain
			if f, ok := instr.Call.Value.(*ssa.Function); ok {
				for _, b := range f.Blocks {
					if checkClosed(&b.Instrs, targetTypes) {
						return actionHandled
					}
				}
			}
		}

		return actionUnvaluedDefer
	case *ssa.Call:
		if instr.Call.Value == nil {
			return actionUnvaluedCall
		}

		// Only a Close invoked on a receiver of a tracked type counts as
		// closing; a call on anything else merely passes the value along.
		isTarget := false
		staticCallee := instr.Call.StaticCallee()
		if staticCallee != nil {
			receiver := instr.Call.StaticCallee().Signature.Recv()
			if receiver != nil {
				isTarget = isTargetType(receiver.Type(), targetTypes)
			}
		}

		name := instr.Call.Value.Name()
		if isTarget && name == closeMethod {
			return actionClosed
		}

		if !isTarget {
			return actionPassed
		}
	case *ssa.Phi:
		return actionPassed
	case *ssa.MakeInterface:
		return actionPassed
	case *ssa.Store:
		// A Row/Stmt is stored in a struct, which may be closed later
		// by a different flow.
		if _, ok := instr.Addr.(*ssa.FieldAddr); ok {
			return actionReturned
		}

		if instr.Addr.Referrers() == nil {
			return actionNoOp
		}

		if len(*instr.Addr.Referrers()) == 0 {
			return actionNoOp
		}

		// Stored into a local that a closure captures: check whether the
		// closure body closes it.
		for _, aRef := range *instr.Addr.Referrers() {
			if c, ok := aRef.(*ssa.MakeClosure); ok {
				if f, ok := c.Fn.(*ssa.Function); ok {
					for _, b := range f.Blocks {
						if checkClosed(&b.Instrs, targetTypes) {
							return actionHandled
						}
					}
				}
			}
		}
	case *ssa.UnOp:
		// A dereference that yields a tracked type: follow its referrers.
		instrType := instr.Type()
		for _, targetType := range targetTypes {
			var tt types.Type

			switch t := targetType.(type) {
			case *types.Pointer:
				tt = t
			case *types.Named:
				tt = t
			default:
				continue
			}

			if types.Identical(instrType, tt) {
				if checkClosed(instr.Referrers(), targetTypes) {
					return actionHandled
				}
			}
		}
	case *ssa.FieldAddr:
		if checkClosed(instr.Referrers(), targetTypes) {
			return actionHandled
		}
	case *ssa.Return:
		// Returning a tracked value hands responsibility to the caller.
		if len(instr.Results) != 0 {
			for _, result := range instr.Results {
				resultType := result.Type()
				for _, targetType := range targetTypes {
					var tt types.Type

					switch t := targetType.(type) {
					case *types.Pointer:
						tt = t
					case *types.Named:
						tt = t
					default:
						continue
					}

					if types.Identical(resultType, tt) {
						return actionReturned
					}
				}
			}
		}
	}

	return actionUnhandled
}
// checkDeferred walks the referrers of a tracked value and reports a Close
// that is called directly instead of being deferred. inDefer is true when the
// walk has entered a closure reached through a defer/store, in which case a
// direct Close is acceptable. The function returns as soon as it sees a Close
// (deferred or not), so at most one diagnostic is emitted per value.
func checkDeferred(pass *analysis.Pass, instrs *[]ssa.Instruction, targetTypes []any, inDefer bool) {
	for _, instr := range *instrs {
		switch instr := instr.(type) {
		case *ssa.Defer:
			// defer Close — correct usage, nothing to report.
			if instr.Call.Value != nil && instr.Call.Value.Name() == closeMethod {
				return
			}

			if instr.Call.Method != nil && instr.Call.Method.Name() == closeMethod {
				return
			}
		case *ssa.Call:
			if instr.Call.Value != nil && instr.Call.Value.Name() == closeMethod {
				if !inDefer {
					pass.Reportf(instr.Pos(), "Close should use defer")
				}

				return
			}
		case *ssa.Store:
			if instr.Addr.Referrers() == nil {
				return
			}

			if len(*instr.Addr.Referrers()) == 0 {
				return
			}

			// Follow closures that capture the stored value; inside them a
			// direct Close is fine (inDefer=true).
			for _, aRef := range *instr.Addr.Referrers() {
				if c, ok := aRef.(*ssa.MakeClosure); ok {
					if f, ok := c.Fn.(*ssa.Function); ok {
						for _, b := range f.Blocks {
							checkDeferred(pass, &b.Instrs, targetTypes, true)
						}
					}
				}
			}
		case *ssa.UnOp:
			// Follow dereferences that still carry a tracked type.
			instrType := instr.Type()
			for _, targetType := range targetTypes {
				var tt types.Type

				switch t := targetType.(type) {
				case *types.Pointer:
					tt = t
				case *types.Named:
					tt = t
				default:
					continue
				}

				if types.Identical(instrType, tt) {
					checkDeferred(pass, instr.Referrers(), targetTypes, inDefer)
				}
			}
		case *ssa.FieldAddr:
			checkDeferred(pass, instr.Referrers(), targetTypes, inDefer)
		}
	}
}
func isTargetType(t types.Type, targetTypes []any) bool {
for _, targetType := range targetTypes {
switch tt := targetType.(type) {
case *types.Pointer:
if types.Identical(t, tt) {
return true
}
case *types.Named:
if types.Identical(t, tt) {
return true
}
}
}
return false
}
+2
View File
@@ -7,6 +7,7 @@
*.so
*.swp
/gosec
/gosec-debug
# Folders
_obj
@@ -25,6 +26,7 @@ _cgo_gotypes.go
_cgo_export.*
_testmain.go
coverage.out
*.exe
*.test
+2 -2
View File
@@ -27,12 +27,12 @@ builds:
signs:
- cmd: cosign
signature: "${artifact}.sigstore.json"
stdin: '{{ .Env.COSIGN_PASSWORD}}'
args:
- "sign-blob"
- "--key=/tmp/cosign.key"
- "--output=${signature}"
- "--bundle=${signature}"
- "${artifact}"
- "--yes"
artifacts: all
-81
View File
@@ -1,81 +0,0 @@
# Contributing
## Adding a new rule
New rules can be implemented in two ways:
- as a `gosec.Rule` -- these define an arbitrary function which will be called on every AST node in the analyzed file, and are appropriate for rules that mostly need to reason about a single statement.
- as an Analyzer -- these can operate on the entire program, and receive an [SSA](https://pkg.go.dev/golang.org/x/tools/go/ssa) representation of the package. This type of rule is useful when you need to perform a more complex analysis that requires a great deal of context.
### Adding a gosec.Rule
1. Copy an existing rule file as a starting point-- `./rules/unsafe.go` is a good option, as it implements a very simple rule with no additional supporting logic. Put the copied file in the `./rules/` directory.
2. Change the name of the rule constructor function and of the types in the rule file you've copied so they will be unique.
3. Edit the `Generate` function in `./rules/rulelist.go` to include your rule.
4. Add a RuleID to CWE ID mapping for your rule to the `ruleToCWE` map in `./issue/issue.go`. If you need a CWE that isn't already defined in `./cwe/data.go`, add it to the `idWeaknessess` map in that file.
5. Use `make` to compile `gosec`. The binary will now contain your rule.
To make your rule actually useful, you will likely want to use the support functions defined in `./resolve.go`, `./helpers.go` and `./call_list.go`. There are inline comments explaining the purpose of most of these functions, and you can find usage examples in the existing rule files.
### Adding an Analyzer
1. Create a new go file under `./analyzers/` with the following scaffolding in it:
```go
package analyzers
import (
"fmt"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/buildssa"
"github.com/securego/gosec/v2/issue"
)
const defaultIssueDescriptionMyAnalyzer = "My new analyzer!"
func newMyAnalyzer(id string, description string) *analysis.Analyzer {
return &analysis.Analyzer{
Name: id,
Doc: description,
Run: runMyAnalyzer,
Requires: []*analysis.Analyzer{buildssa.Analyzer},
}
}
func runMyAnalyzer(pass *analysis.Pass) (interface{}, error) {
ssaResult, err := getSSAResult(pass)
if err != nil {
return nil, fmt.Errorf("building ssa representation: %w", err)
}
var issues []*issue.Issue
fmt.Printf("My Analyzer ran! %+v\n", ssaResult)
return issues, nil
}
```
2. Add the analyzer to `./analyzers/analyzerslist.go` in the `defaultAnalyzers` variable under an entry like `{"G999", "My test analyzer", newMyAnalyzer}`
3. Add a RuleID to CWE ID mapping for your rule to the `ruleToCWE` map in `./issue/issue.go`. If you need a CWE that isn't already defined in `./cwe/data.go`, add it to the `idWeaknessess` map in that file.
4. `make`; then run the `gosec` binary produced. You should see the output from our print statement.
5. You now have a working example analyzer to play with-- look at the other implemented analyzers for ideas on how to make useful rules.
## Developing your rule
There are some utility tools which are useful for analyzing the SSA and AST representation `gosec` works with before writing rules or analyzers.
For instance to dump the SSA, the [ssadump](https://pkg.go.dev/golang.org/x/tools/cmd/ssadump) tool can be used as following:
```bash
ssadump -build F main.go
```
Consult the documentation for ssadump for an overview of available output flags and options.
For outputting the AST and supporting information, there is a utility tool in <https://github.com/securego/gosec/blob/master/cmd/gosecutil/tools.go> which can be compiled and used as standalone.
```bash
gosecutil -tool ast main.go
```
Valid tool arguments for this command are `ast`, `callobj`, `uses`, `types`, `defs`, `comments`, and `imports`.
+435
View File
@@ -0,0 +1,435 @@
# Development
## Table of Contents
- [Local workflow](#local-workflow)
- [Contributing: adding rules and analyzers](#contributing-adding-rules-and-analyzers)
- [Add an AST rule](#add-an-ast-rule)
- [Add an SSA analyzer](#add-an-ssa-analyzer)
- [Creating taint analysis rules](#creating-taint-analysis-rules)
- [Steps](#steps)
- [Taint configuration reference](#taint-configuration-reference)
- [Sources](#sources)
- [Sinks](#sinks)
- [Sanitizers](#sanitizers)
- [Common taint sources](#common-taint-sources)
- [AI-generated rule workflow (Copilot)](#ai-generated-rule-workflow-copilot)
- [AI-generated bug fix workflow (Copilot)](#ai-generated-bug-fix-workflow-copilot)
- [AI-supported Go version update workflow (Copilot)](#ai-supported-go-version-update-workflow-copilot)
- [Rule development utilities](#rule-development-utilities)
- [SARIF types generation](#sarif-types-generation)
- [Performance regression guard](#performance-regression-guard)
- [Generate TLS rule data](#generate-tls-rule-data)
- [Release](#release)
- [Docker image](#docker-image)
## Local workflow
- Go version: `1.25+` (see `go.mod`)
- Build: `make`
- Run all checks used in CI (format, vet, security scan, vulnerability scan, tests): `make test`
- Run linter only: `make golangci`
## Contributing: adding rules and analyzers
gosec supports three implementation styles:
- **AST rules** (`gosec.Rule`) for node-level checks in `rules/`
- **SSA analyzers** (`analysis.Analyzer`) for whole-program context in `analyzers/`
- **Taint analyzers** for source-to-sink data-flow checks in `analyzers/` via `taint.NewGosecAnalyzer`
### Add an AST rule
1. Create a new file in `rules/` (for example, use `rules/unsafe.go` as a simple template).
2. Implement your rule constructor and `Match` logic.
3. Register the rule in `rules/rulelist.go`.
4. Add rule-to-CWE mapping in `issue/issue.go` (and add CWE data in `cwe/data.go` only if needed).
5. Add tests and samples:
- sample code in `testutils/`
- rule tests in `rules/` or integration tests in `analyzer_test.go`
### Add an SSA analyzer
1. Create a new file in `analyzers/`.
2. Define the analyzer and require `buildssa.Analyzer`.
3. Read SSA input using `ssautil.GetSSAResult(pass)`.
4. Return findings as `[]*issue.Issue`.
5. Register in `analyzers/analyzerslist.go`.
6. Add rule-to-CWE mapping in `issue/issue.go`.
7. Add tests and sample code in `analyzers/` and `testutils/`.
Minimal skeleton:
```go
package analyzers
import (
"fmt"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/buildssa"
"github.com/securego/gosec/v2/internal/ssautil"
"github.com/securego/gosec/v2/issue"
)
func newMyAnalyzer(id, description string) *analysis.Analyzer {
return &analysis.Analyzer{
Name: id,
Doc: description,
Run: runMyAnalyzer,
Requires: []*analysis.Analyzer{buildssa.Analyzer},
}
}
func runMyAnalyzer(pass *analysis.Pass) (interface{}, error) {
ssaResult, err := ssautil.GetSSAResult(pass)
if err != nil {
return nil, fmt.Errorf("getting SSA result: %w", err)
}
_ = ssaResult
var issues []*issue.Issue
return issues, nil
}
```
### Creating taint analysis rules
gosec taint analyzers track data flow from untrusted sources to dangerous sinks.
Current taint rules include SQL injection, command injection, path traversal, SSRF, XSS, log injection, and SMTP injection.
#### Steps
1. Create a new analyzer file in `analyzers/` (for example `analyzers/newvuln.go`) with both:
- the taint `Config` (sources, sinks, optional sanitizers)
- the analyzer constructor that returns `taint.NewGosecAnalyzer(...)`
```go
package analyzers
import (
"golang.org/x/tools/go/analysis"
"github.com/securego/gosec/v2/taint"
)
func NewVulnerability() taint.Config {
return taint.Config{
Sources: []taint.Source{
{Package: "net/http", Name: "Request", Pointer: true},
{Package: "os", Name: "Args", IsFunc: true},
},
Sinks: []taint.Sink{
{Package: "dangerous/package", Method: "DangerousFunc"},
},
}
}
func newNewVulnAnalyzer(id string, description string) *analysis.Analyzer {
config := NewVulnerability()
rule := NewVulnerabilityRule
rule.ID = id
rule.Description = description
return taint.NewGosecAnalyzer(&rule, &config)
}
```
2. Register the analyzer in `analyzers/analyzerslist.go`:
```go
var defaultAnalyzers = []AnalyzerDefinition{
// ... existing analyzers ...
{"G7XX", "Description of vulnerability", newNewVulnAnalyzer},
}
```
3. Add sample programs in `testutils/g7xx_samples.go`.
4. Add the analyzer test in `analyzers/analyzers_test.go`:
```go
It("should detect your new vulnerability", func() {
runner("G7XX", testutils.SampleCodeG7XX)
})
```
Each taint analyzer keeps its configuration function in the same file as the analyzer.
Reference implementations:
- `analyzers/sqlinjection.go` (G701)
- `analyzers/commandinjection.go` (G702)
- `analyzers/pathtraversal.go` (G703)
#### Taint configuration reference
##### Sources
Sources define where untrusted data starts:
- `Package`: import path (for example `"net/http"`)
- `Name`: type or function name (for example `"Request"`, `"Getenv"`)
- `Pointer`: set `true` for pointer types (for example `*http.Request`)
- `IsFunc`: set `true` when the source is a function that returns tainted data
##### Sinks
Sinks define where tainted data must not reach:
- `Package`
- `Receiver`: method receiver type, empty for package functions
- `Method`
- `Pointer`: whether receiver is a pointer
- `CheckArgs`: optional argument indexes to inspect; if omitted, all args are inspected
Example:
```go
// For *sql.DB.Query, Args[1] is the query string.
{Package: "database/sql", Receiver: "DB", Method: "Query", Pointer: true, CheckArgs: []int{1}}
// Skip writer arg in fmt.Fprintf and check the rest.
{Package: "fmt", Method: "Fprintf", CheckArgs: []int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}}
```
##### Sanitizers
Sanitizers break taint flow after validation/escaping:
- `Package`
- `Receiver`
- `Method`
- `Pointer`
If data passes through a configured sanitizer, it is treated as safe for subsequent sinks.
#### Common taint sources
| Source Type | Package | Type/Method | Pointer | IsFunc |
|-------------|---------|-------------|---------|--------|
| HTTP Request | `net/http` | `Request` | `true` | `false` |
| Command Line Args | `os` | `Args` | `false` | `true` |
| Environment Variables | `os` | `Getenv` | `false` | `true` |
| File Content | `bufio` | `Reader` | `true` | `false` |
## AI-generated rule workflow (Copilot)
This repository includes a reusable Copilot skill and prompt for creating new gosec rules from an issue description.
- Skill file: `.github/skills/gosec-new-rule/SKILL.md`
- Prompt file: `.github/prompts/create-gosec-rule.prompt.md`
### Use via `/prompt` (recommended)
1. In VS Code Copilot Chat, run `/prompt` and select **Create Gosec Rule**.
2. Fill in the issue fields (`Summary`, repro steps, versions, environment, expected, actual).
3. Submit the prompt.
4. First response should only propose:
- rule ID
- implementation approach (SSA / taint / AST)
- relevance for Go `1.25` and `1.26`
- confirmation request
5. Reply with explicit confirmation (for example: `Confirmed. Proceed with implementation.`).
### Use the skill directly (without `/prompt`)
Send this in Copilot Chat:
```text
Use the skill "Create New Gosec Rule" from .github/skills/gosec-new-rule/SKILL.md.
```
Then paste the same issue template fields and confirm after the proposal step.
### If `/prompt` does not list the prompt
1. Ensure the workspace root is this repository.
2. Confirm the file exists at `.github/prompts/create-gosec-rule.prompt.md`.
3. Reload VS Code window and start a new chat session.
4. As fallback, open the prompt file and send its content directly in chat.
## AI-generated bug fix workflow (Copilot)
This repository also includes a Copilot skill and prompt for fixing bugs described in GitHub issues.
- Skill file: `.github/skills/gosec-fix-issue/SKILL.md`
- Prompt file: `.github/prompts/fix-gosec-bug-from-issue.prompt.md`
### Use via `/prompt` (recommended)
1. In VS Code Copilot Chat, run `/prompt` and select **Fix Gosec Bug From Issue**.
2. Fill in at least the `GitHub issue URL` field (other fields are optional but useful).
3. Submit the prompt.
4. First response should only include:
- reproduction status on `master` (or clear blocker)
- root cause analysis
- detailed fix plan
- confirmation request
5. Reply with explicit confirmation (for example: `Confirmed. Proceed with fix.`).
### Use the skill directly (without `/prompt`)
Send this in Copilot Chat:
```text
Use the skill "Fix Gosec Bug From Issue" from .github/skills/gosec-fix-issue/SKILL.md.
```
Then provide the GitHub issue URL and confirm after the analysis and plan step.
### Expected implementation guardrails
After confirmation, the workflow should:
- keep the fix small and isolated to the problem
- use idiomatic Go and good design
- add positive and negative tests
- add or update `testutils/` code samples when appropriate for reproducing/validating the issue
- validate with build, tests, `golangci-lint`, and a `gosec` CLI run against a sample
## AI-supported Go version update workflow (Copilot)
This repository includes a Copilot skill and prompt to update supported Go versions to the latest patch versions of the two newest major Go series.
- Skill file: `.github/skills/gosec-update-go-versions/SKILL.md`
- Prompt file: `.github/prompts/update-supported-go-versions.prompt.md`
### Use via `/prompt` (recommended)
1. In VS Code Copilot Chat, run `/prompt` and select **Update Supported Go Versions**.
2. Submit the prompt (no additional fields required).
3. The workflow should:
- read `https://go.dev/doc/devel/release`
- detect latest two supported Go series and latest patch for each
- update all active repository locations where supported Go versions are configured or documented
- run validation checks
- create branch, commit, push, and open a PR
### Use the skill directly (without `/prompt`)
Send this in Copilot Chat:
```text
Use the skill "Update Supported Go Versions" from .github/skills/gosec-update-go-versions/SKILL.md.
```
### Expected outputs
The result should include:
- detected versions (`previous_patch`, `latest_patch`, `previous_minor`, `latest_minor`)
- grouped file update summary
- test command result
- branch, commit SHA, PR title, and PR URL
## Rule development utilities
Use these tools while building or debugging rules:
- Dump SSA with [`ssadump`](https://pkg.go.dev/golang.org/x/tools/cmd/ssadump):
```bash
ssadump -build F main.go
```
- Inspect AST/types/defs/imports with `gosecutil`:
```bash
gosecutil -tool ast main.go
```
Valid `-tool` values: `ast`, `callobj`, `uses`, `types`, `defs`, `comments`, `imports`.
## SARIF types generation
Install `schema-generate`:
```bash
go install github.com/a-h/generate/cmd/schema-generate@latest
```
Generate types:
```bash
schema-generate -i sarif-schema-2.1.0.json -o path/to/types.go
```
Most `MarshalJSON`/`UnmarshalJSON` helpers can be removed after generation, except `PropertyBag` where inlined additional properties are useful.
## Performance regression guard
CI includes a taint benchmark guard based on `BenchmarkTaintPackageAnalyzers_SharedCache`.
- Baseline and thresholds: `.github/benchmarks/taint_benchmark_baseline.env`
- Guard script: `tools/check_taint_benchmark.sh`
Run locally:
```bash
bash tools/check_taint_benchmark.sh
```
Update baseline after intentional changes:
```bash
BENCH_COUNT=10 bash tools/check_taint_benchmark.sh --update-baseline
```
If you update the baseline, commit both the benchmark-related code and the baseline file.
## Generate TLS rule data
The TLS rule data is generated from Mozilla recommendations.
From the repository root:
```bash
go generate ./...
```
If `go generate` fails with `exec: "tlsconfig": executable file not found in $PATH`, install the local generator and add `$(go env GOPATH)/bin` to `PATH`:
```bash
export PATH="$(go env GOPATH)/bin:$PATH"
go install ./cmd/tlsconfig
go generate ./...
```
This updates `rules/tls_config.go`.
If you need to install the generator binary outside this repository:
```bash
go install github.com/securego/gosec/v2/cmd/tlsconfig@latest
```
## Release
Tag and push:
```bash
git tag v1.0.0 -m "Release version v1.0.0"
git push origin v1.0.0
```
The release workflow builds binaries and Docker images, then signs artifacts.
Verify signatures:
```bash
cosign verify --key cosign.pub ghcr.io/securego/gosec:<TAG>
cosign verify-blob --key cosign.pub --signature gosec_<VERSION>_darwin_amd64.tar.gz.sig gosec_<VERSION>_darwin_amd64.tar.gz
```
## Docker image
Build locally:
```bash
make image
```
Run against a local project:
```bash
docker run --rm -it -w /<PROJECT>/ -v <YOUR_PROJECT_PATH>/<PROJECT>:/<PROJECT> ghcr.io/securego/gosec:latest /<PROJECT>/...
```
Set `-w` so module dependencies resolve from the mounted project root.
+10 -4
View File
@@ -16,7 +16,7 @@ GOBIN ?= $(GOPATH)/bin
GOSEC ?= $(GOBIN)/gosec
GO_MINOR_VERSION = $(shell $(GO) version | cut -c 14- | cut -d' ' -f1 | cut -d'.' -f2)
GOVULN_MIN_VERSION = 17
GO_VERSION = 1.25
GO_VERSION = 1.26
LDFLAGS = -ldflags "\
-X 'main.Version=$(shell git describe --tags --always)' \
-X 'main.GitTag=$(shell git describe --tags --abbrev=0)' \
@@ -48,7 +48,7 @@ golangci:
sec:
@echo "SECURITY SCANNING"
./$(BIN) ./...
./$(BIN) -exclude-dir=testdata ./...
govulncheck: install-govulncheck
@echo "CHECKING VULNERABILITIES"
@@ -57,7 +57,7 @@ govulncheck: install-govulncheck
fi
test-coverage:
go test -race -v -count=1 -coverprofile=coverage.out ./...
go test -race -v -count=1 -coverpkg=./... -coverprofile=coverage.out ./...
build:
go build $(LDFLAGS) -o $(BIN) ./cmd/gosec/
@@ -65,9 +65,15 @@ build:
build-race:
go build -race $(LDFLAGS) -o $(BIN) ./cmd/gosec/
build-debug:
go build -tags debug $(LDFLAGS) -o $(BIN)-debug ./cmd/gosec/
build-debug-race:
go build -race -tags debug $(LDFLAGS) -o $(BIN)-debug ./cmd/gosec/
clean:
rm -rf build vendor dist coverage.out
rm -f release image $(BIN)
rm -f release image $(BIN) $(BIN)-debug
release:
@echo "Releasing the gosec binary..."
+176 -172
View File
@@ -5,6 +5,24 @@ Inspects source code for security problems by scanning the Go AST and SSA code r
<img src="https://securego.io/img/gosec.png" width="320">
## Quick links
- [GitHub Action](#github-action)
- [Local installation](#local-installation)
- [Quick start](#quick-start)
- [Common usage patterns](#common-usage-patterns)
- [Selecting rules](#selecting-rules)
- [Output formats](#output-formats)
> ⚠️ Container image migration notice: the `gosec` image was migrated from Docker Hub to `ghcr.io/securego/gosec`.
> Starting with release `v2.24.7` the image is no longer published in Docker Hub.
## Features
- **Pattern-based rules** for detecting common security issues in Go code
- **SSA-based analyzers** for type conversions, slice bounds, and crypto issues
- **Taint analysis** for tracking data flow from user input to dangerous functions (SQL injection, command injection, path traversal, SSRF, XSS, log injection)
## License
Licensed under the Apache License, Version 2.0 (the "License").
@@ -20,39 +38,18 @@ You may obtain a copy of the License [here](http://www.apache.org/licenses/LICEN
[![GoDoc](https://pkg.go.dev/badge/github.com/securego/gosec/v2)](https://pkg.go.dev/github.com/securego/gosec/v2)
[![Docs](https://readthedocs.org/projects/docs/badge/?version=latest)](https://securego.io/)
[![Downloads](https://img.shields.io/github/downloads/securego/gosec/total.svg)](https://github.com/securego/gosec/releases)
[![Docker Pulls](https://img.shields.io/docker/pulls/securego/gosec.svg)](https://hub.docker.com/r/securego/gosec/tags)
[![GHCR](https://img.shields.io/badge/ghcr.io-securego%2Fgosec-blue)](https://github.com/orgs/securego/packages/container/package/gosec)
[![Slack](https://img.shields.io/badge/Slack-4A154B?style=for-the-badge&logo=slack&logoColor=white)](http://securego.slack.com)
[![go-recipes](https://raw.githubusercontent.com/nikolaydubina/go-recipes/main/badge.svg?raw=true)](https://github.com/nikolaydubina/go-recipes)
## Install
### CI Installation
```bash
# binary will be $(go env GOPATH)/bin/gosec
curl -sfL https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s -- -b $(go env GOPATH)/bin vX.Y.Z
# or install it into ./bin/
curl -sfL https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s vX.Y.Z
# In alpine linux (as it does not come with curl by default)
wget -O - -q https://raw.githubusercontent.com/securego/gosec/master/install.sh | sh -s vX.Y.Z
# If you want to use the checksums provided on the "Releases" page
# then you will have to download a tar.gz file for your operating system instead of a binary file
wget https://github.com/securego/gosec/releases/download/vX.Y.Z/gosec_vX.Y.Z_OS.tar.gz
# The file will be in the current folder where you run the command
# and you can check the checksum like this
echo "<check sum from the check sum file> gosec_vX.Y.Z_OS.tar.gz" | sha256sum -c -
gosec --help
```
## Installation
### GitHub Action
You can run `gosec` as a GitHub action as follows:
Use the `@master` tag, which is pinned to the latest stable release. This will provide stable behavior.
```yaml
name: Run Gosec
on:
@@ -76,6 +73,38 @@ jobs:
args: ./...
```
#### Scanning Projects with Private Modules
If your project imports private Go modules, you need to configure authentication so that `gosec` can fetch the dependencies. Set the following environment variables in your workflow:
- `GOPRIVATE`: A comma-separated list of module path prefixes that should be considered private (e.g., `github.com/your-org/*`).
- `GITHUB_AUTHENTICATION_TOKEN`: A GitHub token with read access to your private repositories.
```yaml
name: Run Gosec
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
tests:
runs-on: ubuntu-latest
env:
GO111MODULE: on
GOPRIVATE: github.com/your-org/*
GITHUB_AUTHENTICATION_TOKEN: ${{ secrets.PRIVATE_REPO_TOKEN }}
steps:
- name: Checkout Source
uses: actions/checkout@v3
- name: Run Gosec Security Scanner
uses: securego/gosec@v2
with:
args: ./...
```
### Integrating with code scanning
You can [integrate third-party code analysis tools](https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/integrating-with-code-scanning) with GitHub code scanning by uploading data as SARIF files.
@@ -101,7 +130,7 @@ jobs:
- name: Checkout Source
uses: actions/checkout@v3
- name: Run Gosec Security Scanner
uses: securego/gosec@master
uses: securego/gosec@v2
with:
# we let the report trigger content trigger a failure using the GitHub Security features.
args: '-no-fail -fmt sarif -out results.sarif ./...'
@@ -112,12 +141,48 @@ jobs:
sarif_file: results.sarif
```
### Go Analysis
The `goanalysis` package provides a [`golang.org/x/tools/go/analysis.Analyzer`](https://pkg.go.dev/golang.org/x/tools/go/analysis) for integration with tools that support the standard Go analysis interface, such as Bazel's [nogo](https://github.com/bazelbuild/rules_go/blob/master/go/nogo.rst) framework:
```starlark
nogo(
name = "nogo",
deps = [
"@com_github_securego_gosec_v2//goanalysis",
# add more analyzers as needed
],
visibility = ["//visibility:public"],
)
```
### Local Installation
gosec requires Go 1.25 or newer.
```bash
go install github.com/securego/gosec/v2/cmd/gosec@latest
```
## Quick start
```bash
# Scan all packages in current module
gosec ./...
# Write JSON report
gosec -fmt json -out results.json ./...
# Write SARIF report for code scanning
gosec -fmt sarif -out results.sarif ./...
```
### Exit codes
- `0`: scan finished without unsuppressed findings/errors
- `1`: at least one unsuppressed finding or processing error
- Use `-no-fail` to always return `0`
## Usage
Gosec can be configured to only run a subset of rules, to exclude certain file
@@ -127,51 +192,21 @@ directory you can supply `./...` as the input argument.
### Available rules
- G101: Look for hard coded credentials
- G102: Bind to all interfaces
- G103: Audit the use of unsafe block
- G104: Audit errors not checked
- G106: Audit the use of ssh.InsecureIgnoreHostKey
- G107: Url provided to HTTP request as taint input
- G108: Profiling endpoint automatically exposed on /debug/pprof
- G109: Potential Integer overflow made by strconv.Atoi result conversion to int16/32
- G110: Potential DoS vulnerability via decompression bomb
- G111: Potential directory traversal
- G112: Potential slowloris attack
- G114: Use of net/http serve function that has no support for setting timeouts
- G115: Potential integer overflow when converting between integer types
- G201: SQL query construction using format string
- G202: SQL query construction using string concatenation
- G203: Use of unescaped data in HTML templates
- G204: Audit use of command execution
- G301: Poor file permissions used when creating a directory
- G302: Poor file permissions used with chmod
- G303: Creating tempfile using a predictable path
- G304: File path provided as taint input
- G305: File traversal when extracting zip/tar archive
- G306: Poor file permissions used when writing to a new file
- G307: Poor file permissions used when creating a file with os.Create
- G401: Detect the usage of MD5 or SHA1
- G402: Look for bad TLS connection settings
- G403: Ensure minimum RSA key length of 2048 bits
- G404: Insecure random number source (rand)
- G405: Detect the usage of DES or RC4
- G406: Detect the usage of MD4 or RIPEMD160
- G407: Detect the usage of hardcoded Initialization Vector(IV)/Nonce
- G501: Import blocklist: crypto/md5
- G502: Import blocklist: crypto/des
- G503: Import blocklist: crypto/rc4
- G504: Import blocklist: net/http/cgi
- G505: Import blocklist: crypto/sha1
- G506: Import blocklist: golang.org/x/crypto/md4
- G507: Import blocklist: golang.org/x/crypto/ripemd160
- G601: Implicit memory aliasing of items from a range statement (only for Go 1.21 or lower)
- G602: Slice access out of bounds
gosec includes rules across these categories:
- `G1xx`: general secure coding issues (for example hardcoded credentials, unsafe usage, HTTP hardening)
- `G2xx`: injection risks in query/template/command construction
- `G3xx`: file and path handling risks (permissions, traversal, temp files, archive extraction)
- `G4xx`: crypto and TLS weaknesses
- `G5xx`: blocklisted imports
- `G6xx`: Go-specific correctness/security checks (for example range aliasing and slice bounds)
- `G7xx`: taint analysis rules (SQL injection, command injection, path traversal, SSRF, XSS, log and SMTP injection)
For the full list, rule descriptions, and per-rule configuration, see [RULES.md](RULES.md).
### Retired rules
- G105: Audit the use of math/big.Int.Exp - [CVE is fixed](https://github.com/golang/go/issues/15184)
- G113: Usage of Rat.SetString in math/big with an overflow (CVE-2022-23772). This affected Go <1.16.14 and Go <1.17.7, which are no longer supported by gosec.
- G307: Deferring a method which returns an error - causing more inconvenience than fixing a security issue, despite the details from this [blog post](https://www.joeshaw.org/dont-defer-close-on-writable-files/)
### Selecting rules
@@ -212,6 +247,45 @@ A number of global settings can be provided in a configuration file as follows:
$ gosec -conf config.json .
```
### Path-Based Rule Exclusions
Large repositories with multiple components may need different security rules
for different paths. Use `exclude-rules` to suppress specific rules for specific
paths.
**Configuration File:**
```json
{
"exclude-rules": [
{
"path": "cmd/.*",
"rules": ["G204", "G304"]
},
{
"path": "scripts/.*",
"rules": ["*"]
}
]
}
```
**CLI Flag:**
```bash
# Exclude G204 and G304 from cmd/ directory
gosec --exclude-rules="cmd/.*:G204,G304" ./...
# Exclude all rules from scripts/ directory
gosec --exclude-rules="scripts/.*:*" ./...
# Multiple exclusions
gosec --exclude-rules="cmd/.*:G204,G304;test/.*:G101" ./...
```
| Field | Type | Description |
|-------|------|-------------|
| `path` | string (regex) | Regular expression matched against file paths |
| `rules` | []string | Rule IDs to exclude. Use `*` to exclude all rules |
#### Rule Configuration
Some rules accept configuration flags as well; these flags are documented in [RULES.md](https://github.com/securego/gosec/blob/master/RULES.md).
@@ -224,8 +298,14 @@ The Go module version is parsed using the `go list` command which in some cases
### Dependencies
gosec will fetch automatically the dependencies of the code which is being analyzed when go module is turned on (e.g.`GO111MODULE=on`). If this is not the case,
the dependencies need to be explicitly downloaded by running the `go get -d` command before the scan.
gosec loads packages using Go modules. In most projects, dependencies are resolved automatically during scanning.
If dependencies are missing, run:
```bash
go mod tidy
go mod download
```
### Excluding test files and folders
@@ -310,7 +390,7 @@ As with all automated detection tools, there will be cases of false positives.
In cases where gosec reports a failure that has been manually verified as being safe,
it is possible to annotate the code with a comment that starts with `#nosec`.
The `#nosec` comment should have the format `#nosec [RuleList] [-- Justification]`.
The `#nosec` comment should have the format `#nosec [RuleList] [- Justification]`.
The `#nosec` comment needs to be placed on the line where the warning is reported.
@@ -356,18 +436,16 @@ gosec -nosec=true ./...
### Tracking suppressions
As described above, we could suppress violations externally (using `-include`/
`-exclude`) or inline (using `#nosec` annotations) in gosec. This suppression
inflammation can be used to generate corresponding signals for auditing
purposes.
`-exclude`) or inline (using `#nosec` annotations). Suppression metadata can be emitted for auditing.
We could track suppressions by the `-track-suppressions` flag as follows:
Enable suppression tracking with `-track-suppressions`:
```bash
gosec -track-suppressions -exclude=G101 -fmt=sarif -out=results.sarif ./...
```
- For external suppressions, gosec records suppression info where `kind` is
`external` and `justification` is a certain sentence "Globally suppressed".
`external` and `justification` is `Globally suppressed.`.
- For inline suppressions, gosec records suppression info where `kind` is
`inSource` and `justification` is the text after two or more dashes in the
comment.
@@ -385,7 +463,7 @@ gosec -tags debug,ignore ./...
### Output formats
gosec currently supports `text`, `json`, `yaml`, `csv`, `sonarqube`, `JUnit XML`, `html` and `golint` output formats. By default
gosec supports `text`, `json`, `yaml`, `csv`, `junit-xml`, `html`, `sonarqube`, `golint`, and `sarif`. By default,
results will be reported to stdout, but can also be written to an output
file. The output format is controlled by the `-fmt` flag, and the output file is controlled by the `-out` flag as follows:
@@ -394,8 +472,8 @@ file. The output format is controlled by the `-fmt` flag, and the output file is
$ gosec -fmt=json -out=results.json *.go
```
Results will be reported to stdout as well as to the provided output file by `-stdout` flag. The `-verbose` flag overrides the
output format when stdout the results while saving them in the output file
Use `-stdout` to print results while also writing `-out`.
Use `-verbose` to override stdout format while preserving the file format.
```bash
# Write output in json format to results.json as well as stdout
$ gosec -fmt=json -out=results.json -stdout *.go
@@ -406,102 +484,28 @@ $ gosec -fmt=json -out=results.json -stdout -verbose=text *.go
**Note:** gosec generates the [generic issue import format](https://docs.sonarqube.org/latest/analysis/generic-issue/) for SonarQube, and a report has to be imported into SonarQube using `sonar.externalIssuesReportPaths=path/to/gosec-report.json`.
## Common usage patterns
```bash
# Fail only on medium+ severity findings
gosec -severity medium ./...
# Fail only on medium+ confidence findings
gosec -confidence medium ./...
# Exclude specific rules for specific paths
gosec --exclude-rules="cmd/.*:G204,G304;scripts/.*:*" ./...
# Exclude generated files in scan
gosec -exclude-generated ./...
# Include test files in scan
gosec -tests ./...
```
## Development
[CONTRIBUTING.md](https://github.com/securego/gosec/blob/master/CONTRIBUTING.md) contains detailed information about adding new rules to gosec.
### Build
You can build the binary with:
```bash
make
```
### Note on Sarif Types Generation
Install the tool with :
```bash
go get -u github.com/a-h/generate/cmd/schema-generate
```
Then generate the types with :
```bash
schema-generate -i sarif-schema-2.1.0.json -o mypath/types.go
```
Most of the MarshallJSON/UnmarshalJSON are removed except the one for PropertyBag which is handy to inline the additional properties. The rest can be removed.
The URI,ID, UUID, GUID were renamed so it fits the Go convention defined [here](https://github.com/golang/lint/blob/master/lint.go#L700)
### Tests
You can run all unit tests using:
```bash
make test
```
### Release
You can create a release by tagging the version as follows:
``` bash
git tag v1.0.0 -m "Release version v1.0.0"
git push origin v1.0.0
```
The GitHub [release workflow](.github/workflows/release.yml) triggers immediately after the tag is pushed upstream. This flow will
release the binaries using the [goreleaser](https://goreleaser.com/actions/) action and then it will build and publish the docker image into Docker Hub.
The released artifacts are signed using [cosign](https://docs.sigstore.dev/). You can use the public key from [cosign.pub](cosign.pub)
file to verify the signature of docker image and binaries files.
The docker image signature can be verified with the following command:
```
cosign verify --key cosign.pub securego/gosec:<TAG>
```
The binary files signature can be verified with the following command:
```
cosign verify-blob --key cosign.pub --signature gosec_<VERSION>_darwin_amd64.tar.gz.sig gosec_<VERSION>_darwin_amd64.tar.gz
```
### Docker image
You can also build locally the docker image by using the command:
```bash
make image
```
You can run the `gosec` tool in a container against your local Go project. You only have to mount the project
into a volume as follows:
```bash
docker run --rm -it -w /<PROJECT>/ -v <YOUR PROJECT PATH>/<PROJECT>:/<PROJECT> securego/gosec /<PROJECT>/...
```
**Note:** the current working directory needs to be set with `-w` option in order to get successfully resolved the dependencies from go module file
### Generate TLS rule
The configuration of TLS rule can be generated from [Mozilla's TLS ciphers recommendation](https://statics.tls.security.mozilla.org/server-side-tls-conf.json).
First you need to install the generator tool:
```bash
go get github.com/securego/gosec/v2/cmd/tlsconfig/...
```
You can invoke now the `go generate` in the root of the project:
```bash
go generate ./...
```
This will generate the `rules/tls_config.go` file which will contain the current ciphers recommendation from Mozilla.
Development documentation was moved to [DEVELOPMENT.md](DEVELOPMENT.md).
## Who is using gosec?
+151 -28
View File
@@ -1,61 +1,184 @@
# Rule Documentation
## Rules accepting parameters
## Table of Contents
As [README.md](https://github.com/securego/gosec/blob/master/README.md) mentions, some rules can be configured by adding parameters to the gosec JSON config. Per rule configs are encoded as top level objects in the gosec config, with the rule ID (`Gxxx`) as the key.
- [Rules List](#rules-list)
- [G1xx: General Secure Coding](#g1xx-general-secure-coding)
- [G2xx: Injection Patterns](#g2xx-injection-patterns)
- [G3xx: Filesystem and Permissions](#g3xx-filesystem-and-permissions)
- [G4xx: Crypto and Protocol security](#g4xx-crypto-and-protocol-security)
- [G5xx: Import Blocklist](#g5xx-import-blocklist)
- [G6xx: Language/Runtime safety](#g6xx-languageruntime-safety)
- [G7xx: Taint Analysis](#g7xx-taint-analysis)
- [Retired and reassigned IDs](#retired-and-reassigned-ids)
- [Rules configuration](#rules-configuration)
- [G101](#g101)
- [G104](#g104)
- [G111](#g111)
- [G117](#g117)
- [G301, G302, G306, G307](#g301-g302-g306-g307)
Currently, the following rules accept parameters. This list is manually maintained; if you notice an omission please add it!
## Rules List
### G1xx: General Secure Coding
- [G101](#g101) — Look for hardcoded credentials (**AST**)
- G102 — Bind to all interfaces (**AST**)
- G103 — Audit the use of unsafe block (**AST**)
- [G104](#g104) — Audit errors not checked (**AST**)
- G106 — Audit the use of `ssh.InsecureIgnoreHostKey` function (**AST**)
- G107 — URL provided to HTTP request as taint input (**AST**)
- G108 — Profiling endpoint is automatically exposed (**AST**)
- G109 — Converting `strconv.Atoi` result to `int32/int16` (**AST**)
- G110 — Detect `io.Copy` instead of `io.CopyN` when decompressing (**AST**)
- [G111](#g111) — Detect `http.Dir('/')` as a potential risk (**AST**)
- G112 — Detect `ReadHeaderTimeout` not configured as a potential risk (**AST**)
- G113 — HTTP request smuggling via conflicting headers or bare LF in body parsing (**SSA**)
- G114 — Use of `net/http` serve function that has no support for setting timeouts (**AST**)
- G115 — Type conversion which leads to integer overflow (**SSA**)
- G116 — Detect Trojan Source attacks using bidirectional Unicode characters (**AST**)
- [G117](#g117) — Potential exposure of secrets via JSON/YAML/XML/TOML marshaling (**AST**)
- G118 — Context propagation failure leading to goroutine/resource leaks (**SSA**)
- G119 — Unsafe redirect policy may propagate sensitive headers (**SSA**)
- G120 — Unbounded form parsing in HTTP handlers can cause memory exhaustion (**SSA**)
- G121 — Unsafe CrossOriginProtection bypass patterns (**SSA**)
- G122 — Filesystem TOCTOU race risk in `filepath.Walk/WalkDir` callbacks (**SSA**)
- G123 — TLS resumption may bypass `VerifyPeerCertificate` when `VerifyConnection` is unset (**SSA**)
### G2xx: Injection Patterns
- G201 — SQL query construction using format string (**AST**)
- G202 — SQL query construction using string concatenation (**AST**)
- G203 — Use of unescaped data in HTML templates (**AST**)
- G204 — Audit use of command execution (**AST**)
### G3xx: Filesystem and Permissions
- [G301](#g301-g302-g306-g307) — Poor file permissions used when creating a directory (**AST**)
- [G302](#g301-g302-g306-g307) — Poor file permissions used when creating file or using `chmod` (**AST**)
- G303 — Creating tempfile using a predictable path (**AST**)
- G304 — File path provided as taint input (**AST**)
- G305 — File path traversal when extracting zip archive (**AST**)
- [G306](#g301-g302-g306-g307) — Poor file permissions used when writing to a file (**AST**)
- [G307](#g301-g302-g306-g307) — Poor file permissions used when creating a file with `os.Create` (**AST**)
### G4xx: Crypto and Protocol security
- G401 — Detect the usage of MD5 or SHA1 (**AST**)
- G402 — Look for bad TLS connection settings (**AST**)
- G403 — Ensure minimum RSA key length of 2048 bits (**AST**)
- G404 — Insecure random number source (`rand`) (**AST**)
- G405 — Detect the usage of DES or RC4 (**AST**)
- G406 — Detect the usage of deprecated MD4 or RIPEMD160 (**AST**)
- G407 — Use of hardcoded IV/nonce for encryption (**SSA**)
- G408 — Stateful misuse of `ssh.PublicKeyCallback` leading to auth bypass (**SSA**)
### G5xx: Import Blocklist
- G501 — Import blocklist: `crypto/md5` (**AST**)
- G502 — Import blocklist: `crypto/des` (**AST**)
- G503 — Import blocklist: `crypto/rc4` (**AST**)
- G504 — Import blocklist: `net/http/cgi` (**AST**)
- G505 — Import blocklist: `crypto/sha1` (**AST**)
- G506 — Import blocklist: `golang.org/x/crypto/md4` (**AST**)
- G507 — Import blocklist: `golang.org/x/crypto/ripemd160` (**AST**)
### G6xx: Language/Runtime safety
- G601 — Implicit memory aliasing in `RangeStmt` (Go 1.21 or lower) (**AST**)
- G602 — Possible slice bounds out of range (**SSA**)
### G7xx: Taint Analysis
- G701 — SQL injection via taint analysis (**Taint**)
- G702 — Command injection via taint analysis (**Taint**)
- G703 — Path traversal via taint analysis (**Taint**)
- G704 — SSRF via taint analysis (**Taint**)
- G705 — XSS via taint analysis (**Taint**)
- G706 — Log injection via taint analysis (**Taint**)
- G707 — SMTP command/header injection via taint analysis (**Taint**)
_Note: Implementation types used in this document:_
- **AST**: rule implemented in `rules/` and evaluated on AST patterns
- **SSA**: analyzer implemented in `analyzers/` using the analyzer framework (SSA-backed execution path)
- **Taint**: taint analysis rule implemented via `taint.NewGosecAnalyzer`
### Retired and reassigned IDs
- G105 is retired.
- G307 (old meaning: deferred method error handling) is retired; the ID now refers to file creation permissions.
- G113 was previously used for a retired `math/big` check and is now used for HTTP request smuggling.
## Rules configuration
Some rules accept configuration in the gosec JSON config file.
Per-rule settings are top-level objects keyed by rule ID (`Gxxx`).
Configurable rules (alphabetical): [G101](#g101), [G104](#g104), [G111](#g111), [G117](#g117), [G301](#g301-g302-g306-g307), [G302](#g301-g302-g306-g307), [G306](#g301-g302-g306-g307), [G307](#g301-g302-g306-g307).
### G101
The hard-coded credentials rule `G101` can be configured with additional patterns, and the entropy threshold can be adjusted:
`G101` (hardcoded credentials) can be configured with custom patterns and entropy thresholds:
```JSON
```json
{
"G101": {
"pattern": "(?i)passwd|pass|password|pwd|secret|private_key|token",
"ignore_entropy": false,
"entropy_threshold": "80.0",
"per_char_threshold": "3.0",
"truncate": "32"
}
"G101": {
"pattern": "(?i)passwd|pass|password|pwd|secret|private_key|token",
"ignore_entropy": false,
"entropy_threshold": "80.0",
"per_char_threshold": "3.0",
"truncate": "32",
"min_entropy_length": "8"
}
}
```
### G104
The unchecked error value rule `G104` can be configured with additional functions that should be permitted to be called without checking errors.
`G104` (unchecked errors) can be configured with function allowlists:
```JSON
```json
{
"G104": {
"ioutil": ["WriteFile"]
}
"G104": {
"ioutil": ["WriteFile"]
}
}
```
### G111
The HTTP Directory serving rule `G111` can be configured with a different regex for detecting potentially overly permissive servers. Note that this *replaces* the default pattern of `http\.Dir\("\/"\)|http\.Dir\('\/'\)`.
`G111` (HTTP directory serving) can be configured with a custom detection regex.
This replaces the default pattern.
```JSON
```json
{
"G111": {
"pattern": "http\\.Dir\\(\"\\\/\"\\)|http\\.Dir\\('\\\/'\\)"
}
"G111": {
"pattern": "http\\.Dir\\(\"\\/\"\\)|http\\.Dir\\('\\/'\\)"
}
}
```
### G117
`G117` (secret serialization) can be configured with a custom field-name pattern.
```json
{
"G117": {
"pattern": "(?i)secret|token|password"
}
}
```
### G301, G302, G306, G307
The various file and directory permission checking rules can be configured with a different maximum allowable file permission.
File and directory permission rules can be configured with stricter maximum permissions:
```JSON
```json
{
"G301":"0o600",
"G302":"0o600",
"G306":"0o750",
"G307":"0o750"
"G301": "0o600",
"G302": "0o600",
"G306": "0o750",
"G307": "0o750"
}
```
+1 -1
View File
@@ -10,7 +10,7 @@ inputs:
runs:
using: "docker"
image: "docker://securego/gosec:2.22.10"
image: "docker://ghcr.io/securego/gosec:2.24.7"
args:
- ${{ inputs.args }}
+523 -241
View File
File diff suppressed because it is too large Load Diff
+90
View File
@@ -16,6 +16,8 @@ package analyzers
import (
"golang.org/x/tools/go/analysis"
"github.com/securego/gosec/v2/taint"
)
// AnalyzerDefinition contains the description of an analyzer and a mechanism to
@@ -29,6 +31,58 @@ type AnalyzerDefinition struct {
// AnalyzerBuilder is used to register an analyzer definition with the analyzer
type AnalyzerBuilder func(id string, description string) *analysis.Analyzer
// Taint analysis rule definitions
var (
SQLInjectionRule = taint.RuleInfo{
ID: "G701",
Description: "SQL injection via string concatenation",
Severity: "HIGH",
CWE: "CWE-89",
}
CommandInjectionRule = taint.RuleInfo{
ID: "G702",
Description: "Command injection via user input",
Severity: "CRITICAL",
CWE: "CWE-78",
}
PathTraversalRule = taint.RuleInfo{
ID: "G703",
Description: "Path traversal via user input",
Severity: "HIGH",
CWE: "CWE-22",
}
SSRFRule = taint.RuleInfo{
ID: "G704",
Description: "SSRF via user-controlled URL",
Severity: "HIGH",
CWE: "CWE-918",
}
XSSRule = taint.RuleInfo{
ID: "G705",
Description: "XSS via unescaped user input",
Severity: "MEDIUM",
CWE: "CWE-79",
}
LogInjectionRule = taint.RuleInfo{
ID: "G706",
Description: "Log injection via user input",
Severity: "LOW",
CWE: "CWE-117",
}
SMTPInjectionRule = taint.RuleInfo{
ID: "G707",
Description: "SMTP command/header injection via user input",
Severity: "HIGH",
CWE: "CWE-93",
}
)
// AnalyzerList contains a mapping of analyzer ID's to analyzer definitions and a mapping
// of analyzer ID's to whether analyzers are suppressed.
type AnalyzerList struct {
@@ -66,9 +120,24 @@ func NewAnalyzerFilter(action bool, analyzerIDs ...string) AnalyzerFilter {
}
var defaultAnalyzers = []AnalyzerDefinition{
{"G113", "HTTP request smuggling via conflicting headers or bare LF in body parsing", newRequestSmugglingAnalyzer},
{"G115", "Type conversion which leads to integer overflow", newConversionOverflowAnalyzer},
{"G118", "Context propagation failure leading to goroutine/resource leaks", newContextPropagationAnalyzer},
{"G119", "Unsafe redirect policy may propagate sensitive headers", newRedirectHeaderPropagationAnalyzer},
{"G120", "Unbounded form parsing in HTTP handlers can cause memory exhaustion", newFormParsingLimitAnalyzer},
{"G121", "Unsafe CrossOriginProtection bypass patterns", newCORSBypassPatternAnalyzer},
{"G122", "Filesystem TOCTOU race risk in filepath.Walk/WalkDir callbacks", newWalkSymlinkRaceAnalyzer},
{"G123", "TLS resumption may bypass VerifyPeerCertificate when VerifyConnection is unset", newTLSResumptionVerifyPeerAnalyzer},
{"G602", "Possible slice bounds out of range", newSliceBoundsAnalyzer},
{"G407", "Use of hardcoded IV/nonce for encryption", newHardCodedNonce},
{"G408", "Stateful misuse of ssh.PublicKeyCallback leading to auth bypass", newSSHCallbackAnalyzer},
{"G701", "SQL injection via taint analysis", newSQLInjectionAnalyzer},
{"G702", "Command injection via taint analysis", newCommandInjectionAnalyzer},
{"G703", "Path traversal via taint analysis", newPathTraversalAnalyzer},
{"G704", "SSRF via taint analysis", newSSRFAnalyzer},
{"G705", "XSS via taint analysis", newXSSAnalyzer},
{"G706", "Log injection via taint analysis", newLogInjectionAnalyzer},
{"G707", "SMTP command/header injection via taint analysis", newSMTPInjectionAnalyzer},
}
// Generate the list of analyzers to use
@@ -93,3 +162,24 @@ func Generate(trackSuppressions bool, filters ...AnalyzerFilter) *AnalyzerList {
}
return &AnalyzerList{Analyzers: analyzerMap, AnalyzerSuppressed: analyzerSuppressedMap}
}
// DefaultTaintAnalyzers returns all predefined taint analysis analyzers.
func DefaultTaintAnalyzers() []*analysis.Analyzer {
sqlConfig := SQLInjection()
cmdConfig := CommandInjection()
pathConfig := PathTraversal()
ssrfConfig := SSRF()
xssConfig := XSS()
logConfig := LogInjection()
smtpConfig := SMTPInjection()
return []*analysis.Analyzer{
taint.NewGosecAnalyzer(&SQLInjectionRule, &sqlConfig),
taint.NewGosecAnalyzer(&CommandInjectionRule, &cmdConfig),
taint.NewGosecAnalyzer(&PathTraversalRule, &pathConfig),
taint.NewGosecAnalyzer(&SSRFRule, &ssrfConfig),
taint.NewGosecAnalyzer(&XSSRule, &xssConfig),
taint.NewGosecAnalyzer(&LogInjectionRule, &logConfig),
taint.NewGosecAnalyzer(&SMTPInjectionRule, &smtpConfig),
}
}
@@ -0,0 +1,60 @@
// (c) Copyright gosec's authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package analyzers
import (
"golang.org/x/tools/go/analysis"
"github.com/securego/gosec/v2/taint"
)
// CommandInjection returns a configuration for detecting command injection vulnerabilities.
func CommandInjection() taint.Config {
return taint.Config{
Sources: []taint.Source{
// Type sources: tainted when received as parameters
{Package: "net/http", Name: "Request", Pointer: true},
{Package: "bufio", Name: "Reader", Pointer: true},
{Package: "bufio", Name: "Scanner", Pointer: true},
// Function sources
{Package: "os", Name: "Args", IsFunc: true},
{Package: "os", Name: "Getenv", IsFunc: true},
},
Sinks: []taint.Sink{
// Detect at command creation, not execution (avoids double detection)
{Package: "os/exec", Method: "Command"},
{Package: "os/exec", Method: "CommandContext"},
{Package: "os", Method: "StartProcess"},
{Package: "syscall", Method: "Exec"},
{Package: "syscall", Method: "ForkExec"},
{Package: "syscall", Method: "StartProcess"},
},
Sanitizers: []taint.Sanitizer{
// No general-purpose stdlib sanitizer for command injection.
// The proper fix is to use exec.Command with separate args, not shell strings.
},
}
}
// newCommandInjectionAnalyzer creates an analyzer for detecting command injection vulnerabilities
// via taint analysis (G702)
func newCommandInjectionAnalyzer(id string, description string) *analysis.Analyzer {
config := CommandInjection()
rule := CommandInjectionRule
rule.ID = id
rule.Description = description
return taint.NewGosecAnalyzer(&rule, &config)
}
@@ -0,0 +1,973 @@
// (c) Copyright gosec's authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package analyzers
import (
"go/token"
"go/types"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/buildssa"
"golang.org/x/tools/go/ssa"
"github.com/securego/gosec/v2/internal/ssautil"
"github.com/securego/gosec/v2/issue"
)
const (
contextPkgPath = "context"
httpPkgPath = "net/http"
msgContextBackground = "Goroutine uses context.Background/TODO while request-scoped context is available"
msgLostCancel = "context cancellation function returned by WithCancel/WithTimeout/WithDeadline is not called"
msgLoopWithoutDone = "Long-running loop performs calls without a ctx.Done() cancellation guard"
)
func newContextPropagationAnalyzer(id string, description string) *analysis.Analyzer {
return &analysis.Analyzer{
Name: id,
Doc: description,
Run: runContextPropagationAnalysis,
Requires: []*analysis.Analyzer{buildssa.Analyzer},
}
}
type contextPropagationState struct {
*BaseAnalyzerState
ssaFuncs []*ssa.Function
issues map[token.Pos]*issue.Issue
}
func newContextPropagationState(pass *analysis.Pass, funcs []*ssa.Function) *contextPropagationState {
return &contextPropagationState{
BaseAnalyzerState: NewBaseState(pass),
ssaFuncs: funcs,
issues: make(map[token.Pos]*issue.Issue),
}
}
func (s *contextPropagationState) addIssue(pos token.Pos, what string, severity issue.Score, confidence issue.Score) {
if pos == token.NoPos {
return
}
if _, found := s.issues[pos]; found {
return
}
s.issues[pos] = newIssue(s.Pass.Analyzer.Name, what, s.Pass.Fset, pos, severity, confidence)
}
func runContextPropagationAnalysis(pass *analysis.Pass) (any, error) {
ssaResult, err := ssautil.GetSSAResult(pass)
if err != nil {
return nil, err
}
state := newContextPropagationState(pass, ssaResult.SSA.SrcFuncs)
defer state.Release()
for _, fn := range state.ssaFuncs {
if fn == nil || len(fn.Blocks) == 0 {
continue
}
hasRequestContext := functionHasRequestContext(fn)
ctxValues := collectContextValues(fn)
if hasRequestContext {
state.detectUnsafeGoroutines(fn, ctxValues)
state.detectLoopsWithoutCancellationGuard(fn, ctxValues)
}
state.detectLostCancel(fn)
}
if len(state.issues) == 0 {
return nil, nil
}
issues := make([]*issue.Issue, 0, len(state.issues))
for _, i := range state.issues {
issues = append(issues, i)
}
return issues, nil
}
func functionHasRequestContext(fn *ssa.Function) bool {
if fn.Signature == nil {
return false
}
params := fn.Signature.Params()
for i := 0; i < params.Len(); i++ {
p := params.At(i)
if p == nil {
continue
}
if isContextType(p.Type()) {
return true
}
if isHTTPRequestPointerType(p.Type()) {
return true
}
}
return false
}
func collectContextValues(fn *ssa.Function) map[ssa.Value]struct{} {
ctxVals := make(map[ssa.Value]struct{})
for _, param := range fn.Params {
if param == nil {
continue
}
if isContextType(param.Type()) {
ctxVals[param] = struct{}{}
}
}
for _, block := range fn.Blocks {
for _, instr := range block.Instrs {
callInstr, ok := instr.(ssa.CallInstruction)
if !ok {
continue
}
common := callInstr.Common()
if common == nil {
continue
}
if isHTTPRequestContextCall(common) {
if val := callInstr.Value(); val != nil {
ctxVals[val] = struct{}{}
}
continue
}
if !isContextWithFamily(common) {
continue
}
tuple := callInstr.Value()
for _, ref := range safeReferrers(tuple) {
extract, ok := ref.(*ssa.Extract)
if !ok {
continue
}
if extract.Index == 0 {
ctxVals[extract] = struct{}{}
}
}
}
}
return ctxVals
}
func (s *contextPropagationState) detectUnsafeGoroutines(fn *ssa.Function, contextValues map[ssa.Value]struct{}) {
for _, block := range fn.Blocks {
for _, instr := range block.Instrs {
goInstr, ok := instr.(*ssa.Go)
if !ok {
continue
}
hasBackgroundCtx := false
for _, arg := range goInstr.Call.Args {
if isBackgroundOrTodoValue(arg) {
hasBackgroundCtx = true
break
}
}
if !hasBackgroundCtx {
for _, callee := range resolveGoCallTargets(goInstr) {
if callee == nil {
continue
}
if functionCallsBackground(callee) {
hasBackgroundCtx = true
break
}
}
}
if hasBackgroundCtx && len(contextValues) > 0 {
s.addIssue(goInstr.Pos(), msgContextBackground, issue.High, issue.Medium)
}
}
}
}
func (s *contextPropagationState) detectLostCancel(fn *ssa.Function) {
for _, block := range fn.Blocks {
for _, instr := range block.Instrs {
callInstr, ok := instr.(ssa.CallInstruction)
if !ok {
continue
}
common := callInstr.Common()
if common == nil || !isContextWithFamily(common) {
continue
}
tupleCall := callInstr.Value()
if tupleCall == nil {
continue
}
cancelValue := findCancelResult(tupleCall)
if cancelValue == nil {
continue
}
if !isCancelCalled(cancelValue, s.ssaFuncs) {
s.addIssue(instr.Pos(), msgLostCancel, issue.Medium, issue.High)
}
}
}
}
func (s *contextPropagationState) detectLoopsWithoutCancellationGuard(fn *ssa.Function, contextValues map[ssa.Value]struct{}) {
if len(contextValues) == 0 {
return
}
if len(fn.Blocks) == 0 {
return
}
features := make(map[*ssa.BasicBlock]blockFeatures, len(fn.Blocks))
for _, block := range fn.Blocks {
if block == nil {
continue
}
features[block] = analyzeBlockFeatures(block)
}
regions := findLoopRegions(fn)
for _, region := range regions {
if region.hasExternalExit {
continue
}
hasDoneGuard := false
hasBlocking := false
for _, block := range region.blocks {
feature := features[block]
if feature.hasDoneGuard {
hasDoneGuard = true
}
if feature.hasBlocking {
hasBlocking = true
}
if hasDoneGuard && hasBlocking {
break
}
}
if hasDoneGuard || !hasBlocking {
continue
}
s.addIssue(region.pos, msgLoopWithoutDone, issue.High, issue.Low)
}
}
type blockFeatures struct {
hasDoneGuard bool
hasBlocking bool
}
func analyzeBlockFeatures(block *ssa.BasicBlock) blockFeatures {
features := blockFeatures{}
for _, instr := range block.Instrs {
callInstr, ok := instr.(ssa.CallInstruction)
if !ok {
switch i := instr.(type) {
case *ssa.Go:
features.hasBlocking = true
case *ssa.Call:
if looksLikeBlockingCall(i.Common()) {
features.hasBlocking = true
}
case *ssa.Defer:
if looksLikeBlockingCall(i.Common()) {
features.hasBlocking = true
}
}
continue
}
common := callInstr.Common()
if common == nil {
continue
}
if isContextDoneCall(common) {
features.hasDoneGuard = true
}
if looksLikeBlockingCall(common) {
features.hasBlocking = true
}
}
return features
}
type loopRegion struct {
blocks []*ssa.BasicBlock
hasExternalExit bool
pos token.Pos
}
func findLoopRegions(fn *ssa.Function) []loopRegion {
if fn == nil || len(fn.Blocks) == 0 {
return nil
}
var regions []loopRegion
index := 0
stack := make([]*ssa.BasicBlock, 0, len(fn.Blocks))
onStack := make(map[*ssa.BasicBlock]bool, len(fn.Blocks))
indexMap := make(map[*ssa.BasicBlock]int, len(fn.Blocks))
lowLink := make(map[*ssa.BasicBlock]int, len(fn.Blocks))
var strongConnect func(v *ssa.BasicBlock)
strongConnect = func(v *ssa.BasicBlock) {
indexMap[v] = index
lowLink[v] = index
index++
stack = append(stack, v)
onStack[v] = true
for _, w := range v.Succs {
if w == nil {
continue
}
if _, seen := indexMap[w]; !seen {
strongConnect(w)
if lowLink[w] < lowLink[v] {
lowLink[v] = lowLink[w]
}
} else if onStack[w] {
if indexMap[w] < lowLink[v] {
lowLink[v] = indexMap[w]
}
}
}
if lowLink[v] != indexMap[v] {
return
}
scc := make([]*ssa.BasicBlock, 0, 4)
sccSet := make(map[*ssa.BasicBlock]bool, 4)
for {
n := stack[len(stack)-1]
stack = stack[:len(stack)-1]
onStack[n] = false
scc = append(scc, n)
sccSet[n] = true
if n == v {
break
}
}
if !isLoopSCC(scc, sccSet) {
return
}
hasExternalExit := false
pos := token.NoPos
for _, b := range scc {
if pos == token.NoPos && len(b.Instrs) > 0 {
pos = b.Instrs[0].Pos()
}
for _, succ := range b.Succs {
if succ == nil {
continue
}
if !sccSet[succ] {
hasExternalExit = true
break
}
}
if hasExternalExit {
break
}
}
if pos == token.NoPos {
for _, instr := range v.Instrs {
if instr.Pos() != token.NoPos {
pos = instr.Pos()
break
}
}
}
regions = append(regions, loopRegion{
blocks: scc,
hasExternalExit: hasExternalExit,
pos: pos,
})
}
for _, block := range fn.Blocks {
if block == nil {
continue
}
if _, seen := indexMap[block]; seen {
continue
}
strongConnect(block)
}
return regions
}
func isLoopSCC(scc []*ssa.BasicBlock, sccSet map[*ssa.BasicBlock]bool) bool {
if len(scc) > 1 {
return true
}
if len(scc) == 0 {
return false
}
b := scc[0]
for _, succ := range b.Succs {
if succ == b || sccSet[succ] {
return true
}
}
return false
}
func looksLikeBlockingCall(common *ssa.CallCommon) bool {
if common == nil {
return false
}
if common.IsInvoke() {
name := ""
if common.Method != nil {
name = common.Method.Name()
}
switch name {
case "Do", "RoundTrip", "QueryContext", "ExecContext", "Read", "Write", "Recv", "Send":
return true
}
return false
}
callee := common.StaticCallee()
if callee == nil || callee.Pkg == nil || callee.Pkg.Pkg == nil {
return false
}
pkgPath := callee.Pkg.Pkg.Path()
name := callee.Name()
if pkgPath == "time" && name == "Sleep" {
return true
}
if pkgPath == "net/http" {
switch name {
case "Get", "Head", "Post", "PostForm":
return true
}
}
if pkgPath == "database/sql" {
switch name {
case "Query", "QueryContext", "Exec", "ExecContext", "Begin", "BeginTx":
return true
}
}
if pkgPath == "os" {
switch name {
case "ReadFile", "WriteFile", "Open", "OpenFile":
return true
}
}
return false
}
func resolveGoCallTargets(goInstr *ssa.Go) []*ssa.Function {
var funcs []*ssa.Function
if goInstr == nil {
return funcs
}
value := goInstr.Call.Value
if value == nil {
return funcs
}
s := &BaseAnalyzerState{ClosureCache: make(map[ssa.Value]bool)}
s.ResolveFuncs(value, &funcs)
return funcs
}
func safeReferrers(v ssa.Value) []ssa.Instruction {
if v == nil {
return nil
}
refs := v.Referrers()
if refs == nil {
return nil
}
return *refs
}
func functionCallsBackground(fn *ssa.Function) bool {
if fn == nil {
return false
}
for _, block := range fn.Blocks {
for _, instr := range block.Instrs {
callInstr, ok := instr.(ssa.CallInstruction)
if !ok {
continue
}
common := callInstr.Common()
if common == nil {
continue
}
if isBackgroundOrTodoCall(common) {
return true
}
}
}
return false
}
func isBackgroundOrTodoValue(v ssa.Value) bool {
call, ok := v.(*ssa.Call)
if !ok {
return false
}
return isBackgroundOrTodoCall(call.Common())
}
func isBackgroundOrTodoCall(common *ssa.CallCommon) bool {
if common == nil {
return false
}
callee := common.StaticCallee()
if callee == nil || callee.Pkg == nil || callee.Pkg.Pkg == nil {
return false
}
if callee.Pkg.Pkg.Path() != contextPkgPath {
return false
}
switch callee.Name() {
case "Background", "TODO":
return true
default:
return false
}
}
func isContextWithFamily(common *ssa.CallCommon) bool {
if common == nil {
return false
}
callee := common.StaticCallee()
if callee == nil || callee.Pkg == nil || callee.Pkg.Pkg == nil {
return false
}
if callee.Pkg.Pkg.Path() != contextPkgPath {
return false
}
switch callee.Name() {
case "WithCancel", "WithTimeout", "WithDeadline":
return true
default:
return false
}
}
func isHTTPRequestContextCall(common *ssa.CallCommon) bool {
if common == nil || common.IsInvoke() {
return false
}
callee := common.StaticCallee()
if callee == nil || callee.Signature == nil || callee.Pkg == nil || callee.Pkg.Pkg == nil {
return false
}
if callee.Name() != "Context" {
return false
}
if callee.Pkg.Pkg.Path() != httpPkgPath {
return false
}
recv := callee.Signature.Recv()
return recv != nil && isHTTPRequestPointerType(recv.Type())
}
func isContextDoneCall(common *ssa.CallCommon) bool {
if common == nil {
return false
}
if common.IsInvoke() {
if common.Method == nil || common.Method.Name() != "Done" {
return false
}
recv := common.Value
return recv != nil && isContextType(recv.Type())
}
callee := common.StaticCallee()
if callee == nil || callee.Signature == nil || callee.Name() != "Done" {
return false
}
recv := callee.Signature.Recv()
return recv != nil && isContextType(recv.Type())
}
func findCancelResult(tupleCall *ssa.Call) ssa.Value {
if tupleCall == nil {
return nil
}
for _, ref := range safeReferrers(tupleCall) {
extract, ok := ref.(*ssa.Extract)
if !ok {
continue
}
if extract.Index != 1 {
continue
}
if isCancelFuncType(extract.Type()) {
return extract
}
}
return nil
}
func isCancelFuncType(t types.Type) bool {
sig, ok := t.Underlying().(*types.Signature)
if !ok {
return false
}
if sig.Params().Len() != 0 || sig.Results().Len() != 0 {
return false
}
return true
}
func isCancelCalled(cancelValue ssa.Value, allFuncs []*ssa.Function) bool {
if cancelValue == nil {
return false
}
queue := []ssa.Value{cancelValue}
visited := make(map[ssa.Value]bool, 8)
for len(queue) > 0 {
current := queue[0]
queue = queue[1:]
if current == nil || visited[current] {
continue
}
visited[current] = true
for _, ref := range safeReferrers(current) {
switch r := ref.(type) {
case ssa.CallInstruction:
if isUsedInCall(r.Common(), current) {
return true
}
case *ssa.Store:
if r.Val != current {
continue
}
// Check if storing to a struct field — if so, search other
// methods of the same type for loads of that field + call.
if fa, ok := r.Addr.(*ssa.FieldAddr); ok {
if isCancelCalledViaStructField(fa, allFuncs) {
return true
}
// Check if the struct containing this field is returned,
// transferring cancel responsibility to the caller.
if isStructFieldReturnedFromFunc(fa) {
return true
}
}
queue = append(queue, r.Addr)
case *ssa.UnOp:
if r.Op == token.MUL && r.X == current {
queue = append(queue, r)
}
case *ssa.Phi:
queue = append(queue, r)
case *ssa.ChangeType:
if r.X == current {
queue = append(queue, r)
}
case *ssa.Convert:
if r.X == current {
queue = append(queue, r)
}
case *ssa.MakeInterface:
if r.X == current {
queue = append(queue, r)
}
case *ssa.MakeClosure:
// The cancel value is captured as a free variable in a closure.
// Find the corresponding FreeVar inside the closure body and
// follow it so that calls within the closure are detected.
if fn, ok := r.Fn.(*ssa.Function); ok {
for i, binding := range r.Bindings {
if binding == current && i < len(fn.FreeVars) {
queue = append(queue, fn.FreeVars[i])
}
}
}
case *ssa.Return:
// Cancel function is returned to the caller — responsibility
// is transferred; treat as "called".
for _, result := range r.Results {
if result == current {
return true
}
}
}
}
}
return false
}
// isStructFieldReturnedFromFunc checks whether the struct that owns a FieldAddr
// is loaded and returned from the enclosing function. When a cancel is stored in
// a struct field and the struct is returned, responsibility for calling the
// cancel is transferred to the caller.
func isStructFieldReturnedFromFunc(fa *ssa.FieldAddr) bool {
structBase := fa.X
if structBase == nil {
return false
}
// Follow referrers of the struct base pointer to find loads (*struct)
// that are then returned.
for _, ref := range safeReferrers(structBase) {
load, ok := ref.(*ssa.UnOp)
if !ok || load.Op != token.MUL {
continue
}
for _, loadRef := range safeReferrers(load) {
if _, ok := loadRef.(*ssa.Return); ok {
return true
}
}
}
return false
}
// isCancelCalledViaStructField checks whether a cancel function stored into a
// struct field (e.g., job.cancelFn = cancel) is subsequently called in any other
// method of the same receiver type (e.g., job.Close() calls job.cancelFn()).
func isCancelCalledViaStructField(storeFA *ssa.FieldAddr, allFuncs []*ssa.Function) bool {
// Get the field index and the receiver pointer type
fieldIdx := storeFA.Field
structPtrType := storeFA.X.Type()
for _, fn := range allFuncs {
if fn == nil || fn.Blocks == nil {
continue
}
// Only check methods on the same receiver type
if fn.Signature == nil || fn.Signature.Recv() == nil {
continue
}
if !types.Identical(fn.Signature.Recv().Type(), structPtrType) {
continue
}
// Look for a load of the same field followed by a call
for _, block := range fn.Blocks {
for _, instr := range block.Instrs {
fa, ok := instr.(*ssa.FieldAddr)
if !ok || fa.Field != fieldIdx {
continue
}
// Check that this FieldAddr is on the receiver (Params[0])
if len(fn.Params) == 0 {
continue
}
if !reachesParam(fa.X, fn.Params[0]) {
continue
}
// Check if the value loaded from this field is eventually called
if isFieldValueCalled(fa) {
return true
}
}
}
}
return false
}
// reachesParam checks if a value traces back to the given parameter,
// following through pointer dereferences and phi nodes.
func reachesParam(v ssa.Value, param *ssa.Parameter) bool {
seen := make(map[ssa.Value]bool)
return reachesParamImpl(v, param, seen)
}
func reachesParamImpl(v ssa.Value, param *ssa.Parameter, seen map[ssa.Value]bool) bool {
if v == nil || seen[v] {
return false
}
seen[v] = true
if v == param {
return true
}
switch val := v.(type) {
case *ssa.UnOp:
return reachesParamImpl(val.X, param, seen)
case *ssa.Phi:
for _, e := range val.Edges {
if reachesParamImpl(e, param, seen) {
return true
}
}
case *ssa.FieldAddr:
return reachesParamImpl(val.X, param, seen)
}
return false
}
// isFieldValueCalled checks if the value loaded from a FieldAddr is eventually
// used as a callee (i.e., the loaded function pointer is called).
func isFieldValueCalled(fa *ssa.FieldAddr) bool {
	referrers := fa.Referrers()
	if referrers == nil {
		return false
	}
	for _, user := range *referrers {
		// Only a pointer dereference (UnOp with token.MUL) loads the field value.
		load, isLoad := user.(*ssa.UnOp)
		if !isLoad || load.Op != token.MUL {
			continue
		}
		if load.Referrers() == nil {
			continue
		}
		// Walk forward from the loaded value through phis, stores and further
		// loads, looking for any use as a call target or call argument.
		// (Traversal order does not matter for the boolean result, so a
		// simple stack is used here.)
		visited := map[ssa.Value]bool{}
		stack := []ssa.Value{load}
		for len(stack) > 0 {
			top := stack[len(stack)-1]
			stack = stack[:len(stack)-1]
			if top == nil || visited[top] {
				continue
			}
			visited[top] = true
			uses := top.Referrers()
			if uses == nil {
				continue
			}
			for _, use := range *uses {
				switch u := use.(type) {
				case ssa.CallInstruction:
					if isUsedInCall(u.Common(), top) {
						return true
					}
				case *ssa.Phi:
					stack = append(stack, u)
				case *ssa.Store:
					// Stored then possibly loaded elsewhere — follow the
					// destination address to keep tracking the value.
					if u.Val == top {
						stack = append(stack, u.Addr)
					}
				case *ssa.UnOp:
					// Follow further dereferences of the tracked value.
					if u.X == top {
						stack = append(stack, u)
					}
				}
			}
		}
	}
	return false
}
// isUsedInCall reports whether target appears in the call site either as the
// callee itself (common.Value) or as one of its arguments.
func isUsedInCall(common *ssa.CallCommon, target ssa.Value) bool {
	switch {
	case common == nil || target == nil:
		return false
	case common.Value == target:
		// The tracked value is the function being invoked.
		return true
	}
	for _, operand := range common.Args {
		if operand == target {
			return true
		}
	}
	return false
}
// isContextType reports whether t is the named context.Context type, or an
// interface that duck-types as one: at least four methods, including ones
// named Done, Err, Value and Deadline. NOTE(review): only the method names
// are checked here, not their signatures — matches the original behavior.
func isContextType(t types.Type) bool {
	// Fast path: the canonical named type from the context package.
	if named, isNamed := t.(*types.Named); isNamed {
		if obj := named.Obj(); obj != nil && obj.Name() == "Context" {
			if pkg := obj.Pkg(); pkg != nil && pkg.Path() == contextPkgPath {
				return true
			}
		}
	}
	// Structural path: must be an interface with enough methods before we
	// bother doing the (pure, order-independent) per-name lookups.
	iface, isIface := t.Underlying().(*types.Interface)
	if !isIface || iface.NumMethods() < 4 {
		return false
	}
	for _, name := range []string{"Done", "Err", "Value", "Deadline"} {
		if m, _, _ := types.LookupFieldOrMethod(t, true, nil, name); m == nil {
			return false
		}
	}
	return true
}
// isHTTPRequestPointerType reports whether t is a pointer to the named
// Request type from the net/http package (i.e. *http.Request).
func isHTTPRequestPointerType(t types.Type) bool {
	ptr, isPtr := t.(*types.Pointer)
	if !isPtr {
		return false
	}
	named, isNamed := ptr.Elem().(*types.Named)
	if !isNamed {
		return false
	}
	obj := named.Obj()
	switch {
	case obj == nil || obj.Name() != "Request":
		return false
	case obj.Pkg() == nil:
		return false
	default:
		return obj.Pkg().Path() == httpPkgPath
	}
}
+207 -456
View File
@@ -15,45 +15,19 @@
package analyzers
import (
"cmp"
"fmt"
"go/token"
"go/types"
"math"
"regexp"
"strconv"
"strings"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/analysis/passes/buildssa"
"golang.org/x/tools/go/ssa"
"github.com/securego/gosec/v2/internal/ssautil"
"github.com/securego/gosec/v2/issue"
)
type integer struct {
signed bool
size int
min int
max uint
}
type rangeResult struct {
minValue int
maxValue uint
explicitPositiveVals []uint
explicitNegativeVals []int
isRangeCheck bool
convertFound bool
}
type branchResults struct {
minValue *int
maxValue *uint
explicitPositiveVals []uint
explicitNegativeVals []int
convertFound bool
}
// newConversionOverflowAnalyzer creates a new analysis.Analyzer for detecting integer overflows in conversions.
func newConversionOverflowAnalyzer(id string, description string) *analysis.Analyzer {
return &analysis.Analyzer{
Name: id,
@@ -63,32 +37,77 @@ func newConversionOverflowAnalyzer(id string, description string) *analysis.Anal
}
}
func runConversionOverflow(pass *analysis.Pass) (interface{}, error) {
ssaResult, err := getSSAResult(pass)
type conversionPair struct {
src types.BasicKind
dst types.BasicKind
}
type overflowState struct {
*BaseAnalyzerState
msgCache map[conversionPair]string
}
func newOverflowState(pass *analysis.Pass) *overflowState {
return &overflowState{
BaseAnalyzerState: NewBaseState(pass),
msgCache: make(map[conversionPair]string),
}
}
// runConversionOverflow analyzes the SSA representation of the code to find potential integer overflows in type conversions.
func runConversionOverflow(pass *analysis.Pass) (any, error) {
ssaResult, err := ssautil.GetSSAResult(pass)
if err != nil {
return nil, fmt.Errorf("building ssa representation: %w", err)
}
state := newOverflowState(pass)
defer state.Release()
issues := []*issue.Issue{}
for _, mcall := range ssaResult.SSA.SrcFuncs {
state.Reset()
for _, block := range mcall.DomPreorder() {
for _, instr := range block.Instrs {
switch instr := instr.(type) {
case *ssa.Convert:
src := instr.X.Type().Underlying().String()
dst := instr.Type().Underlying().String()
if isIntOverflow(src, dst) {
if isSafeConversion(instr) {
srcInfo, err := GetIntTypeInfo(instr.X.Type())
if err != nil {
continue
}
dstInfo, err := GetIntTypeInfo(instr.Type())
if err != nil {
continue
}
if hasOverflow(srcInfo, dstInfo) {
if state.isSafeConversion(instr, dstInfo) {
continue
}
issue := newIssue(pass.Analyzer.Name,
fmt.Sprintf("integer overflow conversion %s -> %s", src, dst),
srcBasic, _ := instr.X.Type().Underlying().(*types.Basic)
dstBasic, _ := instr.Type().Underlying().(*types.Basic)
if srcBasic == nil || dstBasic == nil {
continue
}
pair := conversionPair{
src: srcBasic.Kind(),
dst: dstBasic.Kind(),
}
msg, ok := state.msgCache[pair]
if !ok {
msg = fmt.Sprintf("integer overflow conversion %s -> %s", srcBasic.Name(), dstBasic.Name())
state.msgCache[pair] = msg
}
issues = append(issues, newIssue(pass.Analyzer.Name,
msg,
pass.Fset,
instr.Pos(),
issue.High,
issue.Medium,
)
issues = append(issues, issue)
))
}
}
}
@@ -101,459 +120,191 @@ func runConversionOverflow(pass *analysis.Pass) (interface{}, error) {
return nil, nil
}
func isIntOverflow(src string, dst string) bool {
srcInt, err := parseIntType(src)
if err != nil {
return false
}
dstInt, err := parseIntType(dst)
if err != nil {
return false
}
return srcInt.min < dstInt.min || srcInt.max > dstInt.max
}
func parseIntType(intType string) (integer, error) {
re := regexp.MustCompile(`^(?P<type>u?int)(?P<size>\d{1,2})?$`)
matches := re.FindStringSubmatch(intType)
if matches == nil {
return integer{}, fmt.Errorf("no integer type match found for %s", intType)
}
it := matches[re.SubexpIndex("type")]
is := matches[re.SubexpIndex("size")]
signed := it == "int"
// use default system int type in case size is not present in the type.
intSize := strconv.IntSize
if is != "" {
var err error
intSize, err = strconv.Atoi(is)
if err != nil {
return integer{}, fmt.Errorf("failed to parse the integer type size: %w", err)
}
}
if intSize != 8 && intSize != 16 && intSize != 32 && intSize != 64 && is != "" {
return integer{}, fmt.Errorf("invalid bit size: %d", intSize)
}
var minVal int
var maxVal uint
if signed {
shiftAmount := intSize - 1
// Perform a bounds check.
if shiftAmount < 0 {
return integer{}, fmt.Errorf("invalid shift amount: %d", shiftAmount)
}
maxVal = (1 << uint(shiftAmount)) - 1
minVal = -1 << (intSize - 1)
} else {
maxVal = (1 << uint(intSize)) - 1
minVal = 0
}
return integer{
signed: signed,
size: intSize,
min: minVal,
max: maxVal,
}, nil
}
func isSafeConversion(instr *ssa.Convert) bool {
dstType := instr.Type().Underlying().String()
// isSafeConversion checks if a specific conversion instruction is safe from overflow, considering logic and constraints.
func (s *overflowState) isSafeConversion(instr *ssa.Convert, dstInt IntTypeInfo) bool {
// Check for constant conversions.
if constVal, ok := instr.X.(*ssa.Const); ok {
if isConstantInRange(constVal, dstType) {
if IsConstantInTypeRange(constVal, dstInt) {
return true
}
}
// Check for string to integer conversions with specified bit size.
if isStringToIntConversion(instr, dstType) {
return true
}
// Check for explicit range checks.
if hasExplicitRangeCheck(instr, dstType) {
if s.hasRangeCheck(instr.X, dstInt, instr.Block()) {
return true
}
return false
}
func isConstantInRange(constVal *ssa.Const, dstType string) bool {
value, err := strconv.ParseInt(constVal.Value.String(), 10, 64)
if err != nil {
return false
}
dstInt, err := parseIntType(dstType)
if err != nil {
return false
}
if dstInt.signed {
return value >= -(1<<(dstInt.size-1)) && value <= (1<<(dstInt.size-1))-1
}
return value >= 0 && value <= (1<<dstInt.size)-1
func hasOverflow(srcInfo, dstInfo IntTypeInfo) bool {
return srcInfo.Min < dstInfo.Min || srcInfo.Max > dstInfo.Max
}
func isStringToIntConversion(instr *ssa.Convert, dstType string) bool {
// Traverse the SSA instructions to find the original variable.
original := instr.X
for {
switch v := original.(type) {
case *ssa.Call:
if v.Call.StaticCallee() != nil && (v.Call.StaticCallee().Name() == "ParseInt" || v.Call.StaticCallee().Name() == "ParseUint") {
if len(v.Call.Args) == 3 {
if bitSize, ok := v.Call.Args[2].(*ssa.Const); ok {
signed := v.Call.StaticCallee().Name() == "ParseInt"
bitSizeValue, err := strconv.Atoi(bitSize.Value.String())
if err != nil {
return false
}
dstInt, err := parseIntType(dstType)
if err != nil {
return false
}
// hasRangeCheck determines if there is a valid range check for the given value that ensures safety.
func (s *overflowState) hasRangeCheck(v ssa.Value, dstInt IntTypeInfo, block *ssa.BasicBlock) bool {
// Clear visited map for new resolution
clear(s.Visited)
// we're good if:
// - signs match and bit size is <= than destination
// - parsing unsigned and bit size is < than destination
isSafe := (bitSizeValue <= dstInt.size && signed == dstInt.signed) ||
(bitSizeValue < dstInt.size && !signed)
return isSafe
res := s.Analyzer.ResolveRange(v, block)
defer s.Analyzer.releaseResult(res)
// Check for explicit values
if ExplicitValsInRange(res.explicitPositiveVals, res.explicitNegativeVals, dstInt) {
return true
}
// Check all predecessors for OR support.
if len(block.Preds) > 1 {
allPredsSafe := true
for _, pred := range block.Preds {
if !s.isSafeFromPredecessor(v, dstInt, pred, block) {
allPredsSafe = false
break
}
}
if allPredsSafe {
return true
}
}
// Relax requirement: If we have a definitive range (both set) and it's safe,
// we allow it even if not explicitly "checked" by an IF,
// because definition-based ranges (like constants or arithmetic on constants) are certain.
isDefinitiveSafe := res.minValueSet && res.maxValueSet
if !res.isRangeCheck && !isDefinitiveSafe {
return false
}
return s.validateRangeLimits(v, res, dstInt)
}
func (s *overflowState) validateRangeLimits(v ssa.Value, res *rangeResult, dstInt IntTypeInfo) bool {
minValue, minValueSet, maxValue, maxValueSet := res.minValue, res.minValueSet, res.maxValue, res.maxValueSet
isSrcUnsigned := isUint(v)
// Check for impossible ranges (disjoint)
if !isSrcUnsigned {
if minValueSet && maxValueSet && toInt64(minValue) > toInt64(maxValue) {
return true
}
}
if isSrcUnsigned && minValueSet && maxValueSet && minValue > maxValue {
return true
}
srcInt, err := GetIntTypeInfo(v.Type())
if err != nil {
return false
}
if dstInt.Signed {
if isSrcUnsigned {
return maxValueSet && maxValue <= dstInt.Max
}
minSafe := true
if srcInt.Min < dstInt.Min {
minSafe = minValueSet && toInt64(minValue) >= dstInt.Min
}
maxSafe := true
if srcInt.Max > dstInt.Max {
maxSafe = maxValueSet && toInt64(maxValue) <= toInt64(dstInt.Max)
}
return minSafe && maxSafe
}
if isSrcUnsigned {
return maxValueSet && maxValue <= dstInt.Max
}
minSafe := true
if srcInt.Min < 0 {
minBound := int64(0)
if res.isRangeCheck && maxValueSet && toInt64(maxValue) > signedMaxForUnsignedSize(dstInt.Size) {
minBound = signedMinForUnsignedSize(dstInt.Size)
}
minSafe = minValueSet && toInt64(minValue) >= minBound
}
maxSafe := true
if srcInt.Max > dstInt.Max {
maxSafe = maxValueSet && maxValue <= dstInt.Max
}
return minSafe && maxSafe
}
func signedMinForUnsignedSize(size int) int64 {
if size >= 64 {
return math.MinInt64
}
return -(int64(1) << (size - 1))
}
func signedMaxForUnsignedSize(size int) int64 {
if size >= 64 {
return math.MaxInt64
}
return (int64(1) << (size - 1)) - 1
}
func (s *overflowState) isSafeFromPredecessor(v ssa.Value, dstInt IntTypeInfo, pred *ssa.BasicBlock, targetBlock *ssa.BasicBlock) bool {
edgeValue := v
if phi, ok := v.(*ssa.Phi); ok && phi.Block() == targetBlock {
for i, p := range targetBlock.Preds {
if p == pred && i < len(phi.Edges) {
edgeValue = phi.Edges[i]
break
}
}
}
if len(pred.Instrs) > 0 {
if vIf, ok := pred.Instrs[len(pred.Instrs)-1].(*ssa.If); ok {
for i, succ := range pred.Succs {
if succ == targetBlock {
result := s.Analyzer.getResultRangeForIfEdge(vIf, i == 0, edgeValue)
defer s.Analyzer.releaseResult(result)
if s.isSafeIfEdgeResult(edgeValue, dstInt, result) {
return true
}
}
}
return false
case *ssa.Phi:
original = v.Edges[0]
case *ssa.Extract:
original = v.Tuple
default:
return false
}
}
}
func hasExplicitRangeCheck(instr *ssa.Convert, dstType string) bool {
dstInt, err := parseIntType(dstType)
if err != nil {
return false
}
srcInt, err := parseIntType(instr.X.Type().String())
if err != nil {
return false
}
minValue := srcInt.min
maxValue := srcInt.max
explicitPositiveVals := []uint{}
explicitNegativeVals := []int{}
if minValue > dstInt.min && maxValue < dstInt.max {
return true
}
visitedIfs := make(map[*ssa.If]bool)
for _, block := range instr.Parent().Blocks {
for _, blockInstr := range block.Instrs {
switch v := blockInstr.(type) {
case *ssa.If:
result := getResultRange(v, instr, visitedIfs)
if result.isRangeCheck {
minValue = max(minValue, result.minValue)
maxValue = min(maxValue, result.maxValue)
explicitPositiveVals = append(explicitPositiveVals, result.explicitPositiveVals...)
explicitNegativeVals = append(explicitNegativeVals, result.explicitNegativeVals...)
}
case *ssa.Call:
// These function return an int of a guaranteed size.
if v != instr.X {
continue
}
if fn, isBuiltin := v.Call.Value.(*ssa.Builtin); isBuiltin {
switch fn.Name() {
case "len", "cap":
minValue = 0
if len(pred.Preds) == 1 {
parent := pred.Preds[0]
if len(parent.Instrs) > 0 {
if vIf, ok := parent.Instrs[len(parent.Instrs)-1].(*ssa.If); ok {
for i, succ := range parent.Succs {
if succ == pred {
result := s.Analyzer.getResultRangeForIfEdge(vIf, i == 0, edgeValue)
defer s.Analyzer.releaseResult(result)
if s.isSafeIfEdgeResult(edgeValue, dstInt, result) {
return true
}
}
}
}
if explicitValsInRange(explicitPositiveVals, explicitNegativeVals, dstInt) {
return true
} else if minValue >= dstInt.min && maxValue <= dstInt.max {
return true
}
}
}
return false
}
// getResultRange is a recursive function that walks the branches of the if statement to find the range of the variable.
func getResultRange(ifInstr *ssa.If, instr *ssa.Convert, visitedIfs map[*ssa.If]bool) rangeResult {
if visitedIfs[ifInstr] {
return rangeResult{minValue: math.MinInt, maxValue: math.MaxUint}
}
visitedIfs[ifInstr] = true
cond := ifInstr.Cond
binOp, ok := cond.(*ssa.BinOp)
if !ok || !isRangeCheck(binOp, instr.X) {
return rangeResult{minValue: math.MinInt, maxValue: math.MaxUint}
}
result := rangeResult{
minValue: math.MinInt,
maxValue: math.MaxUint,
isRangeCheck: true,
}
thenBounds := walkBranchForConvert(ifInstr.Block().Succs[0], instr, visitedIfs)
elseBounds := walkBranchForConvert(ifInstr.Block().Succs[1], instr, visitedIfs)
updateResultFromBinOp(&result, binOp, instr, thenBounds.convertFound)
if thenBounds.convertFound {
result.convertFound = true
result.minValue = maxWithPtr(result.minValue, thenBounds.minValue)
result.maxValue = minWithPtr(result.maxValue, thenBounds.maxValue)
} else if elseBounds.convertFound {
result.convertFound = true
result.minValue = maxWithPtr(result.minValue, elseBounds.minValue)
result.maxValue = minWithPtr(result.maxValue, elseBounds.maxValue)
}
result.explicitPositiveVals = append(result.explicitPositiveVals, thenBounds.explicitPositiveVals...)
result.explicitNegativeVals = append(result.explicitNegativeVals, thenBounds.explicitNegativeVals...)
result.explicitPositiveVals = append(result.explicitPositiveVals, elseBounds.explicitPositiveVals...)
result.explicitNegativeVals = append(result.explicitNegativeVals, elseBounds.explicitNegativeVals...)
return result
}
// updateResultFromBinOp updates the rangeResult based on the BinOp instruction and the location of the Convert instruction.
func updateResultFromBinOp(result *rangeResult, binOp *ssa.BinOp, instr *ssa.Convert, successPathConvert bool) {
x, y := binOp.X, binOp.Y
operandsFlipped := false
compareVal, op := getRealValueFromOperation(instr.X)
// Handle FieldAddr
if fieldAddr, ok := compareVal.(*ssa.FieldAddr); ok {
compareVal = fieldAddr
}
if !isSameOrRelated(x, compareVal) {
y = x
operandsFlipped = true
}
constVal, ok := y.(*ssa.Const)
if !ok {
return
}
// TODO: constVal.Value nil check avoids #1229 panic but seems to be hiding a bug in the code above or in x/tools/go/ssa.
if constVal.Value == nil {
// log.Fatalf("[gosec] constVal.Value is nil flipped=%t, constVal=%#v, binOp=%#v", operandsFlipped, constVal, binOp)
return
}
switch binOp.Op {
case token.LEQ, token.LSS:
updateMinMaxForLessOrEqual(result, constVal, binOp.Op, operandsFlipped, successPathConvert)
case token.GEQ, token.GTR:
updateMinMaxForGreaterOrEqual(result, constVal, binOp.Op, operandsFlipped, successPathConvert)
case token.EQL:
if !successPathConvert {
break
}
updateExplicitValues(result, constVal)
case token.NEQ:
if successPathConvert {
break
}
updateExplicitValues(result, constVal)
}
if op == "neg" {
minVal := result.minValue
maxVal := result.maxValue
if minVal >= 0 {
result.maxValue = uint(minVal)
}
if maxVal <= math.MaxInt {
result.minValue = int(maxVal)
}
}
}
func updateExplicitValues(result *rangeResult, constVal *ssa.Const) {
if strings.Contains(constVal.String(), "-") {
result.explicitNegativeVals = append(result.explicitNegativeVals, int(constVal.Int64()))
} else {
result.explicitPositiveVals = append(result.explicitPositiveVals, uint(constVal.Uint64()))
}
}
func updateMinMaxForLessOrEqual(result *rangeResult, constVal *ssa.Const, op token.Token, operandsFlipped bool, successPathConvert bool) {
// If the success path has a conversion and the operands are not flipped, then the constant value is the maximum value.
if successPathConvert && !operandsFlipped {
result.maxValue = uint(constVal.Uint64())
if op == token.LEQ {
result.maxValue--
}
} else {
result.minValue = int(constVal.Int64())
if op == token.GTR {
result.minValue++
}
}
}
func updateMinMaxForGreaterOrEqual(result *rangeResult, constVal *ssa.Const, op token.Token, operandsFlipped bool, successPathConvert bool) {
// If the success path has a conversion and the operands are not flipped, then the constant value is the minimum value.
if successPathConvert && !operandsFlipped {
result.minValue = int(constVal.Int64())
if op == token.GEQ {
result.minValue++
}
} else {
result.maxValue = uint(constVal.Uint64())
if op == token.LSS {
result.maxValue--
}
}
}
// walkBranchForConvert walks the branch of the if statement to find the range of the variable and where the conversion is.
func walkBranchForConvert(block *ssa.BasicBlock, instr *ssa.Convert, visitedIfs map[*ssa.If]bool) branchResults {
bounds := branchResults{}
for _, blockInstr := range block.Instrs {
switch v := blockInstr.(type) {
case *ssa.If:
result := getResultRange(v, instr, visitedIfs)
bounds.convertFound = bounds.convertFound || result.convertFound
if result.isRangeCheck {
bounds.minValue = toPtr(maxWithPtr(result.minValue, bounds.minValue))
bounds.maxValue = toPtr(minWithPtr(result.maxValue, bounds.maxValue))
bounds.explicitPositiveVals = append(bounds.explicitPositiveVals, result.explicitPositiveVals...)
bounds.explicitNegativeVals = append(bounds.explicitNegativeVals, result.explicitNegativeVals...)
}
case *ssa.Call:
if v == instr.X {
if fn, isBuiltin := v.Call.Value.(*ssa.Builtin); isBuiltin && (fn.Name() == "len" || fn.Name() == "cap") {
bounds.minValue = toPtr(0)
}
}
case *ssa.Convert:
if v == instr {
bounds.convertFound = true
return bounds
}
}
}
return bounds
}
func isRangeCheck(v ssa.Value, x ssa.Value) bool {
compareVal, _ := getRealValueFromOperation(x)
switch op := v.(type) {
case *ssa.BinOp:
switch op.Op {
case token.LSS, token.LEQ, token.GTR, token.GEQ, token.EQL, token.NEQ:
leftMatch := isSameOrRelated(op.X, compareVal)
rightMatch := isSameOrRelated(op.Y, compareVal)
return leftMatch || rightMatch
}
}
return false
}
func getRealValueFromOperation(v ssa.Value) (ssa.Value, string) {
switch v := v.(type) {
case *ssa.UnOp:
if v.Op == token.SUB {
val, _ := getRealValueFromOperation(v.X)
return val, "neg"
}
return getRealValueFromOperation(v.X)
case *ssa.FieldAddr:
return v, "field"
case *ssa.Alloc:
return v, "alloc"
}
return v, ""
}
func isSameOrRelated(a, b ssa.Value) bool {
aVal, _ := getRealValueFromOperation(a)
bVal, _ := getRealValueFromOperation(b)
if aVal == bVal {
return true
}
// Check if both are FieldAddr operations referring to the same field of the same struct
if aField, aOk := aVal.(*ssa.FieldAddr); aOk {
if bField, bOk := bVal.(*ssa.FieldAddr); bOk {
return aField.X == bField.X && aField.Field == bField.Field
}
}
return false
}
func explicitValsInRange(explicitPosVals []uint, explicitNegVals []int, dstInt integer) bool {
if len(explicitPosVals) == 0 && len(explicitNegVals) == 0 {
func (s *overflowState) isSafeIfEdgeResult(v ssa.Value, dstInt IntTypeInfo, result *rangeResult) bool {
if !result.isRangeCheck {
return false
}
for _, val := range explicitPosVals {
if val > dstInt.max {
return false
isSrcUnsigned := isUint(v)
if dstInt.Signed {
if isSrcUnsigned {
return result.maxValueSet && result.maxValue <= dstInt.Max
}
return (result.minValueSet && toInt64(result.minValue) >= dstInt.Min) && (result.maxValueSet && toInt64(result.maxValue) <= toInt64(dstInt.Max))
}
for _, val := range explicitNegVals {
if val < dstInt.min {
return false
}
if isSrcUnsigned {
return result.maxValueSet && result.maxValue <= dstInt.Max
}
return true
}
func minWithPtr[T cmp.Ordered](a T, b *T) T {
if b == nil {
return a
}
return min(a, *b)
}
func maxWithPtr[T cmp.Ordered](a T, b *T) T {
if b == nil {
return a
}
return max(a, *b)
}
func toPtr[T any](a T) *T {
return &a
return (result.minValueSet && toInt64(result.minValue) >= 0) && (result.maxValueSet && result.maxValue <= dstInt.Max)
}

Some files were not shown because too many files have changed in this diff Show More