diff --git a/cmd/nuclei/main.go b/cmd/nuclei/main.go
index a4ba02ddd3..a96bf64fd2 100644
--- a/cmd/nuclei/main.go
+++ b/cmd/nuclei/main.go
@@ -185,6 +185,11 @@ func main() {
go func() {
for range c {
gologger.Info().Msgf("CTRL+C pressed: Exiting\n")
+ if options.DASTServer {
+ nucleiRunner.Close()
+ os.Exit(1)
+ }
+
gologger.Info().Msgf("Attempting graceful shutdown...")
if options.EnableCloudUpload {
gologger.Info().Msgf("Uploading scan results to cloud...")
@@ -357,9 +362,15 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.StringVarP(&options.FuzzingMode, "fuzzing-mode", "fm", "", "overrides fuzzing mode set in template (multiple, single)"),
flagSet.BoolVar(&fuzzFlag, "fuzz", false, "enable loading fuzzing templates (Deprecated: use -dast instead)"),
flagSet.BoolVar(&options.DAST, "dast", false, "enable / run dast (fuzz) nuclei templates"),
+ flagSet.BoolVarP(&options.DASTServer, "dast-server", "dts", false, "enable dast server mode (live fuzzing)"),
+ flagSet.BoolVarP(&options.DASTReport, "dast-report", "dtr", false, "write dast scan report to file"),
+ flagSet.StringVarP(&options.DASTServerToken, "dast-server-token", "dtst", "", "dast server token (optional)"),
+ flagSet.StringVarP(&options.DASTServerAddress, "dast-server-address", "dtsa", "localhost:9055", "dast server address"),
flagSet.BoolVarP(&options.DisplayFuzzPoints, "display-fuzz-points", "dfp", false, "display fuzz points in the output for debugging"),
flagSet.IntVar(&options.FuzzParamFrequency, "fuzz-param-frequency", 10, "frequency of uninteresting parameters for fuzzing before skipping"),
flagSet.StringVarP(&options.FuzzAggressionLevel, "fuzz-aggression", "fa", "low", "fuzzing aggression level controls payload count for fuzz (low, medium, high)"),
+ flagSet.StringSliceVarP(&options.Scope, "fuzz-scope", "cs", nil, "in scope url regex to be followed by fuzzer", goflags.FileCommaSeparatedStringSliceOptions),
+ flagSet.StringSliceVarP(&options.OutOfScope, "fuzz-out-scope", "cos", nil, "out of scope url regex to be excluded by fuzzer", goflags.FileCommaSeparatedStringSliceOptions),
)
flagSet.CreateGroup("uncover", "Uncover",
diff --git a/go.mod b/go.mod
index 6161200383..f10aafd5e2 100644
--- a/go.mod
+++ b/go.mod
@@ -51,6 +51,7 @@ require (
github.com/DataDog/gostackparse v0.6.0
github.com/Masterminds/semver/v3 v3.2.1
github.com/Mzack9999/gcache v0.0.0-20230410081825-519e28eab057
+ github.com/alitto/pond v1.9.2
github.com/antchfx/xmlquery v1.3.17
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2
github.com/aws/aws-sdk-go-v2 v1.19.0
@@ -58,6 +59,7 @@ require (
github.com/aws/aws-sdk-go-v2/credentials v1.13.27
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.72
github.com/aws/aws-sdk-go-v2/service/s3 v1.37.0
+ github.com/ccojocar/randdetect v0.0.0-20241118085251-1581dcdbf207
github.com/cespare/xxhash v1.1.0
github.com/charmbracelet/glamour v0.8.0
github.com/clbanning/mxj/v2 v2.7.0
@@ -70,13 +72,14 @@ require (
github.com/go-ldap/ldap/v3 v3.4.5
github.com/go-pg/pg v8.0.7+incompatible
github.com/go-sql-driver/mysql v1.7.1
+ github.com/gorilla/mux v1.8.1
github.com/h2non/filetype v1.1.3
github.com/invopop/yaml v0.3.1
github.com/kitabisa/go-ci v1.0.3
- github.com/labstack/echo/v4 v4.10.2
+ github.com/labstack/echo/v4 v4.12.0
github.com/leslie-qiwa/flat v0.0.0-20230424180412-f9d1cf014baa
github.com/lib/pq v1.10.9
- github.com/mattn/go-sqlite3 v1.14.22
+ github.com/mattn/go-sqlite3 v1.14.24
github.com/mholt/archiver v3.1.1+incompatible
github.com/microsoft/go-mssqldb v1.6.0
github.com/ory/dockertest/v3 v3.10.0
@@ -277,7 +280,7 @@ require (
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/snappy v0.0.4 // indirect
github.com/google/go-querystring v1.1.0 // indirect
- github.com/google/uuid v1.6.0 // indirect
+ github.com/google/uuid v1.6.0
github.com/gorilla/css v1.0.1 // indirect
github.com/hashicorp/go-cleanhttp v0.5.2 // indirect
github.com/hashicorp/go-retryablehttp v0.7.7 // indirect
@@ -348,7 +351,7 @@ require (
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/jcmturner/gokrb5/v8 v8.4.4
github.com/kevinburke/ssh_config v1.2.0 // indirect
- github.com/labstack/gommon v0.4.0 // indirect
+ github.com/labstack/gommon v0.4.2 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/nwaples/rardecode v1.1.3 // indirect
github.com/pierrec/lz4 v2.6.1+incompatible // indirect
diff --git a/go.sum b/go.sum
index 38538f3258..236f0f8ea0 100644
--- a/go.sum
+++ b/go.sum
@@ -112,6 +112,8 @@ github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAu
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
github.com/alexbrainman/sspi v0.0.0-20210105120005-909beea2cc74 h1:Kk6a4nehpJ3UuJRqlA3JxYxBZEqCeOmATOvrbT4p9RA=
github.com/alexbrainman/sspi v0.0.0-20210105120005-909beea2cc74/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4=
+github.com/alitto/pond v1.9.2 h1:9Qb75z/scEZVCoSU+osVmQ0I0JOeLfdTDafrbcJ8CLs=
+github.com/alitto/pond v1.9.2/go.mod h1:xQn3P/sHTYcU/1BR3i86IGIrilcrGC2LiS+E2+CJWsI=
github.com/andybalholm/brotli v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y=
github.com/andybalholm/brotli v1.1.0 h1:eLKJA0d02Lf0mVpIDgYnqXcUn0GqVmEFny3VuID1U3M=
github.com/andybalholm/brotli v1.1.0/go.mod h1:sms7XGricyQI9K10gOSf56VKKWS4oLer58Q+mhRPtnY=
@@ -214,6 +216,8 @@ github.com/bytedance/sonic v1.9.1/go.mod h1:i736AoUSYt75HyZLoJW9ERYxcy6eaN6h4BZX
github.com/caddyserver/certmagic v0.19.2 h1:HZd1AKLx4592MalEGQS39DKs2ZOAJCEM/xYPMQ2/ui0=
github.com/caddyserver/certmagic v0.19.2/go.mod h1:fsL01NomQ6N+kE2j37ZCnig2MFosG+MIO4ztnmG/zz8=
github.com/casbin/casbin/v2 v2.1.2/go.mod h1:YcPU1XXisHhLzuxH9coDNf2FbKpjGlbCg3n9yuLkIJQ=
+github.com/ccojocar/randdetect v0.0.0-20241118085251-1581dcdbf207 h1:ZXvIckmW4Ky9CYRXGzf3kdnivvpUOUiEdDb5afC0VKk=
+github.com/ccojocar/randdetect v0.0.0-20241118085251-1581dcdbf207/go.mod h1:bR+6Ytp4l03qh4oOxwjzR/ld5ssouHtjIOdTKb8fox0=
github.com/cenkalti/backoff v2.2.1+incompatible/go.mod h1:90ReRw6GdpyfrHakVjL/QHaoyV4aDUVVkXQJJJ3NXXM=
github.com/cenkalti/backoff/v4 v4.2.1 h1:y4OZtCnogmCPw98Zjyt5a6+QwPLGkiQsYW5oUqylYbM=
github.com/cenkalti/backoff/v4 v4.2.1/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE=
@@ -533,6 +537,8 @@ github.com/gorilla/css v1.0.1 h1:ntNaBIghp6JmvWnxbZKANoLyuXTPZ4cAMlo6RyhlbO8=
github.com/gorilla/css v1.0.1/go.mod h1:BvnYkspnSzMmwRK+b8/xgNPLiIuNZr6vbZBTPQ2A3b0=
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/mux v1.7.3/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
+github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY=
+github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ=
github.com/gorilla/securecookie v1.1.1/go.mod h1:ra0sb63/xPlUeL+yeDciTfxMRAA+MP+HVt/4epWDjd4=
github.com/gorilla/sessions v1.2.1/go.mod h1:dk2InVEVJ0sfLlnXv9EAgkf6ecYs/i80K/zI+bUmuGM=
github.com/gorilla/websocket v0.0.0-20170926233335-4201258b820c/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
@@ -676,10 +682,10 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
-github.com/labstack/echo/v4 v4.10.2 h1:n1jAhnq/elIFTHr1EYpiYtyKgx4RW9ccVgkqByZaN2M=
-github.com/labstack/echo/v4 v4.10.2/go.mod h1:OEyqf2//K1DFdE57vw2DRgWY0M7s65IVQO2FzvI4J5k=
-github.com/labstack/gommon v0.4.0 h1:y7cvthEAEbU0yHOf4axH8ZG2NH8knB9iNSoTO8dyIk8=
-github.com/labstack/gommon v0.4.0/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM=
+github.com/labstack/echo/v4 v4.12.0 h1:IKpw49IMryVB2p1a4dzwlhP1O2Tf2E0Ir/450lH+kI0=
+github.com/labstack/echo/v4 v4.12.0/go.mod h1:UP9Cr2DJXbOK3Kr9ONYzNowSh7HP0aG0ShAyycHSJvM=
+github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0=
+github.com/labstack/gommon v0.4.2/go.mod h1:QlUFxVM+SNXhDL/Z7YhocGIBYOiwB0mXm1+1bAPHPyU=
github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs=
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
@@ -707,12 +713,10 @@ github.com/mackerelio/go-osstat v0.2.4/go.mod h1:Zy+qzGdZs3A9cuIqmgbJvwbmLQH9dJv
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
-github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
-github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
@@ -721,8 +725,8 @@ github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m
github.com/mattn/go-runewidth v0.0.12/go.mod h1:RAqKPSqVFrSLVXbA8x7dzmKdmGzieGRCM46jaSJTDAk=
github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc=
github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
-github.com/mattn/go-sqlite3 v1.14.22 h1:2gZY6PC6kBnID23Tichd1K+Z0oS6nE/XwU+Vz/5o4kU=
-github.com/mattn/go-sqlite3 v1.14.22/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
+github.com/mattn/go-sqlite3 v1.14.24 h1:tpSp2G2KyMnnQu99ngJ47EIkWVmliIizyZBfPrBWDRM=
+github.com/mattn/go-sqlite3 v1.14.24/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4=
github.com/mholt/acmez v1.2.0 h1:1hhLxSgY5FvH5HCnGUuwbKY2VQVo8IU7rxXKSnZ7F30=
@@ -1083,7 +1087,6 @@ github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijb
github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
-github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo=
github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/weppos/publicsuffix-go v0.12.0/go.mod h1:z3LCPQ38eedDQSwmsSRW4Y7t2L8Ln16JPQ02lHAdn5k=
@@ -1396,10 +1399,7 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20211103235746-7861aae1554b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -1638,7 +1638,6 @@ gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools/v3 v3.3.0 h1:MfDY1b1/0xN1CyMlQDac0ziEy9zJQd9CXBRRDHw2jJo=
diff --git a/internal/runner/lazy.go b/internal/runner/lazy.go
index 5cb91cfd09..30cca8e1d9 100644
--- a/internal/runner/lazy.go
+++ b/internal/runner/lazy.go
@@ -114,8 +114,13 @@ func GetLazyAuthFetchCallback(opts *AuthLazyFetchOptions) authx.LazyFetchSecret
}
// dynamic values
for k, v := range e.OperatorsResult.DynamicValues {
- if len(v) > 0 {
- data[k] = v[0]
+ // Iterate through all the values and choose the
+ // largest value as the extracted value
+ for _, value := range v {
+ oldVal, ok := data[k]
+ if !ok || len(value) > len(oldVal.(string)) {
+ data[k] = value
+ }
}
}
// named extractors
diff --git a/internal/runner/options.go b/internal/runner/options.go
index e36c248a64..65cd4ae790 100644
--- a/internal/runner/options.go
+++ b/internal/runner/options.go
@@ -171,6 +171,11 @@ func ValidateOptions(options *types.Options) error {
if options.Validate {
validateTemplatePaths(config.DefaultConfig.TemplatesDirectory, options.Templates, options.Workflows)
}
+ if options.DAST {
+ if err := validateDASTOptions(options); err != nil {
+ return err
+ }
+ }
// Verify if any of the client certificate options were set since it requires all three to work properly
if options.HasClientCertificates() {
@@ -274,6 +279,14 @@ func validateMissingGitLabOptions(options *types.Options) []string {
return missing
}
+func validateDASTOptions(options *types.Options) error {
+ // Ensure the DAST server token meets minimum length requirement
+ if len(options.DASTServerToken) > 0 && len(options.DASTServerToken) < 16 {
+ return fmt.Errorf("DAST server token must be at least 16 characters long")
+ }
+ return nil
+}
+
func createReportingOptions(options *types.Options) (*reporting.Options, error) {
var reportingOptions = &reporting.Options{}
if options.ReportingConfig != "" {
diff --git a/internal/runner/runner.go b/internal/runner/runner.go
index bce579c48d..fa4e837a5c 100644
--- a/internal/runner/runner.go
+++ b/internal/runner/runner.go
@@ -4,8 +4,6 @@ import (
"context"
"encoding/json"
"fmt"
- "net/http"
- _ "net/http/pprof"
"os"
"path/filepath"
"reflect"
@@ -14,6 +12,7 @@ import (
"time"
"github.com/projectdiscovery/nuclei/v3/internal/pdcp"
+ "github.com/projectdiscovery/nuclei/v3/internal/server"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/frequency"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
@@ -25,6 +24,7 @@ import (
"github.com/projectdiscovery/utils/env"
fileutil "github.com/projectdiscovery/utils/file"
permissionutil "github.com/projectdiscovery/utils/permission"
+ pprofutil "github.com/projectdiscovery/utils/pprof"
updateutils "github.com/projectdiscovery/utils/update"
"github.com/logrusorgru/aurora"
@@ -40,6 +40,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
"github.com/projectdiscovery/nuclei/v3/pkg/core"
"github.com/projectdiscovery/nuclei/v3/pkg/external/customtemplates"
+ fuzzStats "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/input"
parsers "github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
@@ -88,7 +89,7 @@ type Runner struct {
rateLimiter *ratelimit.Limiter
hostErrors hosterrorscache.CacheInterface
resumeCfg *types.ResumeCfg
- pprofServer *http.Server
+ pprofServer *pprofutil.PprofServer
pdcpUploadErrMsg string
inputProvider provider.InputProvider
fuzzFrequencyCache *frequency.Tracker
@@ -96,10 +97,10 @@ type Runner struct {
tmpDir string
parser parser.Parser
httpApiEndpoint *httpapi.Server
+ fuzzStats *fuzzStats.Tracker
+ dastServer *server.DASTServer
}
-const pprofServerAddress = "127.0.0.1:8086"
-
// New creates a new client for running the enumeration process.
func New(options *types.Options) (*Runner, error) {
runner := &Runner{
@@ -216,15 +217,8 @@ func New(options *types.Options) (*Runner, error) {
templates.SeverityColorizer = colorizer.New(runner.colorizer)
if options.EnablePprof {
- server := &http.Server{
- Addr: pprofServerAddress,
- Handler: http.DefaultServeMux,
- }
- gologger.Info().Msgf("Listening pprof debug server on: %s", pprofServerAddress)
- runner.pprofServer = server
- go func() {
- _ = server.ListenAndServe()
- }()
+ runner.pprofServer = pprofutil.NewPprofServer()
+ runner.pprofServer.Start()
}
if options.HttpApiEndpoint != "" {
@@ -249,14 +243,6 @@ func New(options *types.Options) (*Runner, error) {
}
runner.inputProvider = inputProvider
- // Create the output file if asked
- outputWriter, err := output.NewStandardWriter(options)
- if err != nil {
- return nil, errors.Wrap(err, "could not create output file")
- }
- // setup a proxy writer to automatically upload results to PDCP
- runner.output = runner.setupPDCPUpload(outputWriter)
-
if options.JSONL && options.EnableProgressBar {
options.StatsJSON = true
}
@@ -296,6 +282,42 @@ func New(options *types.Options) (*Runner, error) {
}
runner.resumeCfg = resumeCfg
+ if options.DASTReport || options.DASTServer {
+ var err error
+ runner.fuzzStats, err = fuzzStats.NewTracker()
+ if err != nil {
+ return nil, errors.Wrap(err, "could not create fuzz stats db")
+ }
+ if !options.DASTServer {
+ dastServer, err := server.NewStatsServer(runner.fuzzStats)
+ if err != nil {
+ return nil, errors.Wrap(err, "could not create dast server")
+ }
+ runner.dastServer = dastServer
+ }
+ }
+
+ // Create the output file if asked
+ outputWriter, err := output.NewStandardWriter(options)
+ if err != nil {
+ return nil, errors.Wrap(err, "could not create output file")
+ }
+ if runner.fuzzStats != nil {
+ outputWriter.JSONLogRequestHook = func(request *output.JSONLogRequest) {
+ if request.Error == "none" || request.Error == "" {
+ return
+ }
+ runner.fuzzStats.RecordErrorEvent(fuzzStats.ErrorEvent{
+ TemplateID: request.Template,
+ URL: request.Input,
+ Error: request.Error,
+ })
+ }
+ }
+
+ // setup a proxy writer to automatically upload results to PDCP
+ runner.output = runner.setupPDCPUpload(outputWriter)
+
opts := interactsh.DefaultOptions(runner.output, runner.issuesClient, runner.progress)
opts.Debug = runner.options.Debug
opts.NoColor = runner.options.NoColor
@@ -362,6 +384,9 @@ func (r *Runner) runStandardEnumeration(executerOpts protocols.ExecutorOptions,
// Close releases all the resources and cleans up
func (r *Runner) Close() {
+ if r.dastServer != nil {
+ r.dastServer.Close()
+ }
// dump hosterrors cache
if r.hostErrors != nil {
r.hostErrors.Close()
@@ -380,7 +405,7 @@ func (r *Runner) Close() {
}
protocolinit.Close()
if r.pprofServer != nil {
- _ = r.pprofServer.Shutdown(context.Background())
+ r.pprofServer.Stop()
}
if r.rateLimiter != nil {
r.rateLimiter.Stop()
@@ -439,6 +464,41 @@ func (r *Runner) setupPDCPUpload(writer output.Writer) output.Writer {
// RunEnumeration sets up the input layer for giving input nuclei.
// binary and runs the actual enumeration
func (r *Runner) RunEnumeration() error {
+ // If the user has asked for DAST server mode, run the live
+ // DAST fuzzing server.
+ if r.options.DASTServer {
+ execurOpts := &server.NucleiExecutorOptions{
+ Options: r.options,
+ Output: r.output,
+ Progress: r.progress,
+ Catalog: r.catalog,
+ IssuesClient: r.issuesClient,
+ RateLimiter: r.rateLimiter,
+ Interactsh: r.interactsh,
+ ProjectFile: r.projectFile,
+ Browser: r.browser,
+ Colorizer: r.colorizer,
+ Parser: r.parser,
+ TemporaryDirectory: r.tmpDir,
+ FuzzStatsDB: r.fuzzStats,
+ }
+ dastServer, err := server.New(&server.Options{
+ Address: r.options.DASTServerAddress,
+ Templates: r.options.Templates,
+ OutputWriter: r.output,
+ Verbose: r.options.Verbose,
+ Token: r.options.DASTServerToken,
+ InScope: r.options.Scope,
+ OutScope: r.options.OutOfScope,
+ NucleiExecutorOptions: execurOpts,
+ })
+ if err != nil {
+ return err
+ }
+ r.dastServer = dastServer
+ return dastServer.Start()
+ }
+
// If user asked for new templates to be executed, collect the list from the templates' directory.
if r.options.NewTemplates {
if arr := config.DefaultConfig.GetNewAdditions(); len(arr) > 0 {
@@ -624,6 +684,14 @@ func (r *Runner) RunEnumeration() error {
Retries: r.options.Retries,
}, "")
+ if r.dastServer != nil {
+ go func() {
+ if err := r.dastServer.Start(); err != nil {
+ gologger.Error().Msgf("could not start dast server: %v", err)
+ }
+ }()
+ }
+
enumeration := false
var results *atomic.Bool
results, err = r.runStandardEnumeration(executorOpts, store, executorEngine)
@@ -633,6 +701,9 @@ func (r *Runner) RunEnumeration() error {
return err
}
+ if executorOpts.FuzzStatsDB != nil {
+ executorOpts.FuzzStatsDB.Close()
+ }
if r.interactsh != nil {
matched := r.interactsh.Close()
if matched {
diff --git a/internal/server/dedupe.go b/internal/server/dedupe.go
new file mode 100644
index 0000000000..f5c5b775bf
--- /dev/null
+++ b/internal/server/dedupe.go
@@ -0,0 +1,122 @@
+package server
+
+import (
+ "crypto/sha256"
+ "encoding/hex"
+ "net/url"
+ "sort"
+ "strings"
+ "sync"
+
+ "github.com/projectdiscovery/nuclei/v3/pkg/input/types"
+ mapsutil "github.com/projectdiscovery/utils/maps"
+)
+
+var dynamicHeaders = map[string]bool{
+ "date": true,
+ "if-modified-since": true,
+ "if-unmodified-since": true,
+ "cache-control": true,
+ "if-none-match": true,
+ "if-match": true,
+ "authorization": true,
+ "cookie": true,
+ "x-csrf-token": true,
+ "content-length": true,
+ "content-md5": true,
+ "host": true,
+ "x-request-id": true,
+ "x-correlation-id": true,
+ "user-agent": true,
+ "referer": true,
+}
+
+type requestDeduplicator struct {
+ hashes map[string]struct{}
+ lock *sync.RWMutex
+}
+
+func newRequestDeduplicator() *requestDeduplicator {
+ return &requestDeduplicator{
+ hashes: make(map[string]struct{}),
+ lock: &sync.RWMutex{},
+ }
+}
+
+func (r *requestDeduplicator) isDuplicate(req *types.RequestResponse) bool {
+ hash, err := hashRequest(req)
+ if err != nil {
+ return false
+ }
+
+ r.lock.RLock()
+ _, ok := r.hashes[hash]
+ r.lock.RUnlock()
+ if ok {
+ return true
+ }
+
+ r.lock.Lock()
+ r.hashes[hash] = struct{}{}
+ r.lock.Unlock()
+ return false
+}
+
+func hashRequest(req *types.RequestResponse) (string, error) {
+ normalizedURL, err := normalizeURL(req.URL.URL)
+ if err != nil {
+ return "", err
+ }
+
+ var hashContent strings.Builder
+ hashContent.WriteString(req.Request.Method)
+ hashContent.WriteString(normalizedURL)
+
+ headers := sortedNonDynamicHeaders(req.Request.Headers)
+ for _, header := range headers {
+ hashContent.WriteString(header.Key)
+ hashContent.WriteString(header.Value)
+ }
+
+ if len(req.Request.Body) > 0 {
+ hashContent.Write([]byte(req.Request.Body))
+ }
+
+ // Calculate the SHA256 hash
+ hash := sha256.Sum256([]byte(hashContent.String()))
+ return hex.EncodeToString(hash[:]), nil
+}
+
+func normalizeURL(u *url.URL) (string, error) {
+ query := u.Query()
+ sortedQuery := make(url.Values)
+ for k, v := range query {
+ sort.Strings(v)
+ sortedQuery[k] = v
+ }
+ u.RawQuery = sortedQuery.Encode()
+
+ if u.Path == "" {
+ u.Path = "/"
+ }
+ return u.String(), nil
+}
+
+type header struct {
+ Key string
+ Value string
+}
+
+func sortedNonDynamicHeaders(headers mapsutil.OrderedMap[string, string]) []header {
+ var result []header
+ headers.Iterate(func(k, v string) bool {
+ if !dynamicHeaders[strings.ToLower(k)] {
+ result = append(result, header{Key: k, Value: v})
+ }
+ return true
+ })
+ sort.Slice(result, func(i, j int) bool {
+ return result[i].Key < result[j].Key
+ })
+ return result
+}
diff --git a/internal/server/nuclei_sdk.go b/internal/server/nuclei_sdk.go
new file mode 100644
index 0000000000..aad3377437
--- /dev/null
+++ b/internal/server/nuclei_sdk.go
@@ -0,0 +1,199 @@
+package server
+
+import (
+ "context"
+ "fmt"
+ _ "net/http/pprof"
+ "strings"
+
+ "github.com/logrusorgru/aurora"
+ "github.com/projectdiscovery/gologger"
+ "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/frequency"
+ "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
+ "github.com/projectdiscovery/nuclei/v3/pkg/input/formats"
+ "github.com/projectdiscovery/nuclei/v3/pkg/input/provider/http"
+ "github.com/projectdiscovery/nuclei/v3/pkg/projectfile"
+ "gopkg.in/yaml.v3"
+
+ "github.com/pkg/errors"
+ "github.com/projectdiscovery/ratelimit"
+
+ "github.com/projectdiscovery/nuclei/v3/pkg/catalog"
+ "github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
+ "github.com/projectdiscovery/nuclei/v3/pkg/core"
+ "github.com/projectdiscovery/nuclei/v3/pkg/input"
+ "github.com/projectdiscovery/nuclei/v3/pkg/loader/parser"
+ parsers "github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow"
+ "github.com/projectdiscovery/nuclei/v3/pkg/output"
+ "github.com/projectdiscovery/nuclei/v3/pkg/progress"
+ "github.com/projectdiscovery/nuclei/v3/pkg/protocols"
+ "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/globalmatchers"
+ "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/hosterrorscache"
+ "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/interactsh"
+ "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/utils/excludematchers"
+ browserEngine "github.com/projectdiscovery/nuclei/v3/pkg/protocols/headless/engine"
+ "github.com/projectdiscovery/nuclei/v3/pkg/reporting"
+ "github.com/projectdiscovery/nuclei/v3/pkg/templates"
+ "github.com/projectdiscovery/nuclei/v3/pkg/types"
+)
+
+type nucleiExecutor struct {
+ engine *core.Engine
+ store *loader.Store
+ options *NucleiExecutorOptions
+ executorOpts protocols.ExecutorOptions
+}
+
+type NucleiExecutorOptions struct {
+ Options *types.Options
+ Output output.Writer
+ Progress progress.Progress
+ Catalog catalog.Catalog
+ IssuesClient reporting.Client
+ RateLimiter *ratelimit.Limiter
+ Interactsh *interactsh.Client
+ ProjectFile *projectfile.ProjectFile
+ Browser *browserEngine.Browser
+ FuzzStatsDB *stats.Tracker
+ Colorizer aurora.Aurora
+ Parser parser.Parser
+ TemporaryDirectory string
+}
+
+func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
+ fuzzFreqCache := frequency.New(frequency.DefaultMaxTrackCount, opts.Options.FuzzParamFrequency)
+ resumeCfg := types.NewResumeCfg()
+
+ // Create the executor options which will be used throughout the execution
+ // stage by the nuclei engine modules.
+ executorOpts := protocols.ExecutorOptions{
+ Output: opts.Output,
+ Options: opts.Options,
+ Progress: opts.Progress,
+ Catalog: opts.Catalog,
+ IssuesClient: opts.IssuesClient,
+ RateLimiter: opts.RateLimiter,
+ Interactsh: opts.Interactsh,
+ ProjectFile: opts.ProjectFile,
+ Browser: opts.Browser,
+ Colorizer: opts.Colorizer,
+ ResumeCfg: resumeCfg,
+ ExcludeMatchers: excludematchers.New(opts.Options.ExcludeMatchers),
+ InputHelper: input.NewHelper(),
+ TemporaryDirectory: opts.TemporaryDirectory,
+ Parser: opts.Parser,
+ FuzzParamsFrequency: fuzzFreqCache,
+ GlobalMatchers: globalmatchers.New(),
+ FuzzStatsDB: opts.FuzzStatsDB,
+ }
+
+ if opts.Options.ShouldUseHostError() {
+ maxHostError := opts.Options.MaxHostError
+ if maxHostError == 30 {
+ maxHostError = 100 // auto adjust for fuzzing
+ }
+ if opts.Options.TemplateThreads > maxHostError {
+ gologger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", opts.Options.TemplateThreads)
+
+ maxHostError = opts.Options.TemplateThreads
+ }
+
+ cache := hosterrorscache.New(maxHostError, hosterrorscache.DefaultMaxHostsCount, opts.Options.TrackError)
+ cache.SetVerbose(opts.Options.Verbose)
+
+ executorOpts.HostErrorsCache = cache
+ }
+
+ executorEngine := core.New(opts.Options)
+ executorEngine.SetExecuterOptions(executorOpts)
+
+ workflowLoader, err := parsers.NewLoader(&executorOpts)
+ if err != nil {
+ return nil, errors.Wrap(err, "Could not create loader options.")
+ }
+ executorOpts.WorkflowLoader = workflowLoader
+
+ // If using input-file flags, only load http fuzzing based templates.
+ loaderConfig := loader.NewConfig(opts.Options, opts.Catalog, executorOpts)
+ if !strings.EqualFold(opts.Options.InputFileMode, "list") || opts.Options.DAST || opts.Options.DASTServer {
+ // if input type is not list (implicitly enable fuzzing)
+ opts.Options.DAST = true
+ }
+ store, err := loader.New(loaderConfig)
+ if err != nil {
+ return nil, errors.Wrap(err, "Could not create loader options.")
+ }
+ store.Load()
+
+ return &nucleiExecutor{
+ engine: executorEngine,
+ store: store,
+ options: opts,
+ executorOpts: executorOpts,
+ }, nil
+}
+
+// proxifyRequest is a request for proxify
+type proxifyRequest struct {
+ URL string `json:"url"`
+ Request struct {
+ Header map[string]string `json:"header"`
+ Body string `json:"body"`
+ Raw string `json:"raw"`
+ } `json:"request"`
+}
+
+func (n *nucleiExecutor) ExecuteScan(target PostRequestsHandlerRequest) error {
+ finalTemplates := []*templates.Template{}
+ finalTemplates = append(finalTemplates, n.store.Templates()...)
+ finalTemplates = append(finalTemplates, n.store.Workflows()...)
+
+ if len(finalTemplates) == 0 {
+ return errors.New("no templates provided for scan")
+ }
+
+ payload := proxifyRequest{
+ URL: target.URL,
+ Request: struct {
+ Header map[string]string `json:"header"`
+ Body string `json:"body"`
+ Raw string `json:"raw"`
+ }{
+ Raw: target.RawHTTP,
+ },
+ }
+
+ marshalledYaml, err := yaml.Marshal(payload)
+ if err != nil {
+ return fmt.Errorf("error marshalling yaml: %s", err)
+ }
+
+ inputProvider, err := http.NewHttpInputProvider(&http.HttpMultiFormatOptions{
+ InputContents: string(marshalledYaml),
+ InputMode: "yaml",
+ Options: formats.InputFormatOptions{
+ Variables: make(map[string]interface{}),
+ },
+ })
+ if err != nil {
+ return errors.Wrap(err, "could not create input provider")
+ }
+
+ // We don't care about the result as it's a boolean
+ // stating whether we got matches or not
+ _ = n.engine.ExecuteScanWithOpts(context.Background(), finalTemplates, inputProvider, true)
+ return nil
+}
+
+func (n *nucleiExecutor) Close() {
+ if n.executorOpts.FuzzStatsDB != nil {
+ n.executorOpts.FuzzStatsDB.Close()
+ }
+ if n.options.Interactsh != nil {
+ _ = n.options.Interactsh.Close()
+ }
+ if n.executorOpts.InputHelper != nil {
+ _ = n.executorOpts.InputHelper.Close()
+ }
+
+}
diff --git a/internal/server/requests_worker.go b/internal/server/requests_worker.go
new file mode 100644
index 0000000000..e811a005ac
--- /dev/null
+++ b/internal/server/requests_worker.go
@@ -0,0 +1,58 @@
+package server
+
+import (
+ "path"
+
+ "github.com/projectdiscovery/gologger"
+ "github.com/projectdiscovery/nuclei/v3/internal/server/scope"
+ "github.com/projectdiscovery/nuclei/v3/pkg/input/types"
+)
+
+func (s *DASTServer) consumeTaskRequest(req PostRequestsHandlerRequest) {
+ defer s.endpointsInQueue.Add(-1)
+
+ parsedReq, err := types.ParseRawRequestWithURL(req.RawHTTP, req.URL)
+ if err != nil {
+ gologger.Warning().Msgf("Could not parse raw request: %s\n", err)
+ return
+ }
+
+ if parsedReq.URL.Scheme != "http" && parsedReq.URL.Scheme != "https" {
+ gologger.Warning().Msgf("Invalid scheme: %s\n", parsedReq.URL.Scheme)
+ return
+ }
+
+ // Check filenames and don't allow non-interesting files
+ extension := path.Base(parsedReq.URL.Path)
+ if extension != "/" && extension != "" && scope.IsUninterestingPath(extension) {
+ gologger.Warning().Msgf("Uninteresting path: %s\n", parsedReq.URL.Path)
+ return
+ }
+
+ inScope, err := s.scopeManager.Validate(parsedReq.URL.URL)
+ if err != nil {
+ gologger.Warning().Msgf("Could not validate scope: %s\n", err)
+ return
+ }
+ if !inScope {
+ gologger.Warning().Msgf("Request is out of scope: %s %s\n", parsedReq.Request.Method, parsedReq.URL.String())
+ return
+ }
+
+ if s.deduplicator.isDuplicate(parsedReq) {
+ gologger.Warning().Msgf("Duplicate request detected: %s %s\n", parsedReq.Request.Method, parsedReq.URL.String())
+ return
+ }
+
+ gologger.Verbose().Msgf("Fuzzing request: %s %s\n", parsedReq.Request.Method, parsedReq.URL.String())
+
+ s.endpointsBeingTested.Add(1)
+ defer s.endpointsBeingTested.Add(-1)
+
+ // Fuzz the request finally
+ err = s.nucleiExecutor.ExecuteScan(req)
+ if err != nil {
+ gologger.Warning().Msgf("Could not run nuclei: %s\n", err)
+ return
+ }
+}
diff --git a/internal/server/scope/extensions.go b/internal/server/scope/extensions.go
new file mode 100644
index 0000000000..f7e5929189
--- /dev/null
+++ b/internal/server/scope/extensions.go
@@ -0,0 +1,33 @@
+package scope
+
import (
	"path"
	"strings"
)
+
// IsUninterestingPath reports whether a request path points at a static
// asset (image, media, archive, stylesheet, font, etc.) that is not
// worth fuzzing.
//
// Matching is case-insensitive: URLs frequently carry upper-case
// extensions (".JPG", ".PNG") and those must be filtered the same as
// their lower-case forms.
func IsUninterestingPath(uriPath string) bool {
	extension := strings.ToLower(path.Ext(uriPath))
	_, excluded := excludedExtensions[extension]
	return excluded
}

// excludedExtensions is the set of file extensions skipped by the fuzzer.
//
// NOTE: path.Ext only returns the final dot-suffix, so the multi-part
// entries below (".min.js", ".js.map", ".chunk.css.map", ...) never match
// directly; they are still excluded via their final extension
// (".js", ".map", ...). They are kept for documentation of intent.
var excludedExtensions = map[string]struct{}{
	".jpg": {}, ".jpeg": {}, ".png": {}, ".gif": {}, ".bmp": {}, ".tiff": {}, ".ico": {},
	".mp4": {}, ".avi": {}, ".mov": {}, ".wmv": {}, ".flv": {}, ".mkv": {}, ".webm": {},
	".mp3": {}, ".wav": {}, ".aac": {}, ".flac": {}, ".ogg": {}, ".wma": {},
	".zip": {}, ".rar": {}, ".7z": {}, ".tar": {}, ".gz": {}, ".bz2": {},
	".exe": {}, ".bin": {}, ".iso": {}, ".img": {},
	".doc": {}, ".docx": {}, ".xls": {}, ".xlsx": {}, ".ppt": {}, ".pptx": {},
	".pdf": {}, ".psd": {}, ".ai": {}, ".eps": {}, ".indd": {},
	".swf": {}, ".fla": {}, ".css": {}, ".scss": {}, ".less": {},
	".js": {}, ".ts": {}, ".jsx": {}, ".tsx": {},
	".xml": {}, ".json": {}, ".yaml": {}, ".yml": {},
	".csv": {}, ".txt": {}, ".log": {}, ".md": {},
	".ttf": {}, ".otf": {}, ".woff": {}, ".woff2": {}, ".eot": {},
	".svg": {}, ".svgz": {}, ".webp": {}, ".tif": {},
	".mpg": {}, ".mpeg": {}, ".weba": {},
	".m4a": {}, ".m4v": {}, ".3gp": {}, ".3g2": {},
	".ogv": {}, ".ogm": {}, ".oga": {}, ".ogx": {},
	".srt": {}, ".min.js": {}, ".min.css": {}, ".js.map": {},
	".min.js.map": {}, ".chunk.css.map": {}, ".hub.js.map": {},
	".hub.css.map": {}, ".map": {},
}
diff --git a/internal/server/scope/scope.go b/internal/server/scope/scope.go
new file mode 100644
index 0000000000..31c74a76de
--- /dev/null
+++ b/internal/server/scope/scope.go
@@ -0,0 +1,77 @@
// Scope-management logic adapted from the Katana crawler (projectdiscovery/katana).
+package scope
+
+import (
+ "fmt"
+ "net/url"
+ "regexp"
+)
+
// Manager manages scope for the crawling/fuzzing process by matching
// URLs against user-supplied in-scope and out-of-scope regexes.
type Manager struct {
	inScope    []*regexp.Regexp
	outOfScope []*regexp.Regexp
	// noScope short-circuits validation when no rules are configured:
	// everything is then considered in scope.
	noScope bool
}

// NewManager returns a new scope manager for crawling.
//
// inScope and outOfScope are regular expressions matched against the
// full URL string. An invalid pattern in either list returns an error.
func NewManager(inScope, outOfScope []string) (*Manager, error) {
	manager := &Manager{}

	var err error
	if manager.inScope, err = compilePatterns(inScope); err != nil {
		return nil, err
	}
	if manager.outOfScope, err = compilePatterns(outOfScope); err != nil {
		return nil, err
	}
	manager.noScope = len(manager.inScope) == 0 && len(manager.outOfScope) == 0
	return manager, nil
}

// compilePatterns compiles a list of regex patterns, failing on the
// first invalid one. The original error is wrapped for errors.Is/As.
func compilePatterns(patterns []string) ([]*regexp.Regexp, error) {
	compiled := make([]*regexp.Regexp, 0, len(patterns))
	for _, pattern := range patterns {
		re, err := regexp.Compile(pattern)
		if err != nil {
			return nil, fmt.Errorf("could not compile regex %s: %w", pattern, err)
		}
		compiled = append(compiled, re)
	}
	return compiled, nil
}

// Validate returns true if the URL matches scope rules. With no rules
// configured every URL is in scope.
func (m *Manager) Validate(URL *url.URL) (bool, error) {
	if m.noScope {
		return true, nil
	}
	return m.validateURL(URL.String())
}

// validateURL applies out-of-scope rules first (an out-of-scope match
// always wins), then requires an in-scope match unless no in-scope
// rules are configured.
func (m *Manager) validateURL(URL string) (bool, error) {
	for _, item := range m.outOfScope {
		if item.MatchString(URL) {
			return false, nil
		}
	}
	if len(m.inScope) == 0 {
		return true, nil
	}
	for _, item := range m.inScope {
		if item.MatchString(URL) {
			return true, nil
		}
	}
	return false, nil
}
diff --git a/internal/server/scope/scope_test.go b/internal/server/scope/scope_test.go
new file mode 100644
index 0000000000..d2256363db
--- /dev/null
+++ b/internal/server/scope/scope_test.go
@@ -0,0 +1,26 @@
+package scope
+
+import (
+ "testing"
+
+ urlutil "github.com/projectdiscovery/utils/url"
+ "github.com/stretchr/testify/require"
+)
+
+func TestManagerValidate(t *testing.T) {
+ t.Run("url", func(t *testing.T) {
+ manager, err := NewManager([]string{`example`}, []string{`logout\.php`})
+ require.NoError(t, err, "could not create scope manager")
+
+ parsed, _ := urlutil.Parse("https://test.com/index.php/example")
+ validated, err := manager.Validate(parsed.URL)
+ require.NoError(t, err, "could not validate url")
+ require.True(t, validated, "could not get correct in-scope validation")
+
+ parsed, _ = urlutil.Parse("https://test.com/logout.php")
+ validated, err = manager.Validate(parsed.URL)
+ require.NoError(t, err, "could not validate url")
+ require.False(t, validated, "could not get correct out-scope validation")
+ })
+
+}
diff --git a/internal/server/server.go b/internal/server/server.go
new file mode 100644
index 0000000000..987967a4d4
--- /dev/null
+++ b/internal/server/server.go
@@ -0,0 +1,297 @@
+package server
+
+import (
+ _ "embed"
+ "fmt"
+ "html/template"
+ "net/http"
+ "net/url"
+ "strings"
+ "sync/atomic"
+ "time"
+
+ "github.com/alitto/pond"
+ "github.com/labstack/echo/v4"
+ "github.com/labstack/echo/v4/middleware"
+ "github.com/projectdiscovery/gologger"
+ "github.com/projectdiscovery/nuclei/v3/internal/server/scope"
+ "github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
+ "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
+ "github.com/projectdiscovery/nuclei/v3/pkg/output"
+ "github.com/projectdiscovery/nuclei/v3/pkg/protocols"
+ "github.com/projectdiscovery/utils/env"
+)
+
// DASTServer is a server that performs execution of fuzzing templates
// on user input passed to the API.
type DASTServer struct {
	echo *echo.Echo // HTTP API server
	// options is nil for stats-only servers built via NewStatsServer;
	// consumers must guard accesses accordingly.
	options *Options
	tasksPool *pond.WorkerPool // bounded worker pool draining queued requests
	deduplicator *requestDeduplicator // drops already-seen requests
	scopeManager *scope.Manager // in/out-of-scope URL filtering
	startTime time.Time // reported via the stats endpoints

	// metrics
	endpointsInQueue atomic.Int64 // requests accepted but not yet picked up
	endpointsBeingTested atomic.Int64 // requests currently being fuzzed

	nucleiExecutor *nucleiExecutor
}
+
// Options contains the configuration options for the server.
type Options struct {
	// Address is the address to bind the server to
	Address string
	// Token is the token to use for authentication (optional)
	Token string
	// Templates is the list of templates to use for fuzzing
	Templates []string
	// Verbose is a flag that controls verbose output
	Verbose bool

	// Scope fields for fuzzer
	InScope  []string
	OutScope []string

	// OutputWriter receives results produced by the engine.
	OutputWriter output.Writer

	// NucleiExecutorOptions configures the embedded nuclei engine.
	// It must be non-nil: New dereferences it unconditionally.
	NucleiExecutorOptions *NucleiExecutorOptions
}
+
+// New creates a new instance of the DAST server.
+func New(options *Options) (*DASTServer, error) {
+ // If the user has specified no templates, use the default ones
+ // for DAST only.
+ if len(options.Templates) == 0 {
+ options.Templates = []string{"dast/"}
+ }
+ // Disable bulk mode and single threaded execution
+ // by auto adjusting in case of default values
+ if options.NucleiExecutorOptions.Options.BulkSize == 25 && options.NucleiExecutorOptions.Options.TemplateThreads == 25 {
+ options.NucleiExecutorOptions.Options.BulkSize = 1
+ options.NucleiExecutorOptions.Options.TemplateThreads = 1
+ }
+ maxWorkers := env.GetEnvOrDefault[int]("FUZZ_MAX_WORKERS", 1)
+ bufferSize := env.GetEnvOrDefault[int]("FUZZ_BUFFER_SIZE", 10000)
+
+ server := &DASTServer{
+ options: options,
+ tasksPool: pond.New(maxWorkers, bufferSize),
+ deduplicator: newRequestDeduplicator(),
+ startTime: time.Now(),
+ }
+ server.setupHandlers(false)
+
+ executor, err := newNucleiExecutor(options.NucleiExecutorOptions)
+ if err != nil {
+ return nil, err
+ }
+ server.nucleiExecutor = executor
+
+ scopeManager, err := scope.NewManager(
+ options.InScope,
+ options.OutScope,
+ )
+ if err != nil {
+ return nil, err
+ }
+ server.scopeManager = scopeManager
+
+ var builder strings.Builder
+ builder.WriteString(fmt.Sprintf("Using %d parallel tasks with %d buffer", maxWorkers, bufferSize))
+ if options.Token != "" {
+ builder.WriteString(" (with token)")
+ }
+ gologger.Info().Msgf("%s", builder.String())
+ gologger.Info().Msgf("Connection URL: %s", server.buildURL("/requests"))
+ gologger.Info().Msgf("Stats UI URL: %s", server.buildURL("/stats"))
+
+ return server, nil
+}
+
+func NewStatsServer(fuzzStatsDB *stats.Tracker) (*DASTServer, error) {
+ server := &DASTServer{
+ nucleiExecutor: &nucleiExecutor{
+ executorOpts: protocols.ExecutorOptions{
+ FuzzStatsDB: fuzzStatsDB,
+ },
+ },
+ }
+ server.setupHandlers(true)
+ gologger.Info().Msgf("Stats UI URL: %s", server.buildURL("/stats"))
+
+ return server, nil
+}
+
+func (s *DASTServer) Close() {
+ s.nucleiExecutor.Close()
+ s.echo.Close()
+ s.tasksPool.StopAndWaitFor(1 * time.Minute)
+}
+
+func (s *DASTServer) buildURL(endpoint string) string {
+ values := make(url.Values)
+ if s.options.Token != "" {
+ values.Set("token", s.options.Token)
+ }
+
+ // Use url.URL struct to safely construct the URL
+ u := &url.URL{
+ Scheme: "http",
+ Host: s.options.Address,
+ Path: endpoint,
+ RawQuery: values.Encode(),
+ }
+ return u.String()
+}
+
+func (s *DASTServer) setupHandlers(onlyStats bool) {
+ e := echo.New()
+ e.Use(middleware.Recover())
+ if s.options.Verbose {
+ cfg := middleware.DefaultLoggerConfig
+ cfg.Skipper = func(c echo.Context) bool {
+ // Skip /stats and /stats.json
+ return c.Request().URL.Path == "/stats" || c.Request().URL.Path == "/stats.json"
+ }
+ e.Use(middleware.LoggerWithConfig(cfg))
+ }
+ e.Use(middleware.CORS())
+
+ if s.options.Token != "" {
+ e.Use(middleware.KeyAuthWithConfig(middleware.KeyAuthConfig{
+ KeyLookup: "query:token",
+ Validator: func(key string, c echo.Context) (bool, error) {
+ return key == s.options.Token, nil
+ },
+ }))
+ }
+
+ e.HideBanner = true
+ // POST /requests - Queue a request for fuzzing
+ if !onlyStats {
+ e.POST("/requests", s.handleRequest)
+ }
+ e.GET("/stats", s.handleStats)
+ e.GET("/stats.json", s.handleStatsJSON)
+
+ s.echo = e
+}
+
+func (s *DASTServer) Start() error {
+ if err := s.echo.Start(s.options.Address); err != nil && err != http.ErrServerClosed {
+ return err
+ }
+ return nil
+}
+
// PostRequestsHandlerRequest is the request body for the /requests POST handler.
type PostRequestsHandlerRequest struct {
	// RawHTTP is the full raw HTTP request (request line, headers, body).
	RawHTTP string `json:"raw_http"`
	// URL is the absolute target URL the raw request was sent to.
	URL string `json:"url"`
}
+
+func (s *DASTServer) handleRequest(c echo.Context) error {
+ var req PostRequestsHandlerRequest
+ if err := c.Bind(&req); err != nil {
+ fmt.Printf("Error binding request: %s\n", err)
+ return err
+ }
+
+ // Validate the request
+ if req.RawHTTP == "" || req.URL == "" {
+ fmt.Printf("Missing required fields\n")
+ return c.JSON(400, map[string]string{"error": "missing required fields"})
+ }
+
+ s.endpointsInQueue.Add(1)
+ s.tasksPool.Submit(func() {
+ s.consumeTaskRequest(req)
+ })
+ return c.NoContent(200)
+}
+
// StatsResponse is the aggregate payload served by the /stats (HTML)
// and /stats.json endpoints.
type StatsResponse struct {
	DASTServerInfo DASTServerInfo `json:"dast_server_info"` // server/version metadata
	DASTScanStatistics DASTScanStatistics `json:"dast_scan_statistics"` // progress counters
	DASTScanStatusStatistics map[string]int64 `json:"dast_scan_status_statistics"` // response status code -> count
	DASTScanSeverityBreakdown map[string]int64 `json:"dast_scan_severity_breakdown"` // severity -> findings count
	DASTScanErrorStatistics map[string]int64 `json:"dast_scan_error_statistics"` // grouped error -> count
	DASTScanStartTime time.Time `json:"dast_scan_start_time"`
}
+
// DASTServerInfo describes the running server: nuclei and template
// versions, the connection URL and whether token auth is enabled.
type DASTServerInfo struct {
	NucleiVersion         string `json:"nuclei_version"`
	NucleiTemplateVersion string `json:"nuclei_template_version"`
	NucleiDastServerAPI   string `json:"nuclei_dast_server_api"`
	// NOTE(review): the json tag has a typo ("sever_auth_enabled").
	// Fixing it changes the public stats.json schema — confirm no
	// consumers depend on it before renaming.
	ServerAuthEnabled bool `json:"sever_auth_enabled"`
}
+
// DASTScanStatistics holds counters describing scan progress. The queue
// figures come from the server's own atomics; the Total* figures are
// populated from the fuzz stats database when one is attached.
type DASTScanStatistics struct {
	EndpointsInQueue      int64 `json:"endpoints_in_queue"`      // accepted, not yet picked up
	EndpointsBeingTested  int64 `json:"endpoints_being_tested"`  // currently being fuzzed
	TotalTemplatesLoaded  int64 `json:"total_dast_templates_loaded"`
	TotalTemplatesTested  int64 `json:"total_dast_templates_tested"`
	TotalMatchedResults   int64 `json:"total_matched_results"`
	TotalComponentsTested int64 `json:"total_components_tested"`
	TotalEndpointsTested  int64 `json:"total_endpoints_tested"`
	TotalFuzzedRequests   int64 `json:"total_fuzzed_requests"`
	TotalErroredRequests  int64 `json:"total_errored_requests"`
}
+
+func (s *DASTServer) getStats() (StatsResponse, error) {
+ cfg := config.DefaultConfig
+
+ resp := StatsResponse{
+ DASTServerInfo: DASTServerInfo{
+ NucleiVersion: config.Version,
+ NucleiTemplateVersion: cfg.TemplateVersion,
+ NucleiDastServerAPI: s.buildURL("/requests"),
+ ServerAuthEnabled: s.options.Token != "",
+ },
+ DASTScanStartTime: s.startTime,
+ DASTScanStatistics: DASTScanStatistics{
+ EndpointsInQueue: s.endpointsInQueue.Load(),
+ EndpointsBeingTested: s.endpointsBeingTested.Load(),
+ TotalTemplatesLoaded: int64(len(s.nucleiExecutor.store.Templates())),
+ },
+ }
+ if s.nucleiExecutor.executorOpts.FuzzStatsDB != nil {
+ fuzzStats := s.nucleiExecutor.executorOpts.FuzzStatsDB.GetStats()
+ resp.DASTScanSeverityBreakdown = fuzzStats.SeverityCounts
+ resp.DASTScanStatusStatistics = fuzzStats.StatusCodes
+ resp.DASTScanStatistics.TotalMatchedResults = fuzzStats.TotalMatchedResults
+ resp.DASTScanStatistics.TotalComponentsTested = fuzzStats.TotalComponentsTested
+ resp.DASTScanStatistics.TotalEndpointsTested = fuzzStats.TotalEndpointsTested
+ resp.DASTScanStatistics.TotalFuzzedRequests = fuzzStats.TotalFuzzedRequests
+ resp.DASTScanStatistics.TotalTemplatesTested = fuzzStats.TotalTemplatesTested
+ resp.DASTScanStatistics.TotalErroredRequests = fuzzStats.TotalErroredRequests
+ resp.DASTScanErrorStatistics = fuzzStats.ErrorGroupedStats
+ }
+ return resp, nil
+}
+
+//go:embed templates/index.html
+var indexTemplate string
+
+func (s *DASTServer) handleStats(c echo.Context) error {
+ stats, err := s.getStats()
+ if err != nil {
+ return c.JSON(500, map[string]string{"error": err.Error()})
+ }
+
+ tmpl, err := template.New("index").Parse(indexTemplate)
+ if err != nil {
+ return c.JSON(500, map[string]string{"error": err.Error()})
+ }
+ return tmpl.Execute(c.Response().Writer, stats)
+}
+
+func (s *DASTServer) handleStatsJSON(c echo.Context) error {
+ resp, err := s.getStats()
+ if err != nil {
+ return c.JSON(500, map[string]string{"error": err.Error()})
+ }
+ return c.JSONPretty(200, resp, " ")
+}
diff --git a/internal/server/templates/index.html b/internal/server/templates/index.html
new file mode 100644
index 0000000000..fa3488e9b4
--- /dev/null
+++ b/internal/server/templates/index.html
@@ -0,0 +1,342 @@
+
+
+
+
+ DAST Scan Report
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Nuclei Version{{.DASTServerInfo.NucleiVersion}}
+
Template Version{{.DASTServerInfo.NucleiTemplateVersion}}
+
DAST Server API{{.DASTServerInfo.NucleiDastServerAPI}}
+
Auth Status{{if .DASTServerInfo.ServerAuthEnabled}}ENABLED{{else}}DISABLED{{end}}
+
+
+
+
+
Total Results{{.DASTScanStatistics.TotalMatchedResults}} findings
+
Endpoints In Queue{{.DASTScanStatistics.EndpointsInQueue}}
+
Currently Testing{{.DASTScanStatistics.EndpointsBeingTested}}
+
Components Tested{{.DASTScanStatistics.TotalComponentsTested}}
+
Endpoints Tested{{.DASTScanStatistics.TotalEndpointsTested}}
+
Templates Loaded{{.DASTScanStatistics.TotalTemplatesLoaded}}
+
Templates Tested{{.DASTScanStatistics.TotalTemplatesTested}}
+
Total Requests{{.DASTScanStatistics.TotalFuzzedRequests}}
+
Total Errors{{.DASTScanStatistics.TotalErroredRequests}}
+
+
+
+
+
+
+
Critical
+
{{index .DASTScanSeverityBreakdown "critical"}} findings
+
+
+
High
+
{{index .DASTScanSeverityBreakdown "high"}} findings
+
+
+
Medium
+
{{index .DASTScanSeverityBreakdown "medium"}} findings
+
+
+
Low
+
{{index .DASTScanSeverityBreakdown "low"}} findings
+
+
+
Info
+
{{index .DASTScanSeverityBreakdown "info"}} findings
+
+
+
+
+
+
+
+
Response Codes
+ {{range $status, $count := .DASTScanStatusStatistics}}
+
{{$status}}{{$count}} times
+ {{end}}
+
+
+
+
+
+ {{range $error, $count := .DASTScanErrorStatistics}}
+
+
{{$error}}
+
{{$count}} times
+
+ {{end}}
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pkg/authprovider/authx/cookies_auth.go b/pkg/authprovider/authx/cookies_auth.go
index 7f3e756a71..0b94e85481 100644
--- a/pkg/authprovider/authx/cookies_auth.go
+++ b/pkg/authprovider/authx/cookies_auth.go
@@ -2,6 +2,7 @@ package authx
import (
"net/http"
+ "slices"
"github.com/projectdiscovery/retryablehttp-go"
)
@@ -33,11 +34,27 @@ func (s *CookiesAuthStrategy) Apply(req *http.Request) {
// ApplyOnRR applies the cookies auth strategy to the retryable request.
//
// For each configured auth cookie, at most ONE existing cookie with the
// same name is removed (the first match; duplicates beyond that are
// kept). The Cookie header is then rebuilt from the surviving cookies
// and the auth cookies are appended last, so they take effect.
func (s *CookiesAuthStrategy) ApplyOnRR(req *retryablehttp.Request) {
	existingCookies := req.Cookies()

	// Drop the first existing cookie matching each auth cookie's name.
	// The inner loop restarts per auth cookie, so indices stay valid
	// after slices.Delete shrinks the slice.
	for _, newCookie := range s.Data.Cookies {
		for i, existing := range existingCookies {
			if existing.Name == newCookie.Key {
				existingCookies = slices.Delete(existingCookies, i, i+1)
				break
			}
		}
	}

	// Clear and reset remaining cookies
	req.Header.Del("Cookie")
	for _, cookie := range existingCookies {
		req.AddCookie(cookie)
	}
	// Add new cookies
	for _, cookie := range s.Data.Cookies {
		req.AddCookie(&http.Cookie{
			Name:  cookie.Key,
			Value: cookie.Value,
		})
	}
}
diff --git a/pkg/authprovider/authx/dynamic.go b/pkg/authprovider/authx/dynamic.go
index 0e210cf5e7..f61fc5d31c 100644
--- a/pkg/authprovider/authx/dynamic.go
+++ b/pkg/authprovider/authx/dynamic.go
@@ -9,6 +9,7 @@ import (
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/replacer"
errorutil "github.com/projectdiscovery/utils/errors"
+ sliceutil "github.com/projectdiscovery/utils/slice"
)
type LazyFetchSecret func(d *Dynamic) error
@@ -22,7 +23,8 @@ var (
// ex: username and password are dynamic secrets, the actual secret is the token obtained
// after authenticating with the username and password
type Dynamic struct {
- Secret `yaml:",inline"` // this is a static secret that will be generated after the dynamic secret is resolved
+ *Secret `yaml:",inline"` // this is a static secret that will be generated after the dynamic secret is resolved
+ Secrets []*Secret `yaml:"secrets"`
TemplatePath string `json:"template" yaml:"template"`
Variables []KV `json:"variables" yaml:"variables"`
Input string `json:"input" yaml:"input"` // (optional) target for the dynamic secret
@@ -33,6 +35,22 @@ type Dynamic struct {
error error `json:"-" yaml:"-"` // error if any
}
// GetDomainAndDomainRegex returns the deduplicated union of plain
// domains and domain regexes collected from all additional secrets and
// (when present) the primary inlined secret of this dynamic secret.
func (d *Dynamic) GetDomainAndDomainRegex() ([]string, []string) {
	var domains []string
	var domainRegex []string
	for _, secret := range d.Secrets {
		domains = append(domains, secret.Domains...)
		domainRegex = append(domainRegex, secret.DomainsRegex...)
	}
	// The inlined primary secret is optional (pointer may be nil).
	if d.Secret != nil {
		domains = append(domains, d.Secret.Domains...)
		domainRegex = append(domainRegex, d.Secret.DomainsRegex...)
	}
	uniqueDomains := sliceutil.Dedupe(domains)
	uniqueDomainRegex := sliceutil.Dedupe(domainRegex)
	return uniqueDomains, uniqueDomainRegex
}
+
func (d *Dynamic) UnmarshalJSON(data []byte) error {
if err := json.Unmarshal(data, &d); err != nil {
return err
@@ -41,7 +59,7 @@ func (d *Dynamic) UnmarshalJSON(data []byte) error {
if err := json.Unmarshal(data, &s); err != nil {
return err
}
- d.Secret = s
+ d.Secret = &s
return nil
}
@@ -54,9 +72,18 @@ func (d *Dynamic) Validate() error {
if len(d.Variables) == 0 {
return errorutil.New("variables are required for dynamic secret")
}
- d.skipCookieParse = true // skip cookie parsing in dynamic secrets during validation
- if err := d.Secret.Validate(); err != nil {
- return err
+
+ if d.Secret != nil {
+ d.Secret.skipCookieParse = true // skip cookie parsing in dynamic secrets during validation
+ if err := d.Secret.Validate(); err != nil {
+ return err
+ }
+ }
+ for _, secret := range d.Secrets {
+ secret.skipCookieParse = true
+ if err := secret.Validate(); err != nil {
+ return err
+ }
}
return nil
}
@@ -74,76 +101,98 @@ func (d *Dynamic) SetLazyFetchCallback(callback LazyFetchSecret) {
return fmt.Errorf("no extracted values found for dynamic secret")
}
- // evaluate headers
- for i, header := range d.Headers {
- if strings.Contains(header.Value, "{{") {
- header.Value = replacer.Replace(header.Value, d.Extracted)
+ if d.Secret != nil {
+ if err := d.applyValuesToSecret(d.Secret); err != nil {
+ return err
}
- if strings.Contains(header.Key, "{{") {
- header.Key = replacer.Replace(header.Key, d.Extracted)
- }
- d.Headers[i] = header
}
- // evaluate cookies
- for i, cookie := range d.Cookies {
- if strings.Contains(cookie.Value, "{{") {
- cookie.Value = replacer.Replace(cookie.Value, d.Extracted)
- }
- if strings.Contains(cookie.Key, "{{") {
- cookie.Key = replacer.Replace(cookie.Key, d.Extracted)
+ for _, secret := range d.Secrets {
+ if err := d.applyValuesToSecret(secret); err != nil {
+ return err
}
- if strings.Contains(cookie.Raw, "{{") {
- cookie.Raw = replacer.Replace(cookie.Raw, d.Extracted)
- }
- d.Cookies[i] = cookie
}
+ return nil
+ }
+}
- // evaluate query params
- for i, query := range d.Params {
- if strings.Contains(query.Value, "{{") {
- query.Value = replacer.Replace(query.Value, d.Extracted)
- }
- if strings.Contains(query.Key, "{{") {
- query.Key = replacer.Replace(query.Key, d.Extracted)
- }
- d.Params[i] = query
+func (d *Dynamic) applyValuesToSecret(secret *Secret) error {
+ // evaluate headers
+ for i, header := range secret.Headers {
+ if strings.Contains(header.Value, "{{") {
+ header.Value = replacer.Replace(header.Value, d.Extracted)
+ }
+ if strings.Contains(header.Key, "{{") {
+ header.Key = replacer.Replace(header.Key, d.Extracted)
}
+ secret.Headers[i] = header
+ }
- // check username, password and token
- if strings.Contains(d.Username, "{{") {
- d.Username = replacer.Replace(d.Username, d.Extracted)
+ // evaluate cookies
+ for i, cookie := range secret.Cookies {
+ if strings.Contains(cookie.Value, "{{") {
+ cookie.Value = replacer.Replace(cookie.Value, d.Extracted)
}
- if strings.Contains(d.Password, "{{") {
- d.Password = replacer.Replace(d.Password, d.Extracted)
+ if strings.Contains(cookie.Key, "{{") {
+ cookie.Key = replacer.Replace(cookie.Key, d.Extracted)
}
- if strings.Contains(d.Token, "{{") {
- d.Token = replacer.Replace(d.Token, d.Extracted)
+ if strings.Contains(cookie.Raw, "{{") {
+ cookie.Raw = replacer.Replace(cookie.Raw, d.Extracted)
}
+ secret.Cookies[i] = cookie
+ }
+
+ // evaluate query params
+ for i, query := range secret.Params {
+ if strings.Contains(query.Value, "{{") {
+ query.Value = replacer.Replace(query.Value, d.Extracted)
+ }
+ if strings.Contains(query.Key, "{{") {
+ query.Key = replacer.Replace(query.Key, d.Extracted)
+ }
+ secret.Params[i] = query
+ }
- // now attempt to parse the cookies
- d.skipCookieParse = false
- for i, cookie := range d.Cookies {
- if cookie.Raw != "" {
- if err := cookie.Parse(); err != nil {
- return fmt.Errorf("[%s] invalid raw cookie in cookiesAuth: %s", d.TemplatePath, err)
- }
- d.Cookies[i] = cookie
+ // check username, password and token
+ if strings.Contains(secret.Username, "{{") {
+ secret.Username = replacer.Replace(secret.Username, d.Extracted)
+ }
+ if strings.Contains(secret.Password, "{{") {
+ secret.Password = replacer.Replace(secret.Password, d.Extracted)
+ }
+ if strings.Contains(secret.Token, "{{") {
+ secret.Token = replacer.Replace(secret.Token, d.Extracted)
+ }
+
+ // now attempt to parse the cookies
+ secret.skipCookieParse = false
+ for i, cookie := range secret.Cookies {
+ if cookie.Raw != "" {
+ if err := cookie.Parse(); err != nil {
+ return fmt.Errorf("[%s] invalid raw cookie in cookiesAuth: %s", d.TemplatePath, err)
}
+ secret.Cookies[i] = cookie
}
- return nil
}
+ return nil
}
// GetStrategies returns the auth strategies for the dynamic secret,
// lazily fetching the secret on first use. It returns nil when the
// fetch failed. Entries are whatever Secret.GetStrategy returns and are
// not nil-checked here, so callers must tolerate nil elements.
func (d *Dynamic) GetStrategies() []AuthStrategy {
	if !d.fetched {
		// best-effort lazy fetch; a failure surfaces via d.error below
		_ = d.Fetch(true)
	}
	if d.error != nil {
		return nil
	}
	var strategies []AuthStrategy
	if d.Secret != nil {
		strategies = append(strategies, d.Secret.GetStrategy())
	}
	for _, secret := range d.Secrets {
		strategies = append(strategies, secret.GetStrategy())
	}
	return strategies
}
// Fetch fetches the dynamic secret
diff --git a/pkg/authprovider/authx/strategy.go b/pkg/authprovider/authx/strategy.go
index 8204083989..54ff8e81c4 100644
--- a/pkg/authprovider/authx/strategy.go
+++ b/pkg/authprovider/authx/strategy.go
@@ -24,16 +24,22 @@ type DynamicAuthStrategy struct {
// Apply applies the strategy to the request
func (d *DynamicAuthStrategy) Apply(req *http.Request) {
- strategy := d.Dynamic.GetStrategy()
- if strategy != nil {
- strategy.Apply(req)
+ strategies := d.Dynamic.GetStrategies()
+ if strategies == nil {
+ return
+ }
+ for _, s := range strategies {
+ if s == nil {
+ continue
+ }
+ s.Apply(req)
}
}
// ApplyOnRR applies the strategy to the retryable request
func (d *DynamicAuthStrategy) ApplyOnRR(req *retryablehttp.Request) {
- strategy := d.Dynamic.GetStrategy()
- if strategy != nil {
- strategy.ApplyOnRR(req)
+ strategy := d.Dynamic.GetStrategies()
+ for _, s := range strategy {
+ s.ApplyOnRR(req)
}
}
diff --git a/pkg/authprovider/file.go b/pkg/authprovider/file.go
index 3a32a94fe4..64cfcb8793 100644
--- a/pkg/authprovider/file.go
+++ b/pkg/authprovider/file.go
@@ -85,8 +85,10 @@ func (f *FileAuthProvider) init() {
}
}
for _, dynamic := range f.store.Dynamic {
- if len(dynamic.DomainsRegex) > 0 {
- for _, domain := range dynamic.DomainsRegex {
+ domain, domainsRegex := dynamic.GetDomainAndDomainRegex()
+
+ if len(domainsRegex) > 0 {
+ for _, domain := range domainsRegex {
if f.compiled == nil {
f.compiled = make(map[*regexp.Regexp][]authx.AuthStrategy)
}
@@ -101,7 +103,7 @@ func (f *FileAuthProvider) init() {
}
}
}
- for _, domain := range dynamic.Domains {
+ for _, domain := range domain {
if f.domains == nil {
f.domains = make(map[string][]authx.AuthStrategy)
}
diff --git a/pkg/catalog/loader/loader.go b/pkg/catalog/loader/loader.go
index ad6180da27..cf875d4e6b 100644
--- a/pkg/catalog/loader/loader.go
+++ b/pkg/catalog/loader/loader.go
@@ -530,7 +530,8 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
// Skip DAST filter when loading auth templates
if store.ID() != AuthStoreId && store.config.ExecutorOptions.Options.DAST {
// check if the template is a DAST template
- if parsed.IsFuzzing() {
+ // also allow global matchers template to be loaded
+ if parsed.IsFuzzing() || parsed.Options.GlobalMatchers != nil && parsed.Options.GlobalMatchers.HasMatchers() {
loadTemplate(parsed)
}
} else if len(parsed.RequestsHeadless) > 0 && !store.config.ExecutorOptions.Options.Headless {
diff --git a/pkg/fuzz/analyzers/analyzers.go b/pkg/fuzz/analyzers/analyzers.go
index 8eedb6b71b..6266e8bb01 100644
--- a/pkg/fuzz/analyzers/analyzers.go
+++ b/pkg/fuzz/analyzers/analyzers.go
@@ -81,18 +81,11 @@ func ApplyPayloadTransformations(value string) string {
}
const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
-const (
- letterIdxBits = 6 // 6 bits to represent a letter index
- letterIdxMask = 1< 0 && varY > 0 {
+ o.correlation = covXY / (math.Sqrt(varX) * math.Sqrt(varY))
+ } else {
+ o.correlation = 0.0
}
}
@@ -164,8 +210,17 @@ func (o *simpleLinearRegression) Predict(x float64) float64 {
return o.slope*x + o.intercept
}
// IsWithinConfidence reports whether the fitted regression is consistent
// with the expected linear time-delay relationship.
//
// With fewer than two observations no meaningful regression exists, so
// it optimistically returns true. Otherwise the slope must be within
// slopeErrorRange of either the expected slope or roughly twice it, and
// the correlation must exceed 1.0 - correlationErrorRange.
func (o *simpleLinearRegression) IsWithinConfidence(correlationErrorRange float64, expectedSlope float64, slopeErrorRange float64) bool {
	if o.count < 2 {
		return true
	}
	// Check if slope is within error range of expected slope
	// Also consider cases where slope is approximately 2x of expected slope
	// as this can happen with time-based responses
	slopeDiff := math.Abs(expectedSlope - o.slope)
	slope2xDiff := math.Abs(expectedSlope*2 - o.slope)
	if slopeDiff > slopeErrorRange && slope2xDiff > slopeErrorRange {
		return false
	}
	return o.correlation > 1.0-correlationErrorRange
}
diff --git a/pkg/fuzz/analyzers/time/time_delay_test.go b/pkg/fuzz/analyzers/time/time_delay_test.go
index 8a71243595..91b2ba657a 100644
--- a/pkg/fuzz/analyzers/time/time_delay_test.go
+++ b/pkg/fuzz/analyzers/time/time_delay_test.go
@@ -3,141 +3,498 @@
package time
import (
- "math"
"math/rand"
+ "reflect"
"testing"
"time"
-
- "github.com/stretchr/testify/require"
)
-const (
- correlationErrorRange = float64(0.1)
- slopeErrorRange = float64(0.2)
-)
+// This test suite verifies the timing dependency detection algorithm by testing various scenarios:
+//
+// Test Categories:
+// 1. Perfect Linear Cases
+// - TestPerfectLinear: Basic case with slope=1, no noise
+// - TestPerfectLinearSlopeOne_NoNoise: Similar to above but with different parameters
+// - TestPerfectLinearSlopeTwo_NoNoise: Tests detection of slope=2 relationship
+//
+// 2. Noisy Cases
+// - TestLinearWithNoise: Verifies detection works with moderate noise (±0.2s)
+// - TestNoisyLinear: Similar but with different noise parameters
+// - TestHighNoiseConcealsSlope: Verifies detection fails with extreme noise (±5s)
+//
+// 3. No Correlation Cases
+// - TestNoCorrelation: Basic case where delay has no effect
+// - TestNoCorrelationHighBaseline: High baseline (~15s) masks any delay effect
+// - TestNegativeSlopeScenario: Verifies detection rejects negative correlations
+//
+// 4. Edge Cases
+// - TestMinimalData: Tests behavior with minimal data points (2 requests)
+// - TestLargeNumberOfRequests: Tests stability with many data points (20 requests)
+// - TestChangingBaseline: Tests detection with shifting baseline mid-test
+// - TestHighBaselineLowSlope: Tests detection of subtle correlations (slope=0.85)
+//
+// ZAP Test Cases:
+//
+// 1. Alternating Sequence Tests
+// - TestAlternatingSequences: Verifies correct alternation between high and low delays
+//
+// 2. Non-Injectable Cases
+// - TestNonInjectableQuickFail: Tests quick failure when response time < requested delay
+// - TestSlowNonInjectableCase: Tests early termination with consistently high response times
+// - TestRealWorldNonInjectableCase: Tests behavior with real-world response patterns
+//
+// 3. Error Tolerance Tests
+// - TestSmallErrorDependence: Verifies detection works with small random variations
+//
+// Key Parameters Tested:
+// - requestsLimit: Number of requests to make (2-20)
+// - highSleepTimeSeconds: Maximum delay to test (typically 5s)
+// - correlationErrorRange: Acceptable deviation from perfect correlation (0.05-0.3)
+// - slopeErrorRange: Acceptable deviation from expected slope (0.1-1.5)
+//
+// The test suite uses various mock senders (perfectLinearSender, noCorrelationSender, etc.)
+// to simulate different timing behaviors and verify the detection algorithm works correctly
+// across a wide range of scenarios.
-var rng = rand.New(rand.NewSource(time.Now().UnixNano()))
+// Mock request sender that simulates a perfect linear relationship:
+// Observed delay = baseline + requested_delay
+func perfectLinearSender(baseline float64) func(delay int) (float64, error) {
+ return func(delay int) (float64, error) {
+ // simulate some processing time
+ time.Sleep(10 * time.Millisecond) // just a small artificial sleep to mimic network
+ return baseline + float64(delay), nil
+ }
+}
-func Test_should_generate_alternating_sequences(t *testing.T) {
- var generatedDelays []float64
- reqSender := func(delay int) (float64, error) {
- generatedDelays = append(generatedDelays, float64(delay))
- return float64(delay), nil
+// Mock request sender that simulates no correlation:
+// The response time is random around a certain constant baseline, ignoring requested delay.
+func noCorrelationSender(baseline, noiseAmplitude float64) func(int) (float64, error) {
+ return func(delay int) (float64, error) {
+ time.Sleep(10 * time.Millisecond)
+ noise := 0.0
+ if noiseAmplitude > 0 {
+ noise = (rand.Float64()*2 - 1) * noiseAmplitude
+ }
+ return baseline + noise, nil
}
- matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
- require.NoError(t, err)
- require.True(t, matched)
- require.EqualValues(t, []float64{15, 1, 15, 1}, generatedDelays)
}
-func Test_should_giveup_non_injectable(t *testing.T) {
- var timesCalled int
- reqSender := func(delay int) (float64, error) {
- timesCalled++
- return 0.5, nil
+// Mock request sender that simulates partial linearity but with some noise.
+func noisyLinearSender(baseline float64) func(delay int) (float64, error) {
+ return func(delay int) (float64, error) {
+ time.Sleep(10 * time.Millisecond)
+		// Apply a constant +0.2s offset on top of the linear
+		// relationship (deterministic — not random noise)
+		noise := 0.2
+ return baseline + float64(delay) + noise, nil
}
- matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
- require.NoError(t, err)
- require.False(t, matched)
- require.Equal(t, 1, timesCalled)
}
-func Test_should_giveup_slow_non_injectable(t *testing.T) {
- var timesCalled int
- reqSender := func(delay int) (float64, error) {
- timesCalled++
- return 10 + rng.Float64()*0.5, nil
+func TestPerfectLinear(t *testing.T) {
+ // Expect near-perfect correlation and slope ~ 1.0
+ requestsLimit := 6 // 3 pairs: enough data for stable regression
+ highSleepTimeSeconds := 5
+ corrErrRange := 0.1
+ slopeErrRange := 0.2
+ baseline := 5.0
+
+ sender := perfectLinearSender(5.0) // baseline 5s, observed = 5s + requested_delay
+ match, reason, err := checkTimingDependency(
+ requestsLimit,
+ highSleepTimeSeconds,
+ corrErrRange,
+ slopeErrRange,
+ baseline,
+ sender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if !match {
+ t.Fatalf("Expected a match but got none. Reason: %s", reason)
}
- matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
- require.NoError(t, err)
- require.False(t, matched)
- require.LessOrEqual(t, timesCalled, 3)
}
-func Test_should_giveup_slow_non_injectable_realworld(t *testing.T) {
- var timesCalled int
- var iteration = 0
- counts := []float64{21, 11, 21, 11}
- reqSender := func(delay int) (float64, error) {
- timesCalled++
- iteration++
- return counts[iteration-1], nil
+func TestNoCorrelation(t *testing.T) {
+ // Expect no match because requested delay doesn't influence observed delay
+ requestsLimit := 6
+ highSleepTimeSeconds := 5
+ corrErrRange := 0.1
+ slopeErrRange := 0.5
+ baseline := 8.0
+
+ sender := noCorrelationSender(8.0, 0.1)
+ match, reason, err := checkTimingDependency(
+ requestsLimit,
+ highSleepTimeSeconds,
+ corrErrRange,
+ slopeErrRange,
+ baseline,
+ sender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if match {
+ t.Fatalf("Expected no match but got one. Reason: %s", reason)
}
- matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
- require.NoError(t, err)
- require.False(t, matched)
- require.LessOrEqual(t, timesCalled, 4)
}
-func Test_should_detect_dependence_with_small_error(t *testing.T) {
- reqSender := func(delay int) (float64, error) {
- return float64(delay) + rng.Float64()*0.5, nil
+func TestNoisyLinear(t *testing.T) {
+ // Even with some noise, it should detect a strong positive correlation if
+ // we allow a slightly bigger margin for slope/correlation.
+ requestsLimit := 10 // More requests to average out noise
+ highSleepTimeSeconds := 5
+ corrErrRange := 0.2 // allow some lower correlation due to noise
+ slopeErrRange := 0.5 // slope may deviate slightly
+ baseline := 2.0
+
+ sender := noisyLinearSender(2.0) // baseline 2s, observed ~ 2s + requested_delay ±0.2
+ match, reason, err := checkTimingDependency(
+ requestsLimit,
+ highSleepTimeSeconds,
+ corrErrRange,
+ slopeErrRange,
+ baseline,
+ sender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+
+ // We expect a match since it's still roughly linear. The slope should be close to 1.
+ if !match {
+ t.Fatalf("Expected a match in noisy linear test but got none. Reason: %s", reason)
+ }
+}
+
+func TestMinimalData(t *testing.T) {
+ // With too few requests, correlation might not be stable.
+ // Here, we send only 2 requests (1 pair) and see if the logic handles it gracefully.
+ requestsLimit := 2
+ highSleepTimeSeconds := 5
+ corrErrRange := 0.3
+ slopeErrRange := 0.5
+ baseline := 5.0
+
+ // Perfect linear sender again
+ sender := perfectLinearSender(5.0)
+ match, reason, err := checkTimingDependency(
+ requestsLimit,
+ highSleepTimeSeconds,
+ corrErrRange,
+ slopeErrRange,
+ baseline,
+ sender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if !match {
+ t.Fatalf("Expected match but got none. Reason: %s", reason)
}
- matched, reason, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
- require.NoError(t, err)
- require.True(t, matched)
- require.NotEmpty(t, reason)
}
-func Test_LinearRegression_Numerical_stability(t *testing.T) {
- variables := [][]float64{
- {1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}, {1, 1}, {2, 2}, {2, 2}, {2, 2},
+// Utility functions to generate different behaviors
+
+// linearSender returns a sender that calculates observed delay as:
+// observed = baseline + slope * requested_delay + noise
+func linearSender(baseline, slope, noiseAmplitude float64) func(int) (float64, error) {
+ return func(delay int) (float64, error) {
+ time.Sleep(10 * time.Millisecond)
+ noise := 0.0
+ if noiseAmplitude > 0 {
+ noise = (rand.Float64()*2 - 1) * noiseAmplitude // random noise in [-noiseAmplitude, noiseAmplitude]
+ }
+ return baseline + slope*float64(delay) + noise, nil
}
- slope := float64(1)
- correlation := float64(1)
+}
- regression := newSimpleLinearRegression()
- for _, v := range variables {
- regression.AddPoint(v[0], v[1])
+// negativeSlopeSender just for completeness - higher delay = less observed time
+func negativeSlopeSender(baseline float64) func(int) (float64, error) {
+ return func(delay int) (float64, error) {
+ time.Sleep(10 * time.Millisecond)
+ return baseline - float64(delay)*2.0, nil
}
- require.True(t, almostEqual(regression.slope, slope))
- require.True(t, almostEqual(regression.correlation, correlation))
}
-func Test_LinearRegression_exact_verify(t *testing.T) {
- variables := [][]float64{
- {1, 1}, {2, 3},
+func TestPerfectLinearSlopeOne_NoNoise(t *testing.T) {
+ baseline := 2.0
+ match, reason, err := checkTimingDependency(
+ 10, // requestsLimit
+ 5, // highSleepTimeSeconds
+ 0.1, // correlationErrorRange
+ 0.2, // slopeErrorRange (allowing slope between 0.8 and 1.2)
+ baseline,
+ linearSender(baseline, 1.0, 0.0),
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
}
- slope := float64(2)
- correlation := float64(1)
+ if !match {
+ t.Fatalf("Expected a match for perfect linear slope=1. Reason: %s", reason)
+ }
+}
- regression := newSimpleLinearRegression()
- for _, v := range variables {
- regression.AddPoint(v[0], v[1])
+func TestPerfectLinearSlopeTwo_NoNoise(t *testing.T) {
+ baseline := 2.0
+ // slope=2 means observed = baseline + 2*requested_delay
+ match, reason, err := checkTimingDependency(
+ 10,
+ 5,
+ 0.1, // correlation must still be good
+ 1.5, // allow slope in range (0.5 to 2.5), we should be close to 2.0 anyway
+ baseline,
+ linearSender(baseline, 2.0, 0.0),
+ )
+ if err != nil {
+ t.Fatalf("Error: %v", err)
+ }
+ if !match {
+ t.Fatalf("Expected a match for slope=2. Reason: %s", reason)
}
- require.True(t, almostEqual(regression.slope, slope))
- require.True(t, almostEqual(regression.correlation, correlation))
}
-func Test_LinearRegression_known_verify(t *testing.T) {
- variables := [][]float64{
- {1, 1.348520581}, {2, 2.524046187}, {3, 3.276944688}, {4, 4.735374498}, {5, 5.150291657},
+func TestLinearWithNoise(t *testing.T) {
+ baseline := 5.0
+ // slope=1 but with noise ±0.2 seconds
+ match, reason, err := checkTimingDependency(
+ 12,
+ 5,
+ 0.2, // correlationErrorRange relaxed to account for noise
+ 0.5, // slopeErrorRange also relaxed
+ baseline,
+ linearSender(baseline, 1.0, 0.2),
+ )
+ if err != nil {
+ t.Fatalf("Error: %v", err)
}
- slope := float64(0.981487046)
- correlation := float64(0.979228906)
+ if !match {
+ t.Fatalf("Expected a match for noisy linear data. Reason: %s", reason)
+ }
+}
- regression := newSimpleLinearRegression()
- for _, v := range variables {
- regression.AddPoint(v[0], v[1])
+func TestNoCorrelationHighBaseline(t *testing.T) {
+ baseline := 15.0
+ // baseline ~15s, requested delays won't matter
+ match, reason, err := checkTimingDependency(
+ 10,
+ 5,
+ 0.1, // correlation should be near zero, so no match expected
+ 0.5,
+ baseline,
+ noCorrelationSender(baseline, 0.1),
+ )
+ if err != nil {
+ t.Fatalf("Error: %v", err)
+ }
+ if match {
+ t.Fatalf("Expected no match for no correlation scenario. Got: %s", reason)
}
- require.True(t, almostEqual(regression.slope, slope))
- require.True(t, almostEqual(regression.correlation, correlation))
}
-func Test_LinearRegression_nonlinear_verify(t *testing.T) {
- variables := [][]float64{
- {1, 2}, {2, 4}, {3, 8}, {4, 16}, {5, 32},
+func TestNegativeSlopeScenario(t *testing.T) {
+ baseline := 10.0
+ // Increasing delay decreases observed time
+ match, reason, err := checkTimingDependency(
+ 10,
+ 5,
+ 0.2,
+ 0.5,
+ baseline,
+ negativeSlopeSender(baseline),
+ )
+ if err != nil {
+ t.Fatalf("Error: %v", err)
}
+ if match {
+ t.Fatalf("Expected no match in negative slope scenario. Reason: %s", reason)
+ }
+}
- regression := newSimpleLinearRegression()
- for _, v := range variables {
- regression.AddPoint(v[0], v[1])
+func TestLargeNumberOfRequests(t *testing.T) {
+ baseline := 1.0
+ // 20 requests, slope=1.0, no noise. Should be very stable and produce a very high correlation.
+ match, reason, err := checkTimingDependency(
+ 20,
+ 5,
+ 0.05, // very strict correlation requirement
+ 0.1, // very strict slope range
+ baseline,
+ linearSender(baseline, 1.0, 0.0),
+ )
+ if err != nil {
+ t.Fatalf("Error: %v", err)
+ }
+ if !match {
+ t.Fatalf("Expected a strong match with many requests and perfect linearity. Reason: %s", reason)
}
- require.Less(t, regression.correlation, 0.9)
}
-const float64EqualityThreshold = 1e-8
+func TestHighBaselineLowSlope(t *testing.T) {
+ baseline := 15.0
+ match, reason, err := checkTimingDependency(
+ 10,
+ 5,
+ 0.2,
+		0.2, // expecting slope around 0.85, allow range ~0.65 to 1.05
+ baseline,
+ linearSender(baseline, 0.85, 0.0),
+ )
+ if err != nil {
+ t.Fatalf("Error: %v", err)
+ }
+ if !match {
+		t.Fatalf("Expected a match for slope=0.85 linear scenario. Reason: %s", reason)
+ }
+}
-func almostEqual(a, b float64) bool {
- return math.Abs(a-b) <= float64EqualityThreshold
+func TestHighNoiseConcealsSlope(t *testing.T) {
+ baseline := 5.0
+ // slope=1, but noise=5 seconds is huge and might conceal the correlation.
+ // With large noise, the test may fail to detect correlation.
+ match, reason, err := checkTimingDependency(
+ 12,
+ 5,
+ 0.1, // still strict
+ 0.2, // still strict
+ baseline,
+ linearSender(baseline, 1.0, 5.0),
+ )
+ if err != nil {
+ t.Fatalf("Error: %v", err)
+ }
+ // Expect no match because the noise level is too high to establish a reliable correlation.
+ if match {
+ t.Fatalf("Expected no match due to extreme noise. Reason: %s", reason)
+ }
+}
+
+func TestAlternatingSequences(t *testing.T) {
+ baseline := 0.0
+ var generatedDelays []float64
+ reqSender := func(delay int) (float64, error) {
+ generatedDelays = append(generatedDelays, float64(delay))
+ return float64(delay), nil
+ }
+ match, reason, err := checkTimingDependency(
+ 4, // requestsLimit
+ 15, // highSleepTimeSeconds
+ 0.1, // correlationErrorRange
+ 0.2, // slopeErrorRange
+ baseline,
+ reqSender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if !match {
+ t.Fatalf("Expected a match but got none. Reason: %s", reason)
+ }
+ // Verify alternating sequence of delays
+ expectedDelays := []float64{15, 3, 15, 3}
+ if !reflect.DeepEqual(generatedDelays, expectedDelays) {
+ t.Fatalf("Expected delays %v but got %v", expectedDelays, generatedDelays)
+ }
+}
+
+func TestNonInjectableQuickFail(t *testing.T) {
+ baseline := 0.5
+ var timesCalled int
+ reqSender := func(delay int) (float64, error) {
+ timesCalled++
+ return 0.5, nil // Return value less than delay
+ }
+ match, _, err := checkTimingDependency(
+ 4, // requestsLimit
+ 15, // highSleepTimeSeconds
+ 0.1, // correlationErrorRange
+ 0.2, // slopeErrorRange
+ baseline,
+ reqSender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if match {
+ t.Fatal("Expected no match for non-injectable case")
+ }
+ if timesCalled != 1 {
+ t.Fatalf("Expected quick fail after 1 call, got %d calls", timesCalled)
+ }
+}
+
+func TestSlowNonInjectableCase(t *testing.T) {
+ baseline := 10.0
+ rng := rand.New(rand.NewSource(time.Now().UnixNano()))
+ var timesCalled int
+ reqSender := func(delay int) (float64, error) {
+ timesCalled++
+ return 10 + rng.Float64()*0.5, nil
+ }
+ match, _, err := checkTimingDependency(
+ 4, // requestsLimit
+ 15, // highSleepTimeSeconds
+ 0.1, // correlationErrorRange
+ 0.2, // slopeErrorRange
+ baseline,
+ reqSender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if match {
+ t.Fatal("Expected no match for slow non-injectable case")
+ }
+ if timesCalled > 3 {
+ t.Fatalf("Expected early termination (≤3 calls), got %d calls", timesCalled)
+ }
+}
+
+func TestRealWorldNonInjectableCase(t *testing.T) {
+ baseline := 0.0
+ var iteration int
+ counts := []float64{11, 21, 11, 21, 11}
+ reqSender := func(delay int) (float64, error) {
+ iteration++
+ return counts[iteration-1], nil
+ }
+ match, _, err := checkTimingDependency(
+ 4, // requestsLimit
+ 15, // highSleepTimeSeconds
+ 0.1, // correlationErrorRange
+ 0.2, // slopeErrorRange
+ baseline,
+ reqSender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if match {
+ t.Fatal("Expected no match for real-world non-injectable case")
+ }
+ if iteration > 4 {
+ t.Fatalf("Expected ≤4 iterations, got %d", iteration)
+ }
+}
+
+func TestSmallErrorDependence(t *testing.T) {
+ baseline := 0.0
+ rng := rand.New(rand.NewSource(time.Now().UnixNano()))
+ reqSender := func(delay int) (float64, error) {
+ return float64(delay) + rng.Float64()*0.5, nil
+ }
+ match, reason, err := checkTimingDependency(
+ 4, // requestsLimit
+ 15, // highSleepTimeSeconds
+ 0.1, // correlationErrorRange
+ 0.2, // slopeErrorRange
+ baseline,
+ reqSender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if !match {
+ t.Fatalf("Expected match for small error case. Reason: %s", reason)
+ }
}
diff --git a/pkg/fuzz/component/component.go b/pkg/fuzz/component/component.go
index a15ac2856a..c3500048b1 100644
--- a/pkg/fuzz/component/component.go
+++ b/pkg/fuzz/component/component.go
@@ -67,8 +67,8 @@ const (
var Components = []string{
RequestBodyComponent,
RequestQueryComponent,
- RequestPathComponent,
RequestHeaderComponent,
+ RequestPathComponent,
RequestCookieComponent,
}
diff --git a/pkg/fuzz/component/cookie.go b/pkg/fuzz/component/cookie.go
index 77667c7479..25f29e794a 100644
--- a/pkg/fuzz/component/cookie.go
+++ b/pkg/fuzz/component/cookie.go
@@ -52,10 +52,6 @@ func (c *Cookie) Parse(req *retryablehttp.Request) (bool, error) {
// Iterate iterates through the component
func (c *Cookie) Iterate(callback func(key string, value interface{}) error) (err error) {
c.value.parsed.Iterate(func(key string, value any) bool {
- // Skip ignored cookies
- if _, ok := defaultIgnoredCookieKeys[key]; ok {
- return ok
- }
if errx := callback(key, value); errx != nil {
err = errx
return false
@@ -85,6 +81,7 @@ func (c *Cookie) Delete(key string) error {
// Rebuild returns a new request with the
// component rebuilt
func (c *Cookie) Rebuild() (*retryablehttp.Request, error) {
+ // TODO: Fix cookie duplication with auth-file
cloned := c.req.Clone(context.Background())
cloned.Header.Del("Cookie")
@@ -106,47 +103,3 @@ func (c *Cookie) Clone() Component {
req: c.req.Clone(context.Background()),
}
}
-
-// A list of cookies that are essential to the request and
-// must not be fuzzed.
-var defaultIgnoredCookieKeys = map[string]struct{}{
- "awsELB": {},
- "AWSALB": {},
- "AWSALBCORS": {},
- "__utma": {},
- "__utmb": {},
- "__utmc": {},
- "__utmt": {},
- "__utmz": {},
- "_ga": {},
- "_gat": {},
- "_gid": {},
- "_gcl_au": {},
- "_fbp": {},
- "fr": {},
- "__hstc": {},
- "hubspotutk": {},
- "__hssc": {},
- "__hssrc": {},
- "mp_mixpanel__c": {},
- "JSESSIONID": {},
- "NREUM": {},
- "_pk_id": {},
- "_pk_ref": {},
- "_pk_ses": {},
- "_pk_cvar": {},
- "_pk_hsr": {},
- "_hjIncludedInSample": {},
- "__cfduid": {},
- "cf_use_ob": {},
- "cf_ob_info": {},
- "intercom-session": {},
- "optimizelyEndUserId": {},
- "optimizelySegments": {},
- "optimizelyBuckets": {},
- "optimizelyPendingLogEvents": {},
- "YSC": {},
- "VISITOR_INFO1_LIVE": {},
- "PREF": {},
- "GPS": {},
-}
diff --git a/pkg/fuzz/execute.go b/pkg/fuzz/execute.go
index f97f3149ba..86a96cee95 100644
--- a/pkg/fuzz/execute.go
+++ b/pkg/fuzz/execute.go
@@ -10,6 +10,7 @@ import (
"github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/component"
+ fuzzStats "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/expressions"
@@ -122,6 +123,18 @@ func (rule *Rule) Execute(input *ExecuteRuleInput) (err error) {
return nil
})
}
+
+ if rule.options.FuzzStatsDB != nil {
+ _ = component.Iterate(func(key string, value interface{}) error {
+ rule.options.FuzzStatsDB.RecordComponentEvent(fuzzStats.ComponentEvent{
+ URL: input.Input.MetaInput.Target(),
+ ComponentType: componentName,
+ ComponentName: fmt.Sprintf("%v", value),
+ })
+ return nil
+ })
+ }
+
finalComponentList = append(finalComponentList, component)
}
if len(displayDebugFuzzPoints) > 0 {
diff --git a/pkg/fuzz/stats/db.go b/pkg/fuzz/stats/db.go
new file mode 100644
index 0000000000..d5caf9a75d
--- /dev/null
+++ b/pkg/fuzz/stats/db.go
@@ -0,0 +1,15 @@
+package stats
+
+import (
+ _ "embed"
+
+ _ "github.com/mattn/go-sqlite3"
+)
+
+// StatsDatabase abstracts the storage backend used to persist fuzzing
+// statistics. It is satisfied by the in-memory simpleStats collector.
+// NOTE(review): the blank go-sqlite3 import above is unused in this file —
+// presumably reserved for a database-backed implementation; confirm.
+type StatsDatabase interface {
+	Close()
+
+	InsertComponent(event ComponentEvent) error
+	InsertMatchedRecord(event FuzzingEvent) error
+	InsertError(event ErrorEvent) error
+}
diff --git a/pkg/fuzz/stats/db_test.go b/pkg/fuzz/stats/db_test.go
new file mode 100644
index 0000000000..e8a5c1e313
--- /dev/null
+++ b/pkg/fuzz/stats/db_test.go
@@ -0,0 +1,24 @@
+package stats
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_NewStatsDatabase(t *testing.T) {
+ db, err := NewSimpleStats()
+ require.NoError(t, err)
+
+ err = db.InsertMatchedRecord(FuzzingEvent{
+ URL: "http://localhost:8080/login",
+ TemplateID: "apache-struts2-001",
+ ComponentType: "path",
+ ComponentName: "/login",
+ PayloadSent: "/login'\"><",
+ StatusCode: 401,
+ })
+ require.NoError(t, err)
+
+ //os.Remove("test.stats.db")
+}
diff --git a/pkg/fuzz/stats/simple.go b/pkg/fuzz/stats/simple.go
new file mode 100644
index 0000000000..4a93aaaa42
--- /dev/null
+++ b/pkg/fuzz/stats/simple.go
@@ -0,0 +1,164 @@
+package stats
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+ "sync"
+ "sync/atomic"
+)
+
+// simpleStats is an in-memory, concurrency-safe collector of fuzzing
+// statistics. Totals are atomic counters; the sync.Maps either hold
+// de-duplication markers (unique components/endpoints/templates) or
+// *atomic.Int64 counters grouped by key (status code, severity, error).
+type simpleStats struct {
+	totalComponentsTested atomic.Int64
+	totalEndpointsTested  atomic.Int64
+	totalFuzzedRequests   atomic.Int64
+	totalMatchedResults   atomic.Int64
+	totalTemplatesTested  atomic.Int64
+	totalErroredRequests  atomic.Int64
+
+	statusCodes    sync.Map // int -> *atomic.Int64
+	severityCounts sync.Map // string -> *atomic.Int64
+
+	componentsUniqueMap sync.Map // "<name>_<type>" -> true
+	endpointsUniqueMap  sync.Map // "<site>_<path>" -> true
+	templatesUniqueMap  sync.Map // template ID -> true
+	errorGroupedStats   sync.Map // error string -> *atomic.Int64
+}
+
+// NewSimpleStats creates a new in-memory statistics collector.
+// The error return is always nil today; it is kept so the constructor's
+// shape matches storage-backed implementations of StatsDatabase.
+func NewSimpleStats() (*simpleStats, error) {
+	return &simpleStats{
+		totalComponentsTested: atomic.Int64{},
+		totalEndpointsTested:  atomic.Int64{},
+		totalMatchedResults:   atomic.Int64{},
+		totalFuzzedRequests:   atomic.Int64{},
+		totalTemplatesTested:  atomic.Int64{},
+		totalErroredRequests:  atomic.Int64{},
+		statusCodes:           sync.Map{},
+		severityCounts:        sync.Map{},
+		componentsUniqueMap:   sync.Map{},
+		endpointsUniqueMap:    sync.Map{},
+		templatesUniqueMap:    sync.Map{},
+		errorGroupedStats:     sync.Map{},
+	}, nil
+}
+
+// Close is a no-op for the in-memory collector; it exists to satisfy
+// the StatsDatabase interface.
+func (s *simpleStats) Close() {}
+
+// InsertComponent records that a fuzzable component was observed,
+// counting each unique "<name>_<type>" pair once, and counting the
+// unique endpoint ("<site>_<path>") it was seen on. Returns an error
+// only when event.URL fails to parse.
+//
+// NOTE(review): event.siteName is an unexported field populated by
+// Tracker.RecordComponentEvent; calling this method directly leaves it
+// empty, collapsing endpoints from different sites — confirm all
+// callers go through the Tracker.
+func (s *simpleStats) InsertComponent(event ComponentEvent) error {
+	componentKey := fmt.Sprintf("%s_%s", event.ComponentName, event.ComponentType)
+	if _, ok := s.componentsUniqueMap.Load(componentKey); !ok {
+		s.componentsUniqueMap.Store(componentKey, true)
+		s.totalComponentsTested.Add(1)
+	}
+
+	parsedURL, err := url.Parse(event.URL)
+	if err != nil {
+		return err
+	}
+
+	endpointsKey := fmt.Sprintf("%s_%s", event.siteName, parsedURL.Path)
+	if _, ok := s.endpointsUniqueMap.Load(endpointsKey); !ok {
+		s.endpointsUniqueMap.Store(endpointsKey, true)
+		s.totalEndpointsTested.Add(1)
+	}
+
+	return nil
+}
+
+// InsertMatchedRecord records one fuzzed request: it always increments
+// the fuzzed-request total and the per-status-code counter; when the
+// event matched, it also bumps the match total and the per-severity
+// counter. Each template ID is counted once toward templates tested.
+// Always returns nil; the error is kept for interface compatibility.
+func (s *simpleStats) InsertMatchedRecord(event FuzzingEvent) error {
+	s.totalFuzzedRequests.Add(1)
+
+	s.incrementStatusCode(event.StatusCode)
+	if event.Matched {
+		s.totalMatchedResults.Add(1)
+
+		s.incrementSeverityCount(event.Severity)
+	}
+
+	if _, ok := s.templatesUniqueMap.Load(event.TemplateID); !ok {
+		s.templatesUniqueMap.Store(event.TemplateID, true)
+		s.totalTemplatesTested.Add(1)
+	}
+	return nil
+}
+
+// InsertError records one errored request and increments the counter
+// grouped by the exact error string. LoadOrStore makes the counter
+// creation race-free across goroutines. Always returns nil.
+func (s *simpleStats) InsertError(event ErrorEvent) error {
+	s.totalErroredRequests.Add(1)
+
+	value, _ := s.errorGroupedStats.LoadOrStore(event.Error, &atomic.Int64{})
+	if counter, ok := value.(*atomic.Int64); ok {
+		counter.Add(1)
+	}
+	return nil
+}
+
+// SimpleStatsResponse is a point-in-time snapshot of the collected
+// fuzzing statistics, suitable for serialization (e.g. in the DAST
+// server's stats endpoint).
+type SimpleStatsResponse struct {
+	TotalMatchedResults   int64
+	TotalComponentsTested int64
+	TotalEndpointsTested  int64
+	TotalFuzzedRequests   int64
+	TotalTemplatesTested  int64
+	TotalErroredRequests  int64
+	StatusCodes           map[string]int64 // keyed as "404_NOT_FOUND"
+	SeverityCounts        map[string]int64
+	ErrorGroupedStats     map[string]int64 // keyed by error string
+}
+
+// GetStatistics snapshots all counters into a SimpleStatsResponse.
+// The snapshot is not atomic across counters: concurrent inserts may
+// land between the individual Range/Load calls, so totals can be
+// momentarily out of sync with the grouped maps.
+func (s *simpleStats) GetStatistics() SimpleStatsResponse {
+	statusStats := make(map[string]int64)
+	s.statusCodes.Range(func(key, value interface{}) bool {
+		if count, ok := value.(*atomic.Int64); ok {
+			statusStats[formatStatusCode(key.(int))] = count.Load()
+		}
+		return true
+	})
+
+	severityStats := make(map[string]int64)
+	s.severityCounts.Range(func(key, value interface{}) bool {
+		if count, ok := value.(*atomic.Int64); ok {
+			severityStats[key.(string)] = count.Load()
+		}
+		return true
+	})
+
+	errorStats := make(map[string]int64)
+	s.errorGroupedStats.Range(func(key, value interface{}) bool {
+		if count, ok := value.(*atomic.Int64); ok {
+			errorStats[key.(string)] = count.Load()
+		}
+		return true
+	})
+
+	return SimpleStatsResponse{
+		TotalMatchedResults:   s.totalMatchedResults.Load(),
+		StatusCodes:           statusStats,
+		SeverityCounts:        severityStats,
+		TotalComponentsTested: s.totalComponentsTested.Load(),
+		TotalEndpointsTested:  s.totalEndpointsTested.Load(),
+		TotalFuzzedRequests:   s.totalFuzzedRequests.Load(),
+		TotalTemplatesTested:  s.totalTemplatesTested.Load(),
+		TotalErroredRequests:  s.totalErroredRequests.Load(),
+		ErrorGroupedStats:     errorStats,
+	}
+}
+
+// incrementStatusCode bumps the counter for the given HTTP status code,
+// creating it race-free on first use via LoadOrStore.
+func (s *simpleStats) incrementStatusCode(statusCode int) {
+	value, _ := s.statusCodes.LoadOrStore(statusCode, &atomic.Int64{})
+	if counter, ok := value.(*atomic.Int64); ok {
+		counter.Add(1)
+	}
+}
+
+// incrementSeverityCount bumps the counter for the given severity
+// string, creating it race-free on first use via LoadOrStore.
+func (s *simpleStats) incrementSeverityCount(severity string) {
+	value, _ := s.severityCounts.LoadOrStore(severity, &atomic.Int64{})
+	if counter, ok := value.(*atomic.Int64); ok {
+		counter.Add(1)
+	}
+}
+
+// formatStatusCode renders a status code as "<code>_<STATUS_TEXT>",
+// e.g. 404 -> "404_NOT_FOUND" (ToTitle upper-cases the ASCII text).
+// Unknown codes have empty StatusText, yielding e.g. "599_".
+func formatStatusCode(code int) string {
+	escapedText := strings.ToTitle(strings.ReplaceAll(http.StatusText(code), " ", "_"))
+	formatted := fmt.Sprintf("%d_%s", code, escapedText)
+	return formatted
+}
diff --git a/pkg/fuzz/stats/stats.go b/pkg/fuzz/stats/stats.go
new file mode 100644
index 0000000000..87ed5c379a
--- /dev/null
+++ b/pkg/fuzz/stats/stats.go
@@ -0,0 +1,106 @@
+// Package stats implements a statistics recording module for
+// nuclei fuzzing.
+package stats
+
+import (
+ "fmt"
+ "log"
+ "net/url"
+
+ "github.com/pkg/errors"
+)
+
+// Tracker is a stats tracker module for fuzzing server
+type Tracker struct {
+ database *simpleStats
+}
+
+// NewTracker creates a new tracker instance backed by the in-memory
+// simpleStats collector. The wrapped error path is currently
+// unreachable since NewSimpleStats never fails.
+func NewTracker() (*Tracker, error) {
+	db, err := NewSimpleStats()
+	if err != nil {
+		return nil, errors.Wrap(err, "could not create new tracker")
+	}
+
+	tracker := &Tracker{
+		database: db,
+	}
+	return tracker, nil
+}
+
+// GetStats returns a point-in-time snapshot of the collected
+// fuzzing statistics.
+func (t *Tracker) GetStats() SimpleStatsResponse {
+	return t.database.GetStatistics()
+}
+
+// Close closes the tracker
+func (t *Tracker) Close() {
+ t.database.Close()
+}
+
+// FuzzingEvent describes the outcome of a single fuzzed request,
+// including the component targeted, the payload sent, and whether a
+// template matched.
+type FuzzingEvent struct {
+	URL           string
+	ComponentType string
+	ComponentName string
+	TemplateID    string
+	PayloadSent   string
+	StatusCode    int
+	Matched       bool
+	RawRequest    string
+	RawResponse   string
+	Severity      string
+
+	// siteName is the normalized host:port derived from URL by
+	// Tracker.RecordResultEvent; callers do not set it.
+	siteName string
+}
+
+// RecordResultEvent normalizes the event's site name from its URL and
+// stores the fuzzing result; insert failures are logged, not returned.
+func (t *Tracker) RecordResultEvent(event FuzzingEvent) {
+	event.siteName = getCorrectSiteName(event.URL)
+	if err := t.database.InsertMatchedRecord(event); err != nil {
+		log.Printf("could not insert matched record: %s", err)
+	}
+}
+
+// ComponentEvent describes a fuzzable component (e.g. a query
+// parameter, header, or cookie) discovered at a URL.
+type ComponentEvent struct {
+	URL           string
+	ComponentType string
+	ComponentName string
+
+	// siteName is the normalized host:port derived from URL by
+	// Tracker.RecordComponentEvent; callers do not set it.
+	siteName string
+}
+
+// RecordComponentEvent normalizes the event's site name from its URL
+// and stores the component; insert failures are logged, not returned.
+func (t *Tracker) RecordComponentEvent(event ComponentEvent) {
+	event.siteName = getCorrectSiteName(event.URL)
+	if err := t.database.InsertComponent(event); err != nil {
+		log.Printf("could not insert component record: %s", err)
+	}
+}
+
+// ErrorEvent describes a request that errored while fuzzing a
+// template against a URL.
+type ErrorEvent struct {
+	TemplateID string
+	URL        string
+	Error      string
+}
+
+// RecordErrorEvent stores an errored-request event; insert failures
+// are logged, not returned.
+func (t *Tracker) RecordErrorEvent(event ErrorEvent) {
+	if err := t.database.InsertError(event); err != nil {
+		log.Printf("could not insert error record: %s", err)
+	}
+}
+
+// getCorrectSiteName normalizes a URL to a "host:port" site key,
+// appending the default port (443/80) when the URL omits it for
+// https/http. Returns "" if the URL does not parse; other schemes
+// keep a bare host with no default port.
+func getCorrectSiteName(originalURL string) string {
+	parsed, err := url.Parse(originalURL)
+	if err != nil {
+		return ""
+	}
+
+	// Site is the host:port combo
+	siteName := parsed.Host
+	if parsed.Port() == "" {
+		if parsed.Scheme == "https" {
+			siteName = fmt.Sprintf("%s:443", siteName)
+		} else if parsed.Scheme == "http" {
+			siteName = fmt.Sprintf("%s:80", siteName)
+		}
+	}
+	return siteName
+}
diff --git a/pkg/input/formats/burp/burp.go b/pkg/input/formats/burp/burp.go
index 6ad5f548b5..9b2a362dfe 100644
--- a/pkg/input/formats/burp/burp.go
+++ b/pkg/input/formats/burp/burp.go
@@ -2,7 +2,7 @@ package burp
import (
"encoding/base64"
- "os"
+ "io"
"strings"
"github.com/pkg/errors"
@@ -35,14 +35,8 @@ func (j *BurpFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
-func (j *BurpFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
- file, err := os.Open(input)
- if err != nil {
- return errors.Wrap(err, "could not open data file")
- }
- defer file.Close()
-
- items, err := burpxml.Parse(file, true)
+func (j *BurpFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
+ items, err := burpxml.Parse(input, true)
if err != nil {
return errors.Wrap(err, "could not decode burp xml schema")
}
diff --git a/pkg/input/formats/burp/burp_test.go b/pkg/input/formats/burp/burp_test.go
index 330218a9e5..97e80c534f 100644
--- a/pkg/input/formats/burp/burp_test.go
+++ b/pkg/input/formats/burp/burp_test.go
@@ -1,6 +1,7 @@
package burp
import (
+ "os"
"testing"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
@@ -14,10 +15,14 @@ func TestBurpParse(t *testing.T) {
var gotMethodsToURLs []string
- err := format.Parse(proxifyInputFile, func(request *types.RequestResponse) bool {
+ file, err := os.Open(proxifyInputFile)
+ require.Nilf(t, err, "error opening proxify input file: %v", err)
+ defer file.Close()
+
+ err = format.Parse(file, func(request *types.RequestResponse) bool {
gotMethodsToURLs = append(gotMethodsToURLs, request.URL.String())
return false
- })
+ }, proxifyInputFile)
if err != nil {
t.Fatal(err)
}
diff --git a/pkg/input/formats/formats.go b/pkg/input/formats/formats.go
index af2b4569c6..03c65d3fea 100644
--- a/pkg/input/formats/formats.go
+++ b/pkg/input/formats/formats.go
@@ -2,6 +2,7 @@ package formats
import (
"errors"
+ "io"
"os"
"strings"
@@ -35,7 +36,7 @@ type Format interface {
Name() string
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
- Parse(input string, resultsCb ParseReqRespCallback) error
+ Parse(input io.Reader, resultsCb ParseReqRespCallback, filePath string) error
// SetOptions sets the options for the input format
SetOptions(options InputFormatOptions)
}
diff --git a/pkg/input/formats/json/json.go b/pkg/input/formats/json/json.go
index 69e628c684..38c2117fcb 100644
--- a/pkg/input/formats/json/json.go
+++ b/pkg/input/formats/json/json.go
@@ -3,7 +3,6 @@ package json
import (
"encoding/json"
"io"
- "os"
"github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
@@ -46,14 +45,8 @@ func (j *JSONFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
-func (j *JSONFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
- file, err := os.Open(input)
- if err != nil {
- return errors.Wrap(err, "could not open json file")
- }
- defer file.Close()
-
- decoder := json.NewDecoder(file)
+func (j *JSONFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
+ decoder := json.NewDecoder(input)
for {
var request proxifyRequest
err := decoder.Decode(&request)
diff --git a/pkg/input/formats/json/json_test.go b/pkg/input/formats/json/json_test.go
index b72bf4c197..a6734f083e 100644
--- a/pkg/input/formats/json/json_test.go
+++ b/pkg/input/formats/json/json_test.go
@@ -1,6 +1,7 @@
package json
import (
+ "os"
"testing"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
@@ -41,11 +42,15 @@ func TestJSONFormatterParse(t *testing.T) {
proxifyInputFile := "../testdata/ginandjuice.proxify.json"
+ file, err := os.Open(proxifyInputFile)
+ require.Nilf(t, err, "error opening proxify input file: %v", err)
+ defer file.Close()
+
var urls []string
- err := format.Parse(proxifyInputFile, func(request *types.RequestResponse) bool {
+ err = format.Parse(file, func(request *types.RequestResponse) bool {
urls = append(urls, request.URL.String())
return false
- })
+ }, proxifyInputFile)
if err != nil {
t.Fatal(err)
}
diff --git a/pkg/input/formats/openapi/openapi.go b/pkg/input/formats/openapi/openapi.go
index afbe379fd2..c2086636b4 100644
--- a/pkg/input/formats/openapi/openapi.go
+++ b/pkg/input/formats/openapi/openapi.go
@@ -1,6 +1,8 @@
package openapi
import (
+ "io"
+
"github.com/getkin/kin-openapi/openapi3"
"github.com/pkg/errors"
"github.com/projectdiscovery/nuclei/v3/pkg/input/formats"
@@ -29,9 +31,9 @@ func (j *OpenAPIFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
-func (j *OpenAPIFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
+func (j *OpenAPIFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
loader := openapi3.NewLoader()
- schema, err := loader.LoadFromFile(input)
+ schema, err := loader.LoadFromIoReader(input)
if err != nil {
return errors.Wrap(err, "could not decode openapi 3.0 schema")
}
diff --git a/pkg/input/formats/openapi/openapi_test.go b/pkg/input/formats/openapi/openapi_test.go
index f48385a808..c202bdcbee 100644
--- a/pkg/input/formats/openapi/openapi_test.go
+++ b/pkg/input/formats/openapi/openapi_test.go
@@ -1,6 +1,7 @@
package openapi
import (
+ "os"
"strings"
"testing"
@@ -41,11 +42,15 @@ func TestOpenAPIParser(t *testing.T) {
gotMethodsToURLs := make(map[string][]string)
- err := format.Parse(proxifyInputFile, func(rr *types.RequestResponse) bool {
+ file, err := os.Open(proxifyInputFile)
+ require.Nilf(t, err, "error opening proxify input file: %v", err)
+ defer file.Close()
+
+ err = format.Parse(file, func(rr *types.RequestResponse) bool {
gotMethodsToURLs[rr.Request.Method] = append(gotMethodsToURLs[rr.Request.Method],
strings.Replace(rr.URL.String(), baseURL, "{{baseUrl}}", 1))
return false
- })
+ }, proxifyInputFile)
if err != nil {
t.Fatal(err)
}
diff --git a/pkg/input/formats/swagger/swagger.go b/pkg/input/formats/swagger/swagger.go
index 30a7564ecc..8310b16902 100644
--- a/pkg/input/formats/swagger/swagger.go
+++ b/pkg/input/formats/swagger/swagger.go
@@ -3,15 +3,14 @@ package swagger
import (
"encoding/json"
"io"
- "os"
"path"
"github.com/getkin/kin-openapi/openapi2"
"github.com/getkin/kin-openapi/openapi3"
+ "github.com/invopop/yaml"
"github.com/pkg/errors"
"github.com/projectdiscovery/nuclei/v3/pkg/input/formats"
"github.com/projectdiscovery/nuclei/v3/pkg/input/formats/openapi"
- "github.com/invopop/yaml"
"github.com/getkin/kin-openapi/openapi2conv"
)
@@ -39,24 +38,19 @@ func (j *SwaggerFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
-func (j *SwaggerFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
- file, err := os.Open(input)
- if err != nil {
- return errors.Wrap(err, "could not open data file")
- }
- defer file.Close()
-
+func (j *SwaggerFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
schemav2 := &openapi2.T{}
- ext := path.Ext(input)
-
+ ext := path.Ext(filePath)
+ var err error
if ext == ".yaml" || ext == ".yml" {
- data, err_data := io.ReadAll(file)
- if err_data != nil {
+ var data []byte
+ data, err = io.ReadAll(input)
+ if err != nil {
return errors.Wrap(err, "could not read data file")
}
err = yaml.Unmarshal(data, schemav2)
} else {
- err = json.NewDecoder(file).Decode(schemav2)
+ err = json.NewDecoder(input).Decode(schemav2)
}
if err != nil {
return errors.Wrap(err, "could not decode openapi 2.0 schema")
diff --git a/pkg/input/formats/swagger/swagger_test.go b/pkg/input/formats/swagger/swagger_test.go
index 065ae78f63..caed82a13b 100644
--- a/pkg/input/formats/swagger/swagger_test.go
+++ b/pkg/input/formats/swagger/swagger_test.go
@@ -1,6 +1,7 @@
package swagger
import (
+ "os"
"testing"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
@@ -14,10 +15,14 @@ func TestSwaggerAPIParser(t *testing.T) {
var gotMethodsToURLs []string
- err := format.Parse(proxifyInputFile, func(request *types.RequestResponse) bool {
+ file, err := os.Open(proxifyInputFile)
+ require.Nilf(t, err, "error opening proxify input file: %v", err)
+ defer file.Close()
+
+ err = format.Parse(file, func(request *types.RequestResponse) bool {
gotMethodsToURLs = append(gotMethodsToURLs, request.URL.String())
return false
- })
+ }, proxifyInputFile)
if err != nil {
t.Fatal(err)
}
diff --git a/pkg/input/formats/yaml/multidoc.go b/pkg/input/formats/yaml/multidoc.go
index dc258408c1..6d75e0334a 100644
--- a/pkg/input/formats/yaml/multidoc.go
+++ b/pkg/input/formats/yaml/multidoc.go
@@ -2,7 +2,6 @@ package yaml
import (
"io"
- "os"
"strings"
"github.com/pkg/errors"
@@ -46,14 +45,8 @@ func (j *YamlMultiDocFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
-func (j *YamlMultiDocFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
- file, err := os.Open(input)
- if err != nil {
- return errors.Wrap(err, "could not open json file")
- }
- defer file.Close()
-
- decoder := YamlUtil.NewDecoder(file)
+func (j *YamlMultiDocFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
+ decoder := YamlUtil.NewDecoder(input)
for {
var request proxifyRequest
err := decoder.Decode(&request)
diff --git a/pkg/input/formats/yaml/multidoc_test.go b/pkg/input/formats/yaml/multidoc_test.go
index 6275eae593..0b91e774a3 100644
--- a/pkg/input/formats/yaml/multidoc_test.go
+++ b/pkg/input/formats/yaml/multidoc_test.go
@@ -1,6 +1,7 @@
package yaml
import (
+ "os"
"testing"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
@@ -17,11 +18,15 @@ func TestYamlFormatterParse(t *testing.T) {
"https://ginandjuice.shop/users/3",
}
+ file, err := os.Open(proxifyInputFile)
+ require.Nilf(t, err, "error opening proxify input file: %v", err)
+ defer file.Close()
+
var urls []string
- err := format.Parse(proxifyInputFile, func(request *types.RequestResponse) bool {
+ err = format.Parse(file, func(request *types.RequestResponse) bool {
urls = append(urls, request.URL.String())
return false
- })
+ }, proxifyInputFile)
require.Nilf(t, err, "error parsing yaml file: %v", err)
require.Len(t, urls, len(expectedUrls), "invalid number of urls")
require.ElementsMatch(t, urls, expectedUrls, "invalid urls")
diff --git a/pkg/input/provider/http/multiformat.go b/pkg/input/provider/http/multiformat.go
index d58970fec5..a534879c17 100644
--- a/pkg/input/provider/http/multiformat.go
+++ b/pkg/input/provider/http/multiformat.go
@@ -1,6 +1,9 @@
package http
import (
+ "bytes"
+ "io"
+ "os"
"strings"
"github.com/pkg/errors"
@@ -23,17 +26,25 @@ type HttpMultiFormatOptions struct {
InputFile string
// InputMode is the mode of input
InputMode string
+
+ // optional input reader
+ InputContents string
}
// HttpInputProvider implements an input provider for nuclei that loads
// inputs from multiple formats like burp, openapi, postman,proxify, etc.
type HttpInputProvider struct {
format formats.Format
+ inputData []byte
inputFile string
count int64
}
// NewHttpInputProvider creates a new input provider for nuclei from a file
+// or an input string
+//
+// The first preference is given to input file if provided
+// otherwise it will use the input string
func NewHttpInputProvider(opts *HttpMultiFormatOptions) (*HttpInputProvider, error) {
var format formats.Format
for _, provider := range providersList {
@@ -48,14 +59,40 @@ func NewHttpInputProvider(opts *HttpMultiFormatOptions) (*HttpInputProvider, err
// Do a first pass over the input to identify any errors
// and get the count of the input file as well
count := int64(0)
- parseErr := format.Parse(opts.InputFile, func(request *types.RequestResponse) bool {
+ var inputFile *os.File
+ var inputReader io.Reader
+ if opts.InputFile != "" {
+ file, err := os.Open(opts.InputFile)
+ if err != nil {
+ return nil, errors.Wrap(err, "could not open input file")
+ }
+ inputFile = file
+ inputReader = file
+ } else {
+ inputReader = strings.NewReader(opts.InputContents)
+ }
+ defer func() {
+ if inputFile != nil {
+ inputFile.Close()
+ }
+ }()
+
+ data, err := io.ReadAll(inputReader)
+ if err != nil {
+ return nil, errors.Wrap(err, "could not read input file")
+ }
+ if len(data) == 0 {
+ return nil, errors.New("input file is empty")
+ }
+
+ parseErr := format.Parse(bytes.NewReader(data), func(request *types.RequestResponse) bool {
count++
return false
- })
+ }, opts.InputFile)
if parseErr != nil {
return nil, errors.Wrap(parseErr, "could not parse input file")
}
- return &HttpInputProvider{format: format, inputFile: opts.InputFile, count: count}, nil
+ return &HttpInputProvider{format: format, inputData: data, inputFile: opts.InputFile, count: count}, nil
}
// Count returns the number of items for input provider
@@ -65,12 +102,12 @@ func (i *HttpInputProvider) Count() int64 {
// Iterate over all inputs in order
func (i *HttpInputProvider) Iterate(callback func(value *contextargs.MetaInput) bool) {
- err := i.format.Parse(i.inputFile, func(request *types.RequestResponse) bool {
+ err := i.format.Parse(bytes.NewReader(i.inputData), func(request *types.RequestResponse) bool {
metaInput := contextargs.NewMetaInput()
metaInput.ReqResp = request
metaInput.Input = request.URL.String()
return callback(metaInput)
- })
+ }, i.inputFile)
if err != nil {
gologger.Warning().Msgf("Could not parse input file while iterating: %s\n", err)
}
diff --git a/pkg/output/output.go b/pkg/output/output.go
index 84201c0d73..63c761b077 100644
--- a/pkg/output/output.go
+++ b/pkg/output/output.go
@@ -73,6 +73,10 @@ type StandardWriter struct {
DisableStdout bool
AddNewLinesOutputFile bool // by default this is only done for stdout
KeysToRedact []string
+
+ // JSONLogRequestHook is a hook that can be used to log request/response
+ // when using custom server code with output
+ JSONLogRequestHook func(*JSONLogRequest)
}
var decolorizerRegex = regexp.MustCompile(`\x1B\[[0-9;]*[a-zA-Z]`)
@@ -348,7 +352,7 @@ type JSONLogRequest struct {
// Request writes a log the requests trace log
func (w *StandardWriter) Request(templatePath, input, requestType string, requestErr error) {
- if w.traceFile == nil && w.errorFile == nil {
+ if w.traceFile == nil && w.errorFile == nil && w.JSONLogRequestHook == nil {
return
}
request := &JSONLogRequest{
@@ -397,6 +401,11 @@ func (w *StandardWriter) Request(templatePath, input, requestType string, reques
if val := errkit.GetAttrValue(requestErr, "address"); val.Any() != nil {
request.Address = val.String()
}
+
+ if w.JSONLogRequestHook != nil {
+ w.JSONLogRequestHook(request)
+ }
+
data, err := jsoniter.Marshal(request)
if err != nil {
return
diff --git a/pkg/protocols/http/http.go b/pkg/protocols/http/http.go
index 2243cab9e9..31f85bf44a 100644
--- a/pkg/protocols/http/http.go
+++ b/pkg/protocols/http/http.go
@@ -320,8 +320,8 @@ func (request *Request) Compile(options *protocols.ExecutorOptions) error {
timeoutVal = 5
}
- // Add 3x buffer to the timeout
- customTimeout = int(math.Ceil(float64(timeoutVal) * 3))
+ // Add 5x buffer to the timeout
+ customTimeout = int(math.Ceil(float64(timeoutVal) * 5))
}
if customTimeout > 0 {
connectionConfiguration.Connection.CustomMaxTimeout = time.Duration(customTimeout) * time.Second
diff --git a/pkg/protocols/http/httpclientpool/clientpool.go b/pkg/protocols/http/httpclientpool/clientpool.go
index 042ee7ac0e..75db806178 100644
--- a/pkg/protocols/http/httpclientpool/clientpool.go
+++ b/pkg/protocols/http/httpclientpool/clientpool.go
@@ -111,6 +111,7 @@ func (c *Configuration) Clone() *Configuration {
if c.Connection != nil {
cloneConnection := &ConnectionConfiguration{
DisableKeepAlive: c.Connection.DisableKeepAlive,
+ CustomMaxTimeout: c.Connection.CustomMaxTimeout,
}
if c.Connection.HasCookieJar() {
cookiejar := *c.Connection.GetCookieJar()
diff --git a/pkg/protocols/http/request.go b/pkg/protocols/http/request.go
index 4ce9e57f55..ff616ed87b 100644
--- a/pkg/protocols/http/request.go
+++ b/pkg/protocols/http/request.go
@@ -20,6 +20,7 @@ import (
"github.com/projectdiscovery/fastdialer/fastdialer"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/analyzers"
+ fuzzStats "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/operators"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
@@ -1022,6 +1023,21 @@ func (request *Request) executeRequest(input *contextargs.Context, generatedRequ
callback(event)
+ if request.options.FuzzStatsDB != nil && generatedRequest.fuzzGeneratedRequest.Request != nil {
+ request.options.FuzzStatsDB.RecordResultEvent(fuzzStats.FuzzingEvent{
+ URL: input.MetaInput.Target(),
+ TemplateID: request.options.TemplateID,
+ ComponentType: generatedRequest.fuzzGeneratedRequest.Component.Name(),
+ ComponentName: generatedRequest.fuzzGeneratedRequest.Parameter,
+ PayloadSent: generatedRequest.fuzzGeneratedRequest.Value,
+ StatusCode: respChain.Response().StatusCode,
+ Matched: event.HasResults(),
+ RawRequest: string(dumpedRequest),
+ RawResponse: respChain.FullResponse().String(),
+ Severity: request.options.TemplateInfo.SeverityHolder.Severity.String(),
+ })
+ }
+
// Skip further responses if we have stop-at-first-match and a match
if (request.options.Options.StopAtFirstMatch || request.options.StopAtFirstMatch || request.StopAtFirstMatch) && event.HasResults() {
return nil
diff --git a/pkg/protocols/protocols.go b/pkg/protocols/protocols.go
index 9ead70321d..7b7f71d485 100644
--- a/pkg/protocols/protocols.go
+++ b/pkg/protocols/protocols.go
@@ -14,6 +14,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
"github.com/projectdiscovery/nuclei/v3/pkg/catalog"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/frequency"
+ "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/input"
"github.com/projectdiscovery/nuclei/v3/pkg/js/compiler"
"github.com/projectdiscovery/nuclei/v3/pkg/loader/parser"
@@ -99,6 +100,8 @@ type ExecutorOptions struct {
InputHelper *input.Helper
// FuzzParamsFrequency is a cache for parameter frequency
FuzzParamsFrequency *frequency.Tracker
+ // FuzzStatsDB is a database for fuzzing stats
+ FuzzStatsDB *stats.Tracker
Operators []*operators.Operators // only used by offlinehttp module
diff --git a/pkg/types/types.go b/pkg/types/types.go
index aa43683ecd..413dd24e47 100644
--- a/pkg/types/types.go
+++ b/pkg/types/types.go
@@ -419,6 +419,18 @@ type Options struct {
ProbeConcurrency int
// Dast only runs DAST templates
DAST bool
+ // DASTServer is the flag to start nuclei as a DAST server
+ DASTServer bool
+ // DASTServerToken is the token optional for the dast server
+ DASTServerToken string
+ // DASTServerAddress is the address for the dast server
+ DASTServerAddress string
+ // DASTReport enables dast report server & final report generation
+ DASTReport bool
+ // Scope contains a list of regexes for in-scope URLS
+ Scope goflags.StringSlice
+ // OutOfScope contains a list of regexes for out-scope URLS
+ OutOfScope goflags.StringSlice
// HttpApiEndpoint is the experimental http api endpoint
HttpApiEndpoint string
// ListTemplateProfiles lists all available template profiles