diff --git a/cmd/nuclei/main.go b/cmd/nuclei/main.go
index e619e0822f..05c8a2c5a4 100644
--- a/cmd/nuclei/main.go
+++ b/cmd/nuclei/main.go
@@ -185,6 +185,11 @@ func main() {
go func() {
for range c {
gologger.Info().Msgf("CTRL+C pressed: Exiting\n")
+ if options.DASTServer {
+ nucleiRunner.Close()
+ os.Exit(1)
+ }
+
gologger.Info().Msgf("Attempting graceful shutdown...")
if options.EnableCloudUpload {
gologger.Info().Msgf("Uploading scan results to cloud...")
@@ -358,9 +363,15 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.StringVarP(&options.FuzzingMode, "fuzzing-mode", "fm", "", "overrides fuzzing mode set in template (multiple, single)"),
flagSet.BoolVar(&fuzzFlag, "fuzz", false, "enable loading fuzzing templates (Deprecated: use -dast instead)"),
flagSet.BoolVar(&options.DAST, "dast", false, "enable / run dast (fuzz) nuclei templates"),
+ flagSet.BoolVarP(&options.DASTServer, "dast-server", "dts", false, "enable dast server mode (live fuzzing)"),
+ flagSet.BoolVarP(&options.DASTReport, "dast-report", "dtr", false, "write dast scan report to file"),
+ flagSet.StringVarP(&options.DASTServerToken, "dast-server-token", "dtst", "", "dast server token (optional)"),
+ flagSet.StringVarP(&options.DASTServerAddress, "dast-server-address", "dtsa", "localhost:9055", "dast server address"),
flagSet.BoolVarP(&options.DisplayFuzzPoints, "display-fuzz-points", "dfp", false, "display fuzz points in the output for debugging"),
flagSet.IntVar(&options.FuzzParamFrequency, "fuzz-param-frequency", 10, "frequency of uninteresting parameters for fuzzing before skipping"),
flagSet.StringVarP(&options.FuzzAggressionLevel, "fuzz-aggression", "fa", "low", "fuzzing aggression level controls payload count for fuzz (low, medium, high)"),
+ flagSet.StringSliceVarP(&options.Scope, "fuzz-scope", "cs", nil, "in scope url regex to be followed by fuzzer", goflags.FileCommaSeparatedStringSliceOptions),
+ flagSet.StringSliceVarP(&options.OutOfScope, "fuzz-out-scope", "cos", nil, "out of scope url regex to be excluded by fuzzer", goflags.FileCommaSeparatedStringSliceOptions),
)
flagSet.CreateGroup("uncover", "Uncover",
@@ -447,6 +458,7 @@ on extensive configurability, massive extensibility and ease of use.`)
flagSet.BoolVarP(&options.StatsJSON, "stats-json", "sj", false, "display statistics in JSONL(ines) format"),
flagSet.IntVarP(&options.StatsInterval, "stats-interval", "si", 5, "number of seconds to wait between showing a statistics update"),
flagSet.IntVarP(&options.MetricsPort, "metrics-port", "mp", 9092, "port to expose nuclei metrics on"),
+ flagSet.BoolVarP(&options.HTTPStats, "http-stats", "hps", false, "enable http status capturing (experimental)"),
)
flagSet.CreateGroup("cloud", "Cloud",
diff --git a/go.mod b/go.mod
index 1190a1a0c8..cd60350834 100644
--- a/go.mod
+++ b/go.mod
@@ -51,6 +51,7 @@ require (
github.com/DataDog/gostackparse v0.6.0
github.com/Masterminds/semver/v3 v3.2.1
github.com/Mzack9999/gcache v0.0.0-20230410081825-519e28eab057
+ github.com/alitto/pond v1.9.2
github.com/antchfx/xmlquery v1.3.17
github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2
github.com/aws/aws-sdk-go-v2 v1.19.0
@@ -75,7 +76,7 @@ require (
github.com/h2non/filetype v1.1.3
github.com/invopop/yaml v0.3.1
github.com/kitabisa/go-ci v1.0.3
- github.com/labstack/echo/v4 v4.10.2
+ github.com/labstack/echo/v4 v4.12.0
github.com/leslie-qiwa/flat v0.0.0-20230424180412-f9d1cf014baa
github.com/lib/pq v1.10.9
github.com/mattn/go-sqlite3 v1.14.22
@@ -359,7 +360,7 @@ require (
github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
github.com/jcmturner/gokrb5/v8 v8.4.4
github.com/kevinburke/ssh_config v1.2.0 // indirect
- github.com/labstack/gommon v0.4.0 // indirect
+ github.com/labstack/gommon v0.4.2 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/nwaples/rardecode v1.1.3 // indirect
github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect
diff --git a/go.sum b/go.sum
index 03adb633d7..bf2bf2d9c7 100644
--- a/go.sum
+++ b/go.sum
@@ -114,6 +114,8 @@ github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAu
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
github.com/alexbrainman/sspi v0.0.0-20210105120005-909beea2cc74 h1:Kk6a4nehpJ3UuJRqlA3JxYxBZEqCeOmATOvrbT4p9RA=
github.com/alexbrainman/sspi v0.0.0-20210105120005-909beea2cc74/go.mod h1:cEWa1LVoE5KvSD9ONXsZrj0z6KqySlCCNKHlLzbqAt4=
+github.com/alitto/pond v1.9.2 h1:9Qb75z/scEZVCoSU+osVmQ0I0JOeLfdTDafrbcJ8CLs=
+github.com/alitto/pond v1.9.2/go.mod h1:xQn3P/sHTYcU/1BR3i86IGIrilcrGC2LiS+E2+CJWsI=
github.com/andybalholm/brotli v1.0.1/go.mod h1:loMXtMfwqflxFJPmdbJO0a3KNoPuLBgiu3qAvBg8x/Y=
github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA=
github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA=
@@ -692,10 +694,10 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
-github.com/labstack/echo/v4 v4.10.2 h1:n1jAhnq/elIFTHr1EYpiYtyKgx4RW9ccVgkqByZaN2M=
-github.com/labstack/echo/v4 v4.10.2/go.mod h1:OEyqf2//K1DFdE57vw2DRgWY0M7s65IVQO2FzvI4J5k=
-github.com/labstack/gommon v0.4.0 h1:y7cvthEAEbU0yHOf4axH8ZG2NH8knB9iNSoTO8dyIk8=
-github.com/labstack/gommon v0.4.0/go.mod h1:uW6kP17uPlLJsD3ijUYn3/M5bAxtlZhMI6m3MFxTMTM=
+github.com/labstack/echo/v4 v4.12.0 h1:IKpw49IMryVB2p1a4dzwlhP1O2Tf2E0Ir/450lH+kI0=
+github.com/labstack/echo/v4 v4.12.0/go.mod h1:UP9Cr2DJXbOK3Kr9ONYzNowSh7HP0aG0ShAyycHSJvM=
+github.com/labstack/gommon v0.4.2 h1:F8qTUNXgG1+6WQmqoUWnz8WiEU60mXVVw0P4ht1WRA0=
+github.com/labstack/gommon v0.4.2/go.mod h1:QlUFxVM+SNXhDL/Z7YhocGIBYOiwB0mXm1+1bAPHPyU=
github.com/ledongthuc/pdf v0.0.0-20220302134840-0c2507a12d80/go.mod h1:imJHygn/1yfhB7XSJJKlFZKl/J+dCPAknuiaGOshXAs=
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
@@ -723,12 +725,10 @@ github.com/mackerelio/go-osstat v0.2.4/go.mod h1:Zy+qzGdZs3A9cuIqmgbJvwbmLQH9dJv
github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
github.com/mailru/easyjson v0.7.7/go.mod h1:xzfreul335JAWq5oZzymOObrkdz5UnU4kGfJJLY9Nlc=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
-github.com/mattn/go-colorable v0.1.11/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
-github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
@@ -1104,7 +1104,6 @@ github.com/urfave/cli v1.20.0/go.mod h1:70zkFmudgCuE/ngEzBv17Jvp/497gISqfk5gWijb
github.com/urfave/cli v1.22.1/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
-github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/valyala/fasttemplate v1.2.2 h1:lxLXG0uE3Qnshl9QyaK6XJxMXlQZELvChBOCmQD0Loo=
github.com/valyala/fasttemplate v1.2.2/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/weppos/publicsuffix-go v0.12.0/go.mod h1:z3LCPQ38eedDQSwmsSRW4Y7t2L8Ln16JPQ02lHAdn5k=
@@ -1420,10 +1419,7 @@ golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20210603081109-ebe580a85c40/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210616094352-59db8d763f22/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20211103235746-7861aae1554b/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220114195835-da31bd327af9/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220310020820-b874c991c1a5/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -1662,7 +1658,6 @@ gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
-gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA=
gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
gotest.tools/v3 v3.3.0 h1:MfDY1b1/0xN1CyMlQDac0ziEy9zJQd9CXBRRDHw2jJo=
diff --git a/internal/runner/lazy.go b/internal/runner/lazy.go
index 5cb91cfd09..30cca8e1d9 100644
--- a/internal/runner/lazy.go
+++ b/internal/runner/lazy.go
@@ -114,8 +114,13 @@ func GetLazyAuthFetchCallback(opts *AuthLazyFetchOptions) authx.LazyFetchSecret
}
// dynamic values
for k, v := range e.OperatorsResult.DynamicValues {
- if len(v) > 0 {
- data[k] = v[0]
+			// Iterate through all the values and choose the
+			// longest value as the extracted value for this key.
+			for _, value := range v {
+				oldVal, ok := data[k]
+				// Guard the type assertion: a pre-existing non-string
+				// value must not panic — treat it as replaceable.
+				oldStr, isStr := oldVal.(string)
+				if !ok || !isStr || len(value) > len(oldStr) {
+					data[k] = value
+				}
}
}
// named extractors
diff --git a/internal/runner/options.go b/internal/runner/options.go
index e36c248a64..65cd4ae790 100644
--- a/internal/runner/options.go
+++ b/internal/runner/options.go
@@ -171,6 +171,11 @@ func ValidateOptions(options *types.Options) error {
if options.Validate {
validateTemplatePaths(config.DefaultConfig.TemplatesDirectory, options.Templates, options.Workflows)
}
+	// Validate DAST options for both one-shot DAST and DAST server
+	// mode; otherwise -dast-server-token is never length-checked
+	// unless -dast is also passed.
+	if options.DAST || options.DASTServer {
+		if err := validateDASTOptions(options); err != nil {
+			return err
+		}
+	}
// Verify if any of the client certificate options were set since it requires all three to work properly
if options.HasClientCertificates() {
@@ -274,6 +279,14 @@ func validateMissingGitLabOptions(options *types.Options) []string {
return missing
}
+// validateDASTOptions validates DAST-specific flag combinations.
+// It currently only enforces a minimum token length when a DAST
+// server token has been supplied.
+func validateDASTOptions(options *types.Options) error {
+	// Ensure the DAST server token meets minimum length requirement
+	if len(options.DASTServerToken) > 0 && len(options.DASTServerToken) < 16 {
+		return fmt.Errorf("DAST server token must be at least 16 characters long")
+	}
+	return nil
+}
+
func createReportingOptions(options *types.Options) (*reporting.Options, error) {
var reportingOptions = &reporting.Options{}
if options.ReportingConfig != "" {
diff --git a/internal/runner/runner.go b/internal/runner/runner.go
index ad2a8207ab..b9b0643a9b 100644
--- a/internal/runner/runner.go
+++ b/internal/runner/runner.go
@@ -3,8 +3,6 @@ package runner
import (
"context"
"fmt"
- "net/http"
- _ "net/http/pprof"
"os"
"path/filepath"
"reflect"
@@ -13,11 +11,13 @@ import (
"time"
"github.com/projectdiscovery/nuclei/v3/internal/pdcp"
+ "github.com/projectdiscovery/nuclei/v3/internal/server"
"github.com/projectdiscovery/nuclei/v3/pkg/authprovider"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/frequency"
"github.com/projectdiscovery/nuclei/v3/pkg/input/provider"
"github.com/projectdiscovery/nuclei/v3/pkg/installer"
"github.com/projectdiscovery/nuclei/v3/pkg/loader/parser"
+ outputstats "github.com/projectdiscovery/nuclei/v3/pkg/output/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/scan/events"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
uncoverlib "github.com/projectdiscovery/uncover"
@@ -25,6 +25,7 @@ import (
"github.com/projectdiscovery/utils/env"
fileutil "github.com/projectdiscovery/utils/file"
permissionutil "github.com/projectdiscovery/utils/permission"
+ pprofutil "github.com/projectdiscovery/utils/pprof"
updateutils "github.com/projectdiscovery/utils/update"
"github.com/logrusorgru/aurora"
@@ -40,6 +41,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
"github.com/projectdiscovery/nuclei/v3/pkg/core"
"github.com/projectdiscovery/nuclei/v3/pkg/external/customtemplates"
+ fuzzStats "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/input"
parsers "github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow"
"github.com/projectdiscovery/nuclei/v3/pkg/output"
@@ -88,18 +90,20 @@ type Runner struct {
rateLimiter *ratelimit.Limiter
hostErrors hosterrorscache.CacheInterface
resumeCfg *types.ResumeCfg
- pprofServer *http.Server
+ pprofServer *pprofutil.PprofServer
pdcpUploadErrMsg string
inputProvider provider.InputProvider
fuzzFrequencyCache *frequency.Tracker
+ httpStats *outputstats.Tracker
+
//general purpose temporary directory
tmpDir string
parser parser.Parser
httpApiEndpoint *httpapi.Server
+ fuzzStats *fuzzStats.Tracker
+ dastServer *server.DASTServer
}
-const pprofServerAddress = "127.0.0.1:8086"
-
// New creates a new client for running the enumeration process.
func New(options *types.Options) (*Runner, error) {
runner := &Runner{
@@ -216,15 +220,8 @@ func New(options *types.Options) (*Runner, error) {
templates.SeverityColorizer = colorizer.New(runner.colorizer)
if options.EnablePprof {
- server := &http.Server{
- Addr: pprofServerAddress,
- Handler: http.DefaultServeMux,
- }
- gologger.Info().Msgf("Listening pprof debug server on: %s", pprofServerAddress)
- runner.pprofServer = server
- go func() {
- _ = server.ListenAndServe()
- }()
+ runner.pprofServer = pprofutil.NewPprofServer()
+ runner.pprofServer.Start()
}
if options.HttpApiEndpoint != "" {
@@ -256,6 +253,10 @@ func New(options *types.Options) (*Runner, error) {
}
// setup a proxy writer to automatically upload results to PDCP
runner.output = runner.setupPDCPUpload(outputWriter)
+ if options.HTTPStats {
+ runner.httpStats = outputstats.NewTracker()
+ runner.output = output.NewMultiWriter(runner.output, output.NewTrackerWriter(runner.httpStats))
+ }
if options.JSONL && options.EnableProgressBar {
options.StatsJSON = true
@@ -296,6 +297,37 @@ func New(options *types.Options) (*Runner, error) {
}
runner.resumeCfg = resumeCfg
+ if options.DASTReport || options.DASTServer {
+ var err error
+ runner.fuzzStats, err = fuzzStats.NewTracker()
+ if err != nil {
+ return nil, errors.Wrap(err, "could not create fuzz stats db")
+ }
+ if !options.DASTServer {
+ dastServer, err := server.NewStatsServer(runner.fuzzStats)
+ if err != nil {
+ return nil, errors.Wrap(err, "could not create dast server")
+ }
+ runner.dastServer = dastServer
+ }
+ }
+
+ if runner.fuzzStats != nil {
+ outputWriter.JSONLogRequestHook = func(request *output.JSONLogRequest) {
+ if request.Error == "none" || request.Error == "" {
+ return
+ }
+ runner.fuzzStats.RecordErrorEvent(fuzzStats.ErrorEvent{
+ TemplateID: request.Template,
+ URL: request.Input,
+ Error: request.Error,
+ })
+ }
+ }
+
opts := interactsh.DefaultOptions(runner.output, runner.issuesClient, runner.progress)
opts.Debug = runner.options.Debug
opts.NoColor = runner.options.NoColor
@@ -362,6 +394,12 @@ func (r *Runner) runStandardEnumeration(executerOpts protocols.ExecutorOptions,
// Close releases all the resources and cleans up
func (r *Runner) Close() {
+ if r.dastServer != nil {
+ r.dastServer.Close()
+ }
+ if r.httpStats != nil {
+ r.httpStats.DisplayTopStats(r.options.NoColor)
+ }
// dump hosterrors cache
if r.hostErrors != nil {
r.hostErrors.Close()
@@ -380,7 +418,7 @@ func (r *Runner) Close() {
}
protocolinit.Close()
if r.pprofServer != nil {
- _ = r.pprofServer.Shutdown(context.Background())
+ r.pprofServer.Stop()
}
if r.rateLimiter != nil {
r.rateLimiter.Stop()
@@ -439,6 +477,41 @@ func (r *Runner) setupPDCPUpload(writer output.Writer) output.Writer {
// RunEnumeration sets up the input layer for giving input nuclei.
// binary and runs the actual enumeration
func (r *Runner) RunEnumeration() error {
+	// If the user has asked for DAST server mode, run the live
+	// DAST fuzzing server.
+	if r.options.DASTServer {
+		executorOpts := &server.NucleiExecutorOptions{
+			Options:            r.options,
+			Output:             r.output,
+			Progress:           r.progress,
+			Catalog:            r.catalog,
+			IssuesClient:       r.issuesClient,
+			RateLimiter:        r.rateLimiter,
+			Interactsh:         r.interactsh,
+			ProjectFile:        r.projectFile,
+			Browser:            r.browser,
+			Colorizer:          r.colorizer,
+			Parser:             r.parser,
+			TemporaryDirectory: r.tmpDir,
+			FuzzStatsDB:        r.fuzzStats,
+		}
+		dastServer, err := server.New(&server.Options{
+			Address:               r.options.DASTServerAddress,
+			Templates:             r.options.Templates,
+			OutputWriter:          r.output,
+			Verbose:               r.options.Verbose,
+			Token:                 r.options.DASTServerToken,
+			InScope:               r.options.Scope,
+			OutScope:              r.options.OutOfScope,
+			NucleiExecutorOptions: executorOpts,
+		})
+		if err != nil {
+			return err
+		}
+		r.dastServer = dastServer
+		// Server mode blocks here; normal enumeration is skipped.
+		return dastServer.Start()
+	}
+
// If user asked for new templates to be executed, collect the list from the templates' directory.
if r.options.NewTemplates {
if arr := config.DefaultConfig.GetNewAdditions(); len(arr) > 0 {
@@ -624,6 +697,14 @@ func (r *Runner) RunEnumeration() error {
Retries: r.options.Retries,
}, "")
+ if r.dastServer != nil {
+ go func() {
+ if err := r.dastServer.Start(); err != nil {
+ gologger.Error().Msgf("could not start dast server: %v", err)
+ }
+ }()
+ }
+
enumeration := false
var results *atomic.Bool
results, err = r.runStandardEnumeration(executorOpts, store, executorEngine)
@@ -633,6 +714,9 @@ func (r *Runner) RunEnumeration() error {
return err
}
+ if executorOpts.FuzzStatsDB != nil {
+ executorOpts.FuzzStatsDB.Close()
+ }
if r.interactsh != nil {
matched := r.interactsh.Close()
if matched {
diff --git a/internal/server/dedupe.go b/internal/server/dedupe.go
new file mode 100644
index 0000000000..f5c5b775bf
--- /dev/null
+++ b/internal/server/dedupe.go
@@ -0,0 +1,122 @@
+package server
+
+import (
+ "crypto/sha256"
+ "encoding/hex"
+ "net/url"
+ "sort"
+ "strings"
+ "sync"
+
+ "github.com/projectdiscovery/nuclei/v3/pkg/input/types"
+ mapsutil "github.com/projectdiscovery/utils/maps"
+)
+
+// dynamicHeaders lists (lowercase) request headers whose values are
+// volatile across otherwise-identical requests — timestamps, cache
+// validators, auth/session material, correlation IDs — and which are
+// therefore excluded from request hashing.
+var dynamicHeaders = map[string]bool{
+	"date":                true,
+	"if-modified-since":   true,
+	"if-unmodified-since": true,
+	"cache-control":       true,
+	"if-none-match":       true,
+	"if-match":            true,
+	"authorization":       true,
+	"cookie":              true,
+	"x-csrf-token":        true,
+	"content-length":      true,
+	"content-md5":         true,
+	"host":                true,
+	"x-request-id":        true,
+	"x-correlation-id":    true,
+	"user-agent":          true,
+	"referer":             true,
+}
+
+// requestDeduplicator tracks SHA-256 hashes of requests already seen
+// so repeated requests can be skipped.
+type requestDeduplicator struct {
+	hashes map[string]struct{}
+	lock   *sync.RWMutex
+}
+
+// newRequestDeduplicator creates an empty, concurrency-safe request
+// de-duplicator.
+func newRequestDeduplicator() *requestDeduplicator {
+	dedupe := requestDeduplicator{
+		hashes: map[string]struct{}{},
+		lock:   &sync.RWMutex{},
+	}
+	return &dedupe
+}
+
+// isDuplicate reports whether an equivalent request has been seen
+// before, recording the request's hash as a side effect. Unhashable
+// requests are never treated as duplicates.
+func (r *requestDeduplicator) isDuplicate(req *types.RequestResponse) bool {
+	hash, err := hashRequest(req)
+	if err != nil {
+		return false
+	}
+
+	// Check-and-insert under a single write lock: with the previous
+	// RLock-check / Lock-insert split, two concurrent identical
+	// requests could both pass the duplicate check.
+	r.lock.Lock()
+	defer r.lock.Unlock()
+	if _, ok := r.hashes[hash]; ok {
+		return true
+	}
+	r.hashes[hash] = struct{}{}
+	return false
+}
+
+// hashRequest returns a stable SHA-256 hex digest of a request's
+// method, normalized URL, non-dynamic headers and body, used as the
+// de-duplication key.
+func hashRequest(req *types.RequestResponse) (string, error) {
+	normalizedURL, err := normalizeURL(req.URL.URL)
+	if err != nil {
+		return "", err
+	}
+
+	var content strings.Builder
+	content.WriteString(req.Request.Method)
+	content.WriteString(normalizedURL)
+	for _, h := range sortedNonDynamicHeaders(req.Request.Headers) {
+		content.WriteString(h.Key)
+		content.WriteString(h.Value)
+	}
+	if len(req.Request.Body) > 0 {
+		content.Write([]byte(req.Request.Body))
+	}
+
+	// Calculate the SHA256 hash
+	sum := sha256.Sum256([]byte(content.String()))
+	return hex.EncodeToString(sum[:]), nil
+}
+
+// normalizeURL returns a canonical string form of u with query values
+// sorted and an empty path defaulted to "/", so equivalent URLs hash
+// identically.
+func normalizeURL(u *url.URL) (string, error) {
+	// Work on a shallow copy: the previous version mutated the
+	// caller's URL (RawQuery re-encoding, path defaulting) as a
+	// side effect of hashing.
+	c := *u
+
+	query := c.Query()
+	sortedQuery := make(url.Values, len(query))
+	for k, v := range query {
+		sort.Strings(v)
+		sortedQuery[k] = v
+	}
+	c.RawQuery = sortedQuery.Encode()
+
+	if c.Path == "" {
+		c.Path = "/"
+	}
+	return c.String(), nil
+}
+
+// header is a single key/value pair, used to give request headers a
+// deterministic order for hashing.
+type header struct {
+	Key   string
+	Value string
+}
+
+// sortedNonDynamicHeaders filters out volatile headers (see
+// dynamicHeaders) and returns the remainder sorted by key so the
+// resulting hash input is stable.
+func sortedNonDynamicHeaders(headers mapsutil.OrderedMap[string, string]) []header {
+	var result []header
+	headers.Iterate(func(k, v string) bool {
+		if dynamicHeaders[strings.ToLower(k)] {
+			return true
+		}
+		result = append(result, header{Key: k, Value: v})
+		return true
+	})
+	sort.Slice(result, func(i, j int) bool { return result[i].Key < result[j].Key })
+	return result
+}
diff --git a/internal/server/nuclei_sdk.go b/internal/server/nuclei_sdk.go
new file mode 100644
index 0000000000..aad3377437
--- /dev/null
+++ b/internal/server/nuclei_sdk.go
@@ -0,0 +1,199 @@
+package server
+
+import (
+ "context"
+ "fmt"
+ _ "net/http/pprof"
+ "strings"
+
+ "github.com/logrusorgru/aurora"
+ "github.com/projectdiscovery/gologger"
+ "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/frequency"
+ "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
+ "github.com/projectdiscovery/nuclei/v3/pkg/input/formats"
+ "github.com/projectdiscovery/nuclei/v3/pkg/input/provider/http"
+ "github.com/projectdiscovery/nuclei/v3/pkg/projectfile"
+ "gopkg.in/yaml.v3"
+
+ "github.com/pkg/errors"
+ "github.com/projectdiscovery/ratelimit"
+
+ "github.com/projectdiscovery/nuclei/v3/pkg/catalog"
+ "github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader"
+ "github.com/projectdiscovery/nuclei/v3/pkg/core"
+ "github.com/projectdiscovery/nuclei/v3/pkg/input"
+ "github.com/projectdiscovery/nuclei/v3/pkg/loader/parser"
+ parsers "github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow"
+ "github.com/projectdiscovery/nuclei/v3/pkg/output"
+ "github.com/projectdiscovery/nuclei/v3/pkg/progress"
+ "github.com/projectdiscovery/nuclei/v3/pkg/protocols"
+ "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/globalmatchers"
+ "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/hosterrorscache"
+ "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/interactsh"
+ "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/utils/excludematchers"
+ browserEngine "github.com/projectdiscovery/nuclei/v3/pkg/protocols/headless/engine"
+ "github.com/projectdiscovery/nuclei/v3/pkg/reporting"
+ "github.com/projectdiscovery/nuclei/v3/pkg/templates"
+ "github.com/projectdiscovery/nuclei/v3/pkg/types"
+)
+
+// nucleiExecutor bundles the scanning engine, the loaded template
+// store and the options used to execute scans for the DAST server.
+type nucleiExecutor struct {
+	engine       *core.Engine
+	store        *loader.Store
+	options      *NucleiExecutorOptions
+	executorOpts protocols.ExecutorOptions
+}
+
+// NucleiExecutorOptions carries the shared runner components
+// (output, progress, reporting, rate limiting, interactsh, browser,
+// fuzz statistics, etc.) needed to build a nucleiExecutor.
+type NucleiExecutorOptions struct {
+	Options            *types.Options
+	Output             output.Writer
+	Progress           progress.Progress
+	Catalog            catalog.Catalog
+	IssuesClient       reporting.Client
+	RateLimiter        *ratelimit.Limiter
+	Interactsh         *interactsh.Client
+	ProjectFile        *projectfile.ProjectFile
+	Browser            *browserEngine.Browser
+	FuzzStatsDB        *stats.Tracker
+	Colorizer          aurora.Aurora
+	Parser             parser.Parser
+	TemporaryDirectory string
+}
+
+// newNucleiExecutor wires up a complete nuclei scanning engine —
+// executor options, optional host-error cache, workflow loader and
+// loaded template store — from the supplied DAST server options.
+func newNucleiExecutor(opts *NucleiExecutorOptions) (*nucleiExecutor, error) {
+	fuzzFreqCache := frequency.New(frequency.DefaultMaxTrackCount, opts.Options.FuzzParamFrequency)
+	resumeCfg := types.NewResumeCfg()
+
+	// Create the executor options which will be used throughout the execution
+	// stage by the nuclei engine modules.
+	executorOpts := protocols.ExecutorOptions{
+		Output:              opts.Output,
+		Options:             opts.Options,
+		Progress:            opts.Progress,
+		Catalog:             opts.Catalog,
+		IssuesClient:        opts.IssuesClient,
+		RateLimiter:         opts.RateLimiter,
+		Interactsh:          opts.Interactsh,
+		ProjectFile:         opts.ProjectFile,
+		Browser:             opts.Browser,
+		Colorizer:           opts.Colorizer,
+		ResumeCfg:           resumeCfg,
+		ExcludeMatchers:     excludematchers.New(opts.Options.ExcludeMatchers),
+		InputHelper:         input.NewHelper(),
+		TemporaryDirectory:  opts.TemporaryDirectory,
+		Parser:              opts.Parser,
+		FuzzParamsFrequency: fuzzFreqCache,
+		GlobalMatchers:      globalmatchers.New(),
+		FuzzStatsDB:         opts.FuzzStatsDB,
+	}
+
+	if opts.Options.ShouldUseHostError() {
+		maxHostError := opts.Options.MaxHostError
+		// 30 appears to be the flag default; bump it for fuzzing
+		// workloads — TODO confirm against the flag registration.
+		if maxHostError == 30 {
+			maxHostError = 100 // auto adjust for fuzzings
+		}
+		if opts.Options.TemplateThreads > maxHostError {
+			gologger.Info().Msgf("Adjusting max-host-error to the concurrency value: %d", opts.Options.TemplateThreads)
+
+			maxHostError = opts.Options.TemplateThreads
+		}
+
+		cache := hosterrorscache.New(maxHostError, hosterrorscache.DefaultMaxHostsCount, opts.Options.TrackError)
+		cache.SetVerbose(opts.Options.Verbose)
+
+		executorOpts.HostErrorsCache = cache
+	}
+
+	executorEngine := core.New(opts.Options)
+	executorEngine.SetExecuterOptions(executorOpts)
+
+	workflowLoader, err := parsers.NewLoader(&executorOpts)
+	if err != nil {
+		return nil, errors.Wrap(err, "Could not create loader options.")
+	}
+	executorOpts.WorkflowLoader = workflowLoader
+
+	// If using input-file flags, only load http fuzzing based templates.
+	// NOTE(review): loaderConfig captures opts.Options by pointer, so
+	// the DAST flip below is still visible to the loader.
+	loaderConfig := loader.NewConfig(opts.Options, opts.Catalog, executorOpts)
+	if !strings.EqualFold(opts.Options.InputFileMode, "list") || opts.Options.DAST || opts.Options.DASTServer {
+		// if input type is not list (implicitly enable fuzzing)
+		opts.Options.DAST = true
+	}
+	store, err := loader.New(loaderConfig)
+	if err != nil {
+		return nil, errors.Wrap(err, "Could not create loader options.")
+	}
+	store.Load()
+
+	return &nucleiExecutor{
+		engine:       executorEngine,
+		store:        store,
+		options:      opts,
+		executorOpts: executorOpts,
+	}, nil
+}
+
+// proxifyRequest is a request for proxify
+type proxifyRequest struct {
+	URL     string `json:"url"`
+	Request struct {
+		Header map[string]string `json:"header"`
+		Body   string            `json:"body"`
+		Raw    string            `json:"raw"`
+	} `json:"request"`
+}
+
+// ExecuteScan runs all loaded templates and workflows against a single
+// captured request, handed to the engine as a proxify-format YAML
+// document through the HTTP multi-format input provider.
+func (n *nucleiExecutor) ExecuteScan(target PostRequestsHandlerRequest) error {
+	finalTemplates := []*templates.Template{}
+	finalTemplates = append(finalTemplates, n.store.Templates()...)
+	finalTemplates = append(finalTemplates, n.store.Workflows()...)
+
+	if len(finalTemplates) == 0 {
+		return errors.New("no templates provided for scan")
+	}
+
+	// Only URL and the raw HTTP dump are populated; presumably the
+	// provider re-parses headers/body from Raw — TODO confirm.
+	payload := proxifyRequest{
+		URL: target.URL,
+		Request: struct {
+			Header map[string]string `json:"header"`
+			Body   string            `json:"body"`
+			Raw    string            `json:"raw"`
+		}{
+			Raw: target.RawHTTP,
+		},
+	}
+
+	marshalledYaml, err := yaml.Marshal(payload)
+	if err != nil {
+		return fmt.Errorf("error marshalling yaml: %s", err)
+	}
+
+	inputProvider, err := http.NewHttpInputProvider(&http.HttpMultiFormatOptions{
+		InputContents: string(marshalledYaml),
+		InputMode:     "yaml",
+		Options: formats.InputFormatOptions{
+			Variables: make(map[string]interface{}),
+		},
+	})
+	if err != nil {
+		return errors.Wrap(err, "could not create input provider")
+	}
+
+	// We don't care about the result as its a boolean
+	// stating whether we got matches or not
+	_ = n.engine.ExecuteScanWithOpts(context.Background(), finalTemplates, inputProvider, true)
+	return nil
+}
+
+// Close releases executor resources: the fuzz statistics database,
+// the interactsh client and the input helper.
+func (n *nucleiExecutor) Close() {
+	if db := n.executorOpts.FuzzStatsDB; db != nil {
+		db.Close()
+	}
+	if client := n.options.Interactsh; client != nil {
+		_ = client.Close()
+	}
+	if helper := n.executorOpts.InputHelper; helper != nil {
+		_ = helper.Close()
+	}
+}
diff --git a/internal/server/requests_worker.go b/internal/server/requests_worker.go
new file mode 100644
index 0000000000..e811a005ac
--- /dev/null
+++ b/internal/server/requests_worker.go
@@ -0,0 +1,58 @@
+package server
+
+import (
+ "path"
+
+ "github.com/projectdiscovery/gologger"
+ "github.com/projectdiscovery/nuclei/v3/internal/server/scope"
+ "github.com/projectdiscovery/nuclei/v3/pkg/input/types"
+)
+
+// consumeTaskRequest filters one queued request (scheme, static-asset
+// path, scope, de-duplication) and, if it survives all filters, fuzzes
+// it with the nuclei executor. Decrements the queue gauge on exit.
+func (s *DASTServer) consumeTaskRequest(req PostRequestsHandlerRequest) {
+	defer s.endpointsInQueue.Add(-1)
+
+	parsedReq, err := types.ParseRawRequestWithURL(req.RawHTTP, req.URL)
+	if err != nil {
+		gologger.Warning().Msgf("Could not parse raw request: %s\n", err)
+		return
+	}
+
+	if parsedReq.URL.Scheme != "http" && parsedReq.URL.Scheme != "https" {
+		gologger.Warning().Msgf("Invalid scheme: %s\n", parsedReq.URL.Scheme)
+		return
+	}
+
+	// Check filenames and don't allow non-interesting files.
+	// path.Base yields the final path segment (a filename, not an
+	// extension — the old name `extension` was misleading);
+	// IsUninterestingPath extracts the extension itself.
+	filename := path.Base(parsedReq.URL.Path)
+	if filename != "/" && filename != "" && scope.IsUninterestingPath(filename) {
+		gologger.Warning().Msgf("Uninteresting path: %s\n", parsedReq.URL.Path)
+		return
+	}
+
+	inScope, err := s.scopeManager.Validate(parsedReq.URL.URL)
+	if err != nil {
+		gologger.Warning().Msgf("Could not validate scope: %s\n", err)
+		return
+	}
+	if !inScope {
+		gologger.Warning().Msgf("Request is out of scope: %s %s\n", parsedReq.Request.Method, parsedReq.URL.String())
+		return
+	}
+
+	if s.deduplicator.isDuplicate(parsedReq) {
+		gologger.Warning().Msgf("Duplicate request detected: %s %s\n", parsedReq.Request.Method, parsedReq.URL.String())
+		return
+	}
+
+	gologger.Verbose().Msgf("Fuzzing request: %s %s\n", parsedReq.Request.Method, parsedReq.URL.String())
+
+	s.endpointsBeingTested.Add(1)
+	defer s.endpointsBeingTested.Add(-1)
+
+	// Fuzz the request finally
+	if err := s.nucleiExecutor.ExecuteScan(req); err != nil {
+		gologger.Warning().Msgf("Could not run nuclei: %s\n", err)
+	}
+}
diff --git a/internal/server/scope/extensions.go b/internal/server/scope/extensions.go
new file mode 100644
index 0000000000..f7e5929189
--- /dev/null
+++ b/internal/server/scope/extensions.go
@@ -0,0 +1,33 @@
+package scope
+
+import "path"
+
+// IsUninterestingPath reports whether uriPath ends in a static-asset
+// extension that is not worth fuzzing.
+func IsUninterestingPath(uriPath string) bool {
+	// Keys in excludedExtensions are lowercase; lowercase the
+	// extension so uppercase variants (".JPG", ".PDF") are also
+	// excluded — the previous exact-case lookup missed them.
+	extension := strings.ToLower(path.Ext(uriPath))
+	if _, ok := excludedExtensions[extension]; ok {
+		return true
+	}
+	return false
+}
+
+// excludedExtensions is the set of file extensions considered
+// uninteresting for fuzzing (static assets, media, archives, docs).
+// NOTE(review): path.Ext only yields the final ".xyz" segment, so the
+// multi-part keys (".min.js", ".js.map", ".chunk.css.map", …) can only
+// ever match via their last segment (".js", ".map"), which is also
+// listed — confirm whether the multi-part entries are intentional.
+var excludedExtensions = map[string]struct{}{
+	".jpg": {}, ".jpeg": {}, ".png": {}, ".gif": {}, ".bmp": {}, ".tiff": {}, ".ico": {},
+	".mp4": {}, ".avi": {}, ".mov": {}, ".wmv": {}, ".flv": {}, ".mkv": {}, ".webm": {},
+	".mp3": {}, ".wav": {}, ".aac": {}, ".flac": {}, ".ogg": {}, ".wma": {},
+	".zip": {}, ".rar": {}, ".7z": {}, ".tar": {}, ".gz": {}, ".bz2": {},
+	".exe": {}, ".bin": {}, ".iso": {}, ".img": {},
+	".doc": {}, ".docx": {}, ".xls": {}, ".xlsx": {}, ".ppt": {}, ".pptx": {},
+	".pdf": {}, ".psd": {}, ".ai": {}, ".eps": {}, ".indd": {},
+	".swf": {}, ".fla": {}, ".css": {}, ".scss": {}, ".less": {},
+	".js": {}, ".ts": {}, ".jsx": {}, ".tsx": {},
+	".xml": {}, ".json": {}, ".yaml": {}, ".yml": {},
+	".csv": {}, ".txt": {}, ".log": {}, ".md": {},
+	".ttf": {}, ".otf": {}, ".woff": {}, ".woff2": {}, ".eot": {},
+	".svg": {}, ".svgz": {}, ".webp": {}, ".tif": {},
+	".mpg": {}, ".mpeg": {}, ".weba": {},
+	".m4a": {}, ".m4v": {}, ".3gp": {}, ".3g2": {},
+	".ogv": {}, ".ogm": {}, ".oga": {}, ".ogx": {},
+	".srt": {}, ".min.js": {}, ".min.css": {}, ".js.map": {},
+	".min.js.map": {}, ".chunk.css.map": {}, ".hub.js.map": {},
+	".hub.css.map": {}, ".map": {},
+}
diff --git a/internal/server/scope/scope.go b/internal/server/scope/scope.go
new file mode 100644
index 0000000000..31c74a76de
--- /dev/null
+++ b/internal/server/scope/scope.go
@@ -0,0 +1,77 @@
+// From Katana
+package scope
+
+import (
+ "fmt"
+ "net/url"
+ "regexp"
+)
+
// Manager manages scope for crawling process
type Manager struct {
	inScope    []*regexp.Regexp // URL must match at least one of these (when any exist)
	outOfScope []*regexp.Regexp // URL matching any of these is rejected
	noScope    bool             // true when no rules were provided; everything is in scope
}

// NewManager returns a new scope manager for crawling.
//
// inScope and outOfScope are lists of regular expressions matched
// against the full URL string; an error is returned if any pattern
// fails to compile.
func NewManager(inScope, outOfScope []string) (*Manager, error) {
	manager := &Manager{}

	for _, regex := range inScope {
		compiled, err := regexp.Compile(regex)
		if err != nil {
			return nil, fmt.Errorf("could not compile regex %s: %w", regex, err)
		}
		manager.inScope = append(manager.inScope, compiled)
	}
	for _, regex := range outOfScope {
		compiled, err := regexp.Compile(regex)
		if err != nil {
			return nil, fmt.Errorf("could not compile regex %s: %w", regex, err)
		}
		manager.outOfScope = append(manager.outOfScope, compiled)
	}
	// With no rules at all, every URL is considered in scope.
	manager.noScope = len(manager.inScope) == 0 && len(manager.outOfScope) == 0
	return manager, nil
}

// Validate returns true if the URL matches scope rules
func (m *Manager) Validate(URL *url.URL) (bool, error) {
	if m.noScope {
		return true, nil
	}
	return m.validateURL(URL.String())
}

// validateURL applies out-of-scope rules first (any match rejects the
// URL), then requires an in-scope match when in-scope rules exist.
func (m *Manager) validateURL(URL string) (bool, error) {
	for _, item := range m.outOfScope {
		if item.MatchString(URL) {
			return false, nil
		}
	}
	if len(m.inScope) == 0 {
		return true, nil
	}
	for _, item := range m.inScope {
		if item.MatchString(URL) {
			return true, nil
		}
	}
	return false, nil
}
diff --git a/internal/server/scope/scope_test.go b/internal/server/scope/scope_test.go
new file mode 100644
index 0000000000..d2256363db
--- /dev/null
+++ b/internal/server/scope/scope_test.go
@@ -0,0 +1,26 @@
+package scope
+
+import (
+ "testing"
+
+ urlutil "github.com/projectdiscovery/utils/url"
+ "github.com/stretchr/testify/require"
+)
+
+func TestManagerValidate(t *testing.T) {
+ t.Run("url", func(t *testing.T) {
+ manager, err := NewManager([]string{`example`}, []string{`logout\.php`})
+ require.NoError(t, err, "could not create scope manager")
+
+ parsed, _ := urlutil.Parse("https://test.com/index.php/example")
+ validated, err := manager.Validate(parsed.URL)
+ require.NoError(t, err, "could not validate url")
+ require.True(t, validated, "could not get correct in-scope validation")
+
+ parsed, _ = urlutil.Parse("https://test.com/logout.php")
+ validated, err = manager.Validate(parsed.URL)
+ require.NoError(t, err, "could not validate url")
+ require.False(t, validated, "could not get correct out-scope validation")
+ })
+
+}
diff --git a/internal/server/server.go b/internal/server/server.go
new file mode 100644
index 0000000000..259923272f
--- /dev/null
+++ b/internal/server/server.go
@@ -0,0 +1,296 @@
+package server
+
+import (
+ _ "embed"
+ "fmt"
+ "html/template"
+ "net/http"
+ "net/url"
+ "strings"
+ "sync/atomic"
+ "time"
+
+ "github.com/alitto/pond"
+ "github.com/labstack/echo/v4"
+ "github.com/labstack/echo/v4/middleware"
+ "github.com/projectdiscovery/gologger"
+ "github.com/projectdiscovery/nuclei/v3/internal/server/scope"
+ "github.com/projectdiscovery/nuclei/v3/pkg/catalog/config"
+ "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
+ "github.com/projectdiscovery/nuclei/v3/pkg/output"
+ "github.com/projectdiscovery/nuclei/v3/pkg/protocols"
+ "github.com/projectdiscovery/utils/env"
+)
+
// DASTServer is a server that performs execution of fuzzing templates
// on user input passed to the API.
type DASTServer struct {
	// echo is the underlying HTTP server serving the API endpoints.
	echo *echo.Echo
	// options holds the server configuration.
	// NOTE(review): NewStatsServer leaves this nil while setupHandlers
	// and buildURL dereference it — verify stats-only servers cannot
	// reach those paths.
	options *Options
	// tasksPool bounds how many endpoints are fuzzed concurrently.
	tasksPool *pond.WorkerPool
	// deduplicator drops requests already queued for fuzzing.
	deduplicator *requestDeduplicator
	// scopeManager filters target URLs by in/out-of-scope regexes.
	scopeManager *scope.Manager
	// startTime records server creation time, reported by /stats.
	startTime time.Time

	// metrics
	endpointsInQueue     atomic.Int64
	endpointsBeingTested atomic.Int64

	nucleiExecutor *nucleiExecutor
}
+
// Options contains the configuration options for the server.
type Options struct {
	// Address is the address to bind the server to
	Address string
	// Token is the token to use for authentication (optional)
	Token string
	// Templates is the list of templates to use for fuzzing
	Templates []string
	// Verbose is a flag that controls verbose output
	Verbose bool

	// Scope fields for fuzzer
	// InScope / OutScope hold URL regexes delimiting the fuzzing scope.
	InScope  []string
	OutScope []string

	// OutputWriter receives results produced by the executor.
	OutputWriter output.Writer

	// NucleiExecutorOptions configures the embedded nuclei engine.
	NucleiExecutorOptions *NucleiExecutorOptions
}
+
+// New creates a new instance of the DAST server.
+func New(options *Options) (*DASTServer, error) {
+ // If the user has specified no templates, use the default ones
+ // for DAST only.
+ if len(options.Templates) == 0 {
+ options.Templates = []string{"dast/"}
+ }
+ // Disable bulk mode and single threaded execution
+ // by auto adjusting in case of default values
+ if options.NucleiExecutorOptions.Options.BulkSize == 25 && options.NucleiExecutorOptions.Options.TemplateThreads == 25 {
+ options.NucleiExecutorOptions.Options.BulkSize = 1
+ options.NucleiExecutorOptions.Options.TemplateThreads = 1
+ }
+ maxWorkers := env.GetEnvOrDefault[int]("FUZZ_MAX_WORKERS", 1)
+ bufferSize := env.GetEnvOrDefault[int]("FUZZ_BUFFER_SIZE", 10000)
+
+ server := &DASTServer{
+ options: options,
+ tasksPool: pond.New(maxWorkers, bufferSize),
+ deduplicator: newRequestDeduplicator(),
+ startTime: time.Now(),
+ }
+ server.setupHandlers(false)
+
+ executor, err := newNucleiExecutor(options.NucleiExecutorOptions)
+ if err != nil {
+ return nil, err
+ }
+ server.nucleiExecutor = executor
+
+ scopeManager, err := scope.NewManager(
+ options.InScope,
+ options.OutScope,
+ )
+ if err != nil {
+ return nil, err
+ }
+ server.scopeManager = scopeManager
+
+ var builder strings.Builder
+ gologger.Debug().Msgf("Using %d parallel tasks with %d buffer", maxWorkers, bufferSize)
+ if options.Token != "" {
+ builder.WriteString(" (with token)")
+ }
+ gologger.Info().Msgf("DAST Server API: %s", server.buildURL("/fuzz"))
+ gologger.Info().Msgf("DAST Server Stats URL: %s", server.buildURL("/stats"))
+
+ return server, nil
+}
+
+func NewStatsServer(fuzzStatsDB *stats.Tracker) (*DASTServer, error) {
+ server := &DASTServer{
+ nucleiExecutor: &nucleiExecutor{
+ executorOpts: protocols.ExecutorOptions{
+ FuzzStatsDB: fuzzStatsDB,
+ },
+ },
+ }
+ server.setupHandlers(true)
+ gologger.Info().Msgf("Stats UI URL: %s", server.buildURL("/stats"))
+
+ return server, nil
+}
+
+func (s *DASTServer) Close() {
+ s.nucleiExecutor.Close()
+ s.echo.Close()
+ s.tasksPool.StopAndWaitFor(1 * time.Minute)
+}
+
+func (s *DASTServer) buildURL(endpoint string) string {
+ values := make(url.Values)
+ if s.options.Token != "" {
+ values.Set("token", s.options.Token)
+ }
+
+ // Use url.URL struct to safely construct the URL
+ u := &url.URL{
+ Scheme: "http",
+ Host: s.options.Address,
+ Path: endpoint,
+ RawQuery: values.Encode(),
+ }
+ return u.String()
+}
+
+func (s *DASTServer) setupHandlers(onlyStats bool) {
+ e := echo.New()
+ e.Use(middleware.Recover())
+ if s.options.Verbose {
+ cfg := middleware.DefaultLoggerConfig
+ cfg.Skipper = func(c echo.Context) bool {
+ // Skip /stats and /stats.json
+ return c.Request().URL.Path == "/stats" || c.Request().URL.Path == "/stats.json"
+ }
+ e.Use(middleware.LoggerWithConfig(cfg))
+ }
+ e.Use(middleware.CORS())
+
+ if s.options.Token != "" {
+ e.Use(middleware.KeyAuthWithConfig(middleware.KeyAuthConfig{
+ KeyLookup: "query:token",
+ Validator: func(key string, c echo.Context) (bool, error) {
+ return key == s.options.Token, nil
+ },
+ }))
+ }
+
+ e.HideBanner = true
+ // POST /fuzz - Queue a request for fuzzing
+ if !onlyStats {
+ e.POST("/fuzz", s.handleRequest)
+ }
+ e.GET("/stats", s.handleStats)
+ e.GET("/stats.json", s.handleStatsJSON)
+
+ s.echo = e
+}
+
+func (s *DASTServer) Start() error {
+ if err := s.echo.Start(s.options.Address); err != nil && err != http.ErrServerClosed {
+ return err
+ }
+ return nil
+}
+
// PostRequestsHandlerRequest is the request body for the /fuzz POST handler.
type PostRequestsHandlerRequest struct {
	// RawHTTP is the full raw HTTP request to fuzz.
	RawHTTP string `json:"raw_http"`
	// URL is the target URL the raw request corresponds to.
	URL string `json:"url"`
}
+
+func (s *DASTServer) handleRequest(c echo.Context) error {
+ var req PostRequestsHandlerRequest
+ if err := c.Bind(&req); err != nil {
+ fmt.Printf("Error binding request: %s\n", err)
+ return err
+ }
+
+ // Validate the request
+ if req.RawHTTP == "" || req.URL == "" {
+ fmt.Printf("Missing required fields\n")
+ return c.JSON(400, map[string]string{"error": "missing required fields"})
+ }
+
+ s.endpointsInQueue.Add(1)
+ s.tasksPool.Submit(func() {
+ s.consumeTaskRequest(req)
+ })
+ return c.NoContent(200)
+}
+
// StatsResponse is the payload returned by the /stats and /stats.json
// endpoints.
type StatsResponse struct {
	DASTServerInfo            DASTServerInfo     `json:"dast_server_info"`
	DASTScanStatistics        DASTScanStatistics `json:"dast_scan_statistics"`
	DASTScanStatusStatistics  map[string]int64   `json:"dast_scan_status_statistics"`
	DASTScanSeverityBreakdown map[string]int64   `json:"dast_scan_severity_breakdown"`
	DASTScanErrorStatistics   map[string]int64   `json:"dast_scan_error_statistics"`
	DASTScanStartTime         time.Time          `json:"dast_scan_start_time"`
}

// DASTServerInfo describes the running server and the nuclei build
// backing it.
type DASTServerInfo struct {
	NucleiVersion         string `json:"nuclei_version"`
	NucleiTemplateVersion string `json:"nuclei_template_version"`
	NucleiDastServerAPI   string `json:"nuclei_dast_server_api"`
	// NOTE(review): the json tag has a typo ("sever") but is kept as-is
	// since external consumers may already depend on the field name.
	ServerAuthEnabled bool `json:"sever_auth_enabled"`
}

// DASTScanStatistics aggregates counters about the ongoing scan.
type DASTScanStatistics struct {
	EndpointsInQueue      int64 `json:"endpoints_in_queue"`
	EndpointsBeingTested  int64 `json:"endpoints_being_tested"`
	TotalTemplatesLoaded  int64 `json:"total_dast_templates_loaded"`
	TotalTemplatesTested  int64 `json:"total_dast_templates_tested"`
	TotalMatchedResults   int64 `json:"total_matched_results"`
	TotalComponentsTested int64 `json:"total_components_tested"`
	TotalEndpointsTested  int64 `json:"total_endpoints_tested"`
	TotalFuzzedRequests   int64 `json:"total_fuzzed_requests"`
	TotalErroredRequests  int64 `json:"total_errored_requests"`
}
+
+func (s *DASTServer) getStats() (StatsResponse, error) {
+ cfg := config.DefaultConfig
+
+ resp := StatsResponse{
+ DASTServerInfo: DASTServerInfo{
+ NucleiVersion: config.Version,
+ NucleiTemplateVersion: cfg.TemplateVersion,
+ NucleiDastServerAPI: s.buildURL("/fuzz"),
+ ServerAuthEnabled: s.options.Token != "",
+ },
+ DASTScanStartTime: s.startTime,
+ DASTScanStatistics: DASTScanStatistics{
+ EndpointsInQueue: s.endpointsInQueue.Load(),
+ EndpointsBeingTested: s.endpointsBeingTested.Load(),
+ TotalTemplatesLoaded: int64(len(s.nucleiExecutor.store.Templates())),
+ },
+ }
+ if s.nucleiExecutor.executorOpts.FuzzStatsDB != nil {
+ fuzzStats := s.nucleiExecutor.executorOpts.FuzzStatsDB.GetStats()
+ resp.DASTScanSeverityBreakdown = fuzzStats.SeverityCounts
+ resp.DASTScanStatusStatistics = fuzzStats.StatusCodes
+ resp.DASTScanStatistics.TotalMatchedResults = fuzzStats.TotalMatchedResults
+ resp.DASTScanStatistics.TotalComponentsTested = fuzzStats.TotalComponentsTested
+ resp.DASTScanStatistics.TotalEndpointsTested = fuzzStats.TotalEndpointsTested
+ resp.DASTScanStatistics.TotalFuzzedRequests = fuzzStats.TotalFuzzedRequests
+ resp.DASTScanStatistics.TotalTemplatesTested = fuzzStats.TotalTemplatesTested
+ resp.DASTScanStatistics.TotalErroredRequests = fuzzStats.TotalErroredRequests
+ resp.DASTScanErrorStatistics = fuzzStats.ErrorGroupedStats
+ }
+ return resp, nil
+}
+
+//go:embed templates/index.html
+var indexTemplate string
+
+func (s *DASTServer) handleStats(c echo.Context) error {
+ stats, err := s.getStats()
+ if err != nil {
+ return c.JSON(500, map[string]string{"error": err.Error()})
+ }
+
+ tmpl, err := template.New("index").Parse(indexTemplate)
+ if err != nil {
+ return c.JSON(500, map[string]string{"error": err.Error()})
+ }
+ return tmpl.Execute(c.Response().Writer, stats)
+}
+
+func (s *DASTServer) handleStatsJSON(c echo.Context) error {
+ resp, err := s.getStats()
+ if err != nil {
+ return c.JSON(500, map[string]string{"error": err.Error()})
+ }
+ return c.JSONPretty(200, resp, " ")
+}
diff --git a/internal/server/templates/index.html b/internal/server/templates/index.html
new file mode 100644
index 0000000000..4686dabf96
--- /dev/null
+++ b/internal/server/templates/index.html
@@ -0,0 +1,342 @@
+
+
+
+
+ DAST Scan Report
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Nuclei Version{{.DASTServerInfo.NucleiVersion}}
+
Template Version{{.DASTServerInfo.NucleiTemplateVersion}}
+
DAST Server API{{.DASTServerInfo.NucleiDastServerAPI}}
+
Auth Status{{if .DASTServerInfo.ServerAuthEnabled}}ENABLED{{else}}DISABLED{{end}}
+
+
+
+
+
Total Results{{.DASTScanStatistics.TotalMatchedResults}} findings
+
Endpoints In Queue{{.DASTScanStatistics.EndpointsInQueue}}
+
Currently Testing{{.DASTScanStatistics.EndpointsBeingTested}}
+
Components Tested{{.DASTScanStatistics.TotalComponentsTested}}
+
Endpoints Tested{{.DASTScanStatistics.TotalEndpointsTested}}
+
Templates Loaded{{.DASTScanStatistics.TotalTemplatesLoaded}}
+
Templates Tested{{.DASTScanStatistics.TotalTemplatesTested}}
+
Total Requests{{.DASTScanStatistics.TotalFuzzedRequests}}
+
Total Errors{{.DASTScanStatistics.TotalErroredRequests}}
+
+
+
+
+
+
+
Critical
+
{{index .DASTScanSeverityBreakdown "critical"}} findings
+
+
+
High
+
{{index .DASTScanSeverityBreakdown "high"}} findings
+
+
+
Medium
+
{{index .DASTScanSeverityBreakdown "medium"}} findings
+
+
+
Low
+
{{index .DASTScanSeverityBreakdown "low"}} findings
+
+
+
Info
+
{{index .DASTScanSeverityBreakdown "info"}} findings
+
+
+
+
+
+
+
+
Response Codes
+ {{range $status, $count := .DASTScanStatusStatistics}}
+
{{$status}}{{$count}} times
+ {{end}}
+
+
+
+
+
+ {{range $error, $count := .DASTScanErrorStatistics}}
+
+
{{$error}}
+
{{$count}} times
+
+ {{end}}
+
+
+
+
+
+
\ No newline at end of file
diff --git a/pkg/authprovider/authx/cookies_auth.go b/pkg/authprovider/authx/cookies_auth.go
index 7f3e756a71..0b94e85481 100644
--- a/pkg/authprovider/authx/cookies_auth.go
+++ b/pkg/authprovider/authx/cookies_auth.go
@@ -2,6 +2,7 @@ package authx
import (
"net/http"
+ "slices"
"github.com/projectdiscovery/retryablehttp-go"
)
@@ -33,11 +34,27 @@ func (s *CookiesAuthStrategy) Apply(req *http.Request) {
// ApplyOnRR applies the cookies auth strategy to the retryable request
func (s *CookiesAuthStrategy) ApplyOnRR(req *retryablehttp.Request) {
+ existingCookies := req.Cookies()
+
+ for _, newCookie := range s.Data.Cookies {
+ for i, existing := range existingCookies {
+ if existing.Name == newCookie.Key {
+ existingCookies = slices.Delete(existingCookies, i, i+1)
+ break
+ }
+ }
+ }
+
+ // Clear and reset remaining cookies
+ req.Header.Del("Cookie")
+ for _, cookie := range existingCookies {
+ req.AddCookie(cookie)
+ }
+ // Add new cookies
for _, cookie := range s.Data.Cookies {
- c := &http.Cookie{
+ req.AddCookie(&http.Cookie{
Name: cookie.Key,
Value: cookie.Value,
- }
- req.AddCookie(c)
+ })
}
}
diff --git a/pkg/authprovider/authx/dynamic.go b/pkg/authprovider/authx/dynamic.go
index e70f86e1fb..0efee1ea68 100644
--- a/pkg/authprovider/authx/dynamic.go
+++ b/pkg/authprovider/authx/dynamic.go
@@ -9,6 +9,7 @@ import (
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/replacer"
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
errorutil "github.com/projectdiscovery/utils/errors"
+ sliceutil "github.com/projectdiscovery/utils/slice"
)
type LazyFetchSecret func(d *Dynamic) error
@@ -22,7 +23,8 @@ var (
// ex: username and password are dynamic secrets, the actual secret is the token obtained
// after authenticating with the username and password
type Dynamic struct {
- Secret `yaml:",inline"` // this is a static secret that will be generated after the dynamic secret is resolved
+ *Secret `yaml:",inline"` // this is a static secret that will be generated after the dynamic secret is resolved
+ Secrets []*Secret `yaml:"secrets"`
TemplatePath string `json:"template" yaml:"template"`
Variables []KV `json:"variables" yaml:"variables"`
Input string `json:"input" yaml:"input"` // (optional) target for the dynamic secret
@@ -33,6 +35,22 @@ type Dynamic struct {
error error `json:"-" yaml:"-"` // error if any
}
+func (d *Dynamic) GetDomainAndDomainRegex() ([]string, []string) {
+ var domains []string
+ var domainRegex []string
+ for _, secret := range d.Secrets {
+ domains = append(domains, secret.Domains...)
+ domainRegex = append(domainRegex, secret.DomainsRegex...)
+ }
+ if d.Secret != nil {
+ domains = append(domains, d.Secret.Domains...)
+ domainRegex = append(domainRegex, d.Secret.DomainsRegex...)
+ }
+ uniqueDomains := sliceutil.Dedupe(domains)
+ uniqueDomainRegex := sliceutil.Dedupe(domainRegex)
+ return uniqueDomains, uniqueDomainRegex
+}
+
func (d *Dynamic) UnmarshalJSON(data []byte) error {
if err := json.Unmarshal(data, &d); err != nil {
return err
@@ -41,7 +59,7 @@ func (d *Dynamic) UnmarshalJSON(data []byte) error {
if err := json.Unmarshal(data, &s); err != nil {
return err
}
- d.Secret = s
+ d.Secret = &s
return nil
}
@@ -54,9 +72,18 @@ func (d *Dynamic) Validate() error {
if len(d.Variables) == 0 {
return errorutil.New("variables are required for dynamic secret")
}
- d.skipCookieParse = true // skip cookie parsing in dynamic secrets during validation
- if err := d.Secret.Validate(); err != nil {
- return err
+
+ if d.Secret != nil {
+ d.Secret.skipCookieParse = true // skip cookie parsing in dynamic secrets during validation
+ if err := d.Secret.Validate(); err != nil {
+ return err
+ }
+ }
+ for _, secret := range d.Secrets {
+ secret.skipCookieParse = true
+ if err := secret.Validate(); err != nil {
+ return err
+ }
}
return nil
}
@@ -74,76 +101,98 @@ func (d *Dynamic) SetLazyFetchCallback(callback LazyFetchSecret) {
return fmt.Errorf("no extracted values found for dynamic secret")
}
- // evaluate headers
- for i, header := range d.Headers {
- if strings.Contains(header.Value, "{{") {
- header.Value = replacer.Replace(header.Value, d.Extracted)
+ if d.Secret != nil {
+ if err := d.applyValuesToSecret(d.Secret); err != nil {
+ return err
}
- if strings.Contains(header.Key, "{{") {
- header.Key = replacer.Replace(header.Key, d.Extracted)
- }
- d.Headers[i] = header
}
- // evaluate cookies
- for i, cookie := range d.Cookies {
- if strings.Contains(cookie.Value, "{{") {
- cookie.Value = replacer.Replace(cookie.Value, d.Extracted)
- }
- if strings.Contains(cookie.Key, "{{") {
- cookie.Key = replacer.Replace(cookie.Key, d.Extracted)
+ for _, secret := range d.Secrets {
+ if err := d.applyValuesToSecret(secret); err != nil {
+ return err
}
- if strings.Contains(cookie.Raw, "{{") {
- cookie.Raw = replacer.Replace(cookie.Raw, d.Extracted)
- }
- d.Cookies[i] = cookie
}
+ return nil
+ }
+}
- // evaluate query params
- for i, query := range d.Params {
- if strings.Contains(query.Value, "{{") {
- query.Value = replacer.Replace(query.Value, d.Extracted)
- }
- if strings.Contains(query.Key, "{{") {
- query.Key = replacer.Replace(query.Key, d.Extracted)
- }
- d.Params[i] = query
+func (d *Dynamic) applyValuesToSecret(secret *Secret) error {
+ // evaluate headers
+ for i, header := range secret.Headers {
+ if strings.Contains(header.Value, "{{") {
+ header.Value = replacer.Replace(header.Value, d.Extracted)
+ }
+ if strings.Contains(header.Key, "{{") {
+ header.Key = replacer.Replace(header.Key, d.Extracted)
}
+ secret.Headers[i] = header
+ }
- // check username, password and token
- if strings.Contains(d.Username, "{{") {
- d.Username = replacer.Replace(d.Username, d.Extracted)
+ // evaluate cookies
+ for i, cookie := range secret.Cookies {
+ if strings.Contains(cookie.Value, "{{") {
+ cookie.Value = replacer.Replace(cookie.Value, d.Extracted)
}
- if strings.Contains(d.Password, "{{") {
- d.Password = replacer.Replace(d.Password, d.Extracted)
+ if strings.Contains(cookie.Key, "{{") {
+ cookie.Key = replacer.Replace(cookie.Key, d.Extracted)
}
- if strings.Contains(d.Token, "{{") {
- d.Token = replacer.Replace(d.Token, d.Extracted)
+ if strings.Contains(cookie.Raw, "{{") {
+ cookie.Raw = replacer.Replace(cookie.Raw, d.Extracted)
}
+ secret.Cookies[i] = cookie
+ }
+
+ // evaluate query params
+ for i, query := range secret.Params {
+ if strings.Contains(query.Value, "{{") {
+ query.Value = replacer.Replace(query.Value, d.Extracted)
+ }
+ if strings.Contains(query.Key, "{{") {
+ query.Key = replacer.Replace(query.Key, d.Extracted)
+ }
+ secret.Params[i] = query
+ }
- // now attempt to parse the cookies
- d.skipCookieParse = false
- for i, cookie := range d.Cookies {
- if cookie.Raw != "" {
- if err := cookie.Parse(); err != nil {
- return fmt.Errorf("[%s] invalid raw cookie in cookiesAuth: %s", d.TemplatePath, err)
- }
- d.Cookies[i] = cookie
+ // check username, password and token
+ if strings.Contains(secret.Username, "{{") {
+ secret.Username = replacer.Replace(secret.Username, d.Extracted)
+ }
+ if strings.Contains(secret.Password, "{{") {
+ secret.Password = replacer.Replace(secret.Password, d.Extracted)
+ }
+ if strings.Contains(secret.Token, "{{") {
+ secret.Token = replacer.Replace(secret.Token, d.Extracted)
+ }
+
+ // now attempt to parse the cookies
+ secret.skipCookieParse = false
+ for i, cookie := range secret.Cookies {
+ if cookie.Raw != "" {
+ if err := cookie.Parse(); err != nil {
+ return fmt.Errorf("[%s] invalid raw cookie in cookiesAuth: %s", d.TemplatePath, err)
}
+ secret.Cookies[i] = cookie
}
- return nil
}
+ return nil
}
-// GetStrategy returns the auth strategy for the dynamic secret
-func (d *Dynamic) GetStrategy() AuthStrategy {
+// GetStrategy returns the auth strategies for the dynamic secret
+func (d *Dynamic) GetStrategies() []AuthStrategy {
if !d.fetched {
_ = d.Fetch(true)
}
if d.error != nil {
return nil
}
- return d.Secret.GetStrategy()
+ var strategies []AuthStrategy
+ if d.Secret != nil {
+ strategies = append(strategies, d.Secret.GetStrategy())
+ }
+ for _, secret := range d.Secrets {
+ strategies = append(strategies, secret.GetStrategy())
+ }
+ return strategies
}
// Fetch fetches the dynamic secret
diff --git a/pkg/authprovider/authx/strategy.go b/pkg/authprovider/authx/strategy.go
index 8204083989..54ff8e81c4 100644
--- a/pkg/authprovider/authx/strategy.go
+++ b/pkg/authprovider/authx/strategy.go
@@ -24,16 +24,22 @@ type DynamicAuthStrategy struct {
// Apply applies the strategy to the request
func (d *DynamicAuthStrategy) Apply(req *http.Request) {
- strategy := d.Dynamic.GetStrategy()
- if strategy != nil {
- strategy.Apply(req)
+ strategies := d.Dynamic.GetStrategies()
+ if strategies == nil {
+ return
+ }
+ for _, s := range strategies {
+ if s == nil {
+ continue
+ }
+ s.Apply(req)
}
}
// ApplyOnRR applies the strategy to the retryable request
func (d *DynamicAuthStrategy) ApplyOnRR(req *retryablehttp.Request) {
- strategy := d.Dynamic.GetStrategy()
- if strategy != nil {
- strategy.ApplyOnRR(req)
+ strategy := d.Dynamic.GetStrategies()
+ for _, s := range strategy {
+ s.ApplyOnRR(req)
}
}
diff --git a/pkg/authprovider/file.go b/pkg/authprovider/file.go
index 3a32a94fe4..64cfcb8793 100644
--- a/pkg/authprovider/file.go
+++ b/pkg/authprovider/file.go
@@ -85,8 +85,10 @@ func (f *FileAuthProvider) init() {
}
}
for _, dynamic := range f.store.Dynamic {
- if len(dynamic.DomainsRegex) > 0 {
- for _, domain := range dynamic.DomainsRegex {
+ domain, domainsRegex := dynamic.GetDomainAndDomainRegex()
+
+ if len(domainsRegex) > 0 {
+ for _, domain := range domainsRegex {
if f.compiled == nil {
f.compiled = make(map[*regexp.Regexp][]authx.AuthStrategy)
}
@@ -101,7 +103,7 @@ func (f *FileAuthProvider) init() {
}
}
}
- for _, domain := range dynamic.Domains {
+ for _, domain := range domain {
if f.domains == nil {
f.domains = make(map[string][]authx.AuthStrategy)
}
diff --git a/pkg/catalog/loader/loader.go b/pkg/catalog/loader/loader.go
index 31ac7f41e9..b9b145cf46 100644
--- a/pkg/catalog/loader/loader.go
+++ b/pkg/catalog/loader/loader.go
@@ -542,7 +542,8 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ
// Skip DAST filter when loading auth templates
if store.ID() != AuthStoreId && store.config.ExecutorOptions.Options.DAST {
// check if the template is a DAST template
- if parsed.IsFuzzing() {
+ // also allow global matchers template to be loaded
+ if parsed.IsFuzzing() || parsed.Options.GlobalMatchers != nil && parsed.Options.GlobalMatchers.HasMatchers() {
loadTemplate(parsed)
}
} else if len(parsed.RequestsHeadless) > 0 && !store.config.ExecutorOptions.Options.Headless {
diff --git a/pkg/fuzz/analyzers/analyzers.go b/pkg/fuzz/analyzers/analyzers.go
index 8eedb6b71b..6266e8bb01 100644
--- a/pkg/fuzz/analyzers/analyzers.go
+++ b/pkg/fuzz/analyzers/analyzers.go
@@ -81,18 +81,11 @@ func ApplyPayloadTransformations(value string) string {
}
const letterBytes = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
-const (
- letterIdxBits = 6 // 6 bits to represent a letter index
- letterIdxMask = 1< 0 && varY > 0 {
+ o.correlation = covXY / (math.Sqrt(varX) * math.Sqrt(varY))
+ } else {
+ o.correlation = 0.0
}
}
@@ -164,8 +210,17 @@ func (o *simpleLinearRegression) Predict(x float64) float64 {
return o.slope*x + o.intercept
}
-func (o *simpleLinearRegression) IsWithinConfidence(correlationErrorRange float64, expectedSlope float64, slopeErrorRange float64,
-) bool {
- return o.correlation > 1.0-correlationErrorRange &&
- math.Abs(expectedSlope-o.slope) < slopeErrorRange
+func (o *simpleLinearRegression) IsWithinConfidence(correlationErrorRange float64, expectedSlope float64, slopeErrorRange float64) bool {
+ if o.count < 2 {
+ return true
+ }
+ // Check if slope is within error range of expected slope
+ // Also consider cases where slope is approximately 2x of expected slope
+ // as this can happen with time-based responses
+ slopeDiff := math.Abs(expectedSlope - o.slope)
+ slope2xDiff := math.Abs(expectedSlope*2 - o.slope)
+ if slopeDiff > slopeErrorRange && slope2xDiff > slopeErrorRange {
+ return false
+ }
+ return o.correlation > 1.0-correlationErrorRange
}
diff --git a/pkg/fuzz/analyzers/time/time_delay_test.go b/pkg/fuzz/analyzers/time/time_delay_test.go
index 8a71243595..91b2ba657a 100644
--- a/pkg/fuzz/analyzers/time/time_delay_test.go
+++ b/pkg/fuzz/analyzers/time/time_delay_test.go
@@ -3,141 +3,498 @@
package time
import (
- "math"
"math/rand"
+ "reflect"
"testing"
"time"
-
- "github.com/stretchr/testify/require"
)
-const (
- correlationErrorRange = float64(0.1)
- slopeErrorRange = float64(0.2)
-)
+// This test suite verifies the timing dependency detection algorithm by testing various scenarios:
+//
+// Test Categories:
+// 1. Perfect Linear Cases
+// - TestPerfectLinear: Basic case with slope=1, no noise
+// - TestPerfectLinearSlopeOne_NoNoise: Similar to above but with different parameters
+// - TestPerfectLinearSlopeTwo_NoNoise: Tests detection of slope=2 relationship
+//
+// 2. Noisy Cases
+// - TestLinearWithNoise: Verifies detection works with moderate noise (±0.2s)
+// - TestNoisyLinear: Similar but with different noise parameters
+// - TestHighNoiseConcealsSlope: Verifies detection fails with extreme noise (±5s)
+//
+// 3. No Correlation Cases
+// - TestNoCorrelation: Basic case where delay has no effect
+// - TestNoCorrelationHighBaseline: High baseline (~15s) masks any delay effect
+// - TestNegativeSlopeScenario: Verifies detection rejects negative correlations
+//
+// 4. Edge Cases
+// - TestMinimalData: Tests behavior with minimal data points (2 requests)
+// - TestLargeNumberOfRequests: Tests stability with many data points (20 requests)
+// - TestChangingBaseline: Tests detection with shifting baseline mid-test
+// - TestHighBaselineLowSlope: Tests detection of subtle correlations (slope=0.85)
+//
+// ZAP Test Cases:
+//
+// 1. Alternating Sequence Tests
+// - TestAlternatingSequences: Verifies correct alternation between high and low delays
+//
+// 2. Non-Injectable Cases
+// - TestNonInjectableQuickFail: Tests quick failure when response time < requested delay
+// - TestSlowNonInjectableCase: Tests early termination with consistently high response times
+// - TestRealWorldNonInjectableCase: Tests behavior with real-world response patterns
+//
+// 3. Error Tolerance Tests
+// - TestSmallErrorDependence: Verifies detection works with small random variations
+//
+// Key Parameters Tested:
+// - requestsLimit: Number of requests to make (2-20)
+// - highSleepTimeSeconds: Maximum delay to test (typically 5s)
+// - correlationErrorRange: Acceptable deviation from perfect correlation (0.05-0.3)
+// - slopeErrorRange: Acceptable deviation from expected slope (0.1-1.5)
+//
+// The test suite uses various mock senders (perfectLinearSender, noCorrelationSender, etc.)
+// to simulate different timing behaviors and verify the detection algorithm works correctly
+// across a wide range of scenarios.
-var rng = rand.New(rand.NewSource(time.Now().UnixNano()))
+// Mock request sender that simulates a perfect linear relationship:
+// Observed delay = baseline + requested_delay
+func perfectLinearSender(baseline float64) func(delay int) (float64, error) {
+ return func(delay int) (float64, error) {
+ // simulate some processing time
+ time.Sleep(10 * time.Millisecond) // just a small artificial sleep to mimic network
+ return baseline + float64(delay), nil
+ }
+}
-func Test_should_generate_alternating_sequences(t *testing.T) {
- var generatedDelays []float64
- reqSender := func(delay int) (float64, error) {
- generatedDelays = append(generatedDelays, float64(delay))
- return float64(delay), nil
+// Mock request sender that simulates no correlation:
+// The response time is random around a certain constant baseline, ignoring requested delay.
+func noCorrelationSender(baseline, noiseAmplitude float64) func(int) (float64, error) {
+ return func(delay int) (float64, error) {
+ time.Sleep(10 * time.Millisecond)
+ noise := 0.0
+ if noiseAmplitude > 0 {
+ noise = (rand.Float64()*2 - 1) * noiseAmplitude
+ }
+ return baseline + noise, nil
}
- matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
- require.NoError(t, err)
- require.True(t, matched)
- require.EqualValues(t, []float64{15, 1, 15, 1}, generatedDelays)
}
-func Test_should_giveup_non_injectable(t *testing.T) {
- var timesCalled int
- reqSender := func(delay int) (float64, error) {
- timesCalled++
- return 0.5, nil
+// Mock request sender that simulates partial linearity but with some noise.
+func noisyLinearSender(baseline float64) func(delay int) (float64, error) {
+ return func(delay int) (float64, error) {
+ time.Sleep(10 * time.Millisecond)
+ // Add a constant 0.2s offset on top of the linear relationship (deterministic, not random noise)
+ noise := 0.2
+ return baseline + float64(delay) + noise, nil
}
- matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
- require.NoError(t, err)
- require.False(t, matched)
- require.Equal(t, 1, timesCalled)
}
-func Test_should_giveup_slow_non_injectable(t *testing.T) {
- var timesCalled int
- reqSender := func(delay int) (float64, error) {
- timesCalled++
- return 10 + rng.Float64()*0.5, nil
+func TestPerfectLinear(t *testing.T) {
+ // Expect near-perfect correlation and slope ~ 1.0
+ requestsLimit := 6 // 3 pairs: enough data for stable regression
+ highSleepTimeSeconds := 5
+ corrErrRange := 0.1
+ slopeErrRange := 0.2
+ baseline := 5.0
+
+ sender := perfectLinearSender(5.0) // baseline 5s, observed = 5s + requested_delay
+ match, reason, err := checkTimingDependency(
+ requestsLimit,
+ highSleepTimeSeconds,
+ corrErrRange,
+ slopeErrRange,
+ baseline,
+ sender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if !match {
+ t.Fatalf("Expected a match but got none. Reason: %s", reason)
}
- matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
- require.NoError(t, err)
- require.False(t, matched)
- require.LessOrEqual(t, timesCalled, 3)
}
-func Test_should_giveup_slow_non_injectable_realworld(t *testing.T) {
- var timesCalled int
- var iteration = 0
- counts := []float64{21, 11, 21, 11}
- reqSender := func(delay int) (float64, error) {
- timesCalled++
- iteration++
- return counts[iteration-1], nil
+func TestNoCorrelation(t *testing.T) {
+ // Expect no match because requested delay doesn't influence observed delay
+ requestsLimit := 6
+ highSleepTimeSeconds := 5
+ corrErrRange := 0.1
+ slopeErrRange := 0.5
+ baseline := 8.0
+
+ sender := noCorrelationSender(8.0, 0.1)
+ match, reason, err := checkTimingDependency(
+ requestsLimit,
+ highSleepTimeSeconds,
+ corrErrRange,
+ slopeErrRange,
+ baseline,
+ sender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if match {
+ t.Fatalf("Expected no match but got one. Reason: %s", reason)
}
- matched, _, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
- require.NoError(t, err)
- require.False(t, matched)
- require.LessOrEqual(t, timesCalled, 4)
}
-func Test_should_detect_dependence_with_small_error(t *testing.T) {
- reqSender := func(delay int) (float64, error) {
- return float64(delay) + rng.Float64()*0.5, nil
+func TestNoisyLinear(t *testing.T) {
+ // Even with some noise, it should detect a strong positive correlation if
+ // we allow a slightly bigger margin for slope/correlation.
+ requestsLimit := 10 // More requests to average out noise
+ highSleepTimeSeconds := 5
+ corrErrRange := 0.2 // allow some lower correlation due to noise
+ slopeErrRange := 0.5 // slope may deviate slightly
+ baseline := 2.0
+
+ sender := noisyLinearSender(2.0) // baseline 2s, observed ~ 2s + requested_delay ±0.2
+ match, reason, err := checkTimingDependency(
+ requestsLimit,
+ highSleepTimeSeconds,
+ corrErrRange,
+ slopeErrRange,
+ baseline,
+ sender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+
+ // We expect a match since it's still roughly linear. The slope should be close to 1.
+ if !match {
+ t.Fatalf("Expected a match in noisy linear test but got none. Reason: %s", reason)
+ }
+}
+
+func TestMinimalData(t *testing.T) {
+ // With too few requests, correlation might not be stable.
+ // Here, we send only 2 requests (1 pair) and see if the logic handles it gracefully.
+ requestsLimit := 2
+ highSleepTimeSeconds := 5
+ corrErrRange := 0.3
+ slopeErrRange := 0.5
+ baseline := 5.0
+
+ // Perfect linear sender again
+ sender := perfectLinearSender(5.0)
+ match, reason, err := checkTimingDependency(
+ requestsLimit,
+ highSleepTimeSeconds,
+ corrErrRange,
+ slopeErrRange,
+ baseline,
+ sender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if !match {
+ t.Fatalf("Expected match but got none. Reason: %s", reason)
}
- matched, reason, err := checkTimingDependency(4, 15, correlationErrorRange, slopeErrorRange, reqSender)
- require.NoError(t, err)
- require.True(t, matched)
- require.NotEmpty(t, reason)
}
-func Test_LinearRegression_Numerical_stability(t *testing.T) {
- variables := [][]float64{
- {1, 1}, {2, 2}, {3, 3}, {4, 4}, {5, 5}, {1, 1}, {2, 2}, {2, 2}, {2, 2},
+// Utility functions to generate different behaviors
+
+// linearSender returns a sender that calculates observed delay as:
+// observed = baseline + slope * requested_delay + noise
+func linearSender(baseline, slope, noiseAmplitude float64) func(int) (float64, error) {
+ return func(delay int) (float64, error) {
+ time.Sleep(10 * time.Millisecond)
+ noise := 0.0
+ if noiseAmplitude > 0 {
+ noise = (rand.Float64()*2 - 1) * noiseAmplitude // random noise in [-noiseAmplitude, noiseAmplitude]
+ }
+ return baseline + slope*float64(delay) + noise, nil
}
- slope := float64(1)
- correlation := float64(1)
+}
- regression := newSimpleLinearRegression()
- for _, v := range variables {
- regression.AddPoint(v[0], v[1])
+// negativeSlopeSender just for completeness - higher delay = less observed time
+func negativeSlopeSender(baseline float64) func(int) (float64, error) {
+ return func(delay int) (float64, error) {
+ time.Sleep(10 * time.Millisecond)
+ return baseline - float64(delay)*2.0, nil
}
- require.True(t, almostEqual(regression.slope, slope))
- require.True(t, almostEqual(regression.correlation, correlation))
}
-func Test_LinearRegression_exact_verify(t *testing.T) {
- variables := [][]float64{
- {1, 1}, {2, 3},
+func TestPerfectLinearSlopeOne_NoNoise(t *testing.T) {
+ baseline := 2.0
+ match, reason, err := checkTimingDependency(
+ 10, // requestsLimit
+ 5, // highSleepTimeSeconds
+ 0.1, // correlationErrorRange
+ 0.2, // slopeErrorRange (allowing slope between 0.8 and 1.2)
+ baseline,
+ linearSender(baseline, 1.0, 0.0),
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
}
- slope := float64(2)
- correlation := float64(1)
+ if !match {
+ t.Fatalf("Expected a match for perfect linear slope=1. Reason: %s", reason)
+ }
+}
- regression := newSimpleLinearRegression()
- for _, v := range variables {
- regression.AddPoint(v[0], v[1])
+func TestPerfectLinearSlopeTwo_NoNoise(t *testing.T) {
+ baseline := 2.0
+ // slope=2 means observed = baseline + 2*requested_delay
+ match, reason, err := checkTimingDependency(
+ 10,
+ 5,
+ 0.1, // correlation must still be good
+ 1.5, // allow slope in range (0.5 to 2.5), we should be close to 2.0 anyway
+ baseline,
+ linearSender(baseline, 2.0, 0.0),
+ )
+ if err != nil {
+ t.Fatalf("Error: %v", err)
+ }
+ if !match {
+ t.Fatalf("Expected a match for slope=2. Reason: %s", reason)
}
- require.True(t, almostEqual(regression.slope, slope))
- require.True(t, almostEqual(regression.correlation, correlation))
}
-func Test_LinearRegression_known_verify(t *testing.T) {
- variables := [][]float64{
- {1, 1.348520581}, {2, 2.524046187}, {3, 3.276944688}, {4, 4.735374498}, {5, 5.150291657},
+func TestLinearWithNoise(t *testing.T) {
+ baseline := 5.0
+ // slope=1 but with noise ±0.2 seconds
+ match, reason, err := checkTimingDependency(
+ 12,
+ 5,
+ 0.2, // correlationErrorRange relaxed to account for noise
+ 0.5, // slopeErrorRange also relaxed
+ baseline,
+ linearSender(baseline, 1.0, 0.2),
+ )
+ if err != nil {
+ t.Fatalf("Error: %v", err)
}
- slope := float64(0.981487046)
- correlation := float64(0.979228906)
+ if !match {
+ t.Fatalf("Expected a match for noisy linear data. Reason: %s", reason)
+ }
+}
- regression := newSimpleLinearRegression()
- for _, v := range variables {
- regression.AddPoint(v[0], v[1])
+func TestNoCorrelationHighBaseline(t *testing.T) {
+ baseline := 15.0
+ // baseline ~15s, requested delays won't matter
+ match, reason, err := checkTimingDependency(
+ 10,
+ 5,
+ 0.1, // correlation should be near zero, so no match expected
+ 0.5,
+ baseline,
+ noCorrelationSender(baseline, 0.1),
+ )
+ if err != nil {
+ t.Fatalf("Error: %v", err)
+ }
+ if match {
+ t.Fatalf("Expected no match for no correlation scenario. Got: %s", reason)
}
- require.True(t, almostEqual(regression.slope, slope))
- require.True(t, almostEqual(regression.correlation, correlation))
}
-func Test_LinearRegression_nonlinear_verify(t *testing.T) {
- variables := [][]float64{
- {1, 2}, {2, 4}, {3, 8}, {4, 16}, {5, 32},
+func TestNegativeSlopeScenario(t *testing.T) {
+ baseline := 10.0
+ // Increasing delay decreases observed time
+ match, reason, err := checkTimingDependency(
+ 10,
+ 5,
+ 0.2,
+ 0.5,
+ baseline,
+ negativeSlopeSender(baseline),
+ )
+ if err != nil {
+ t.Fatalf("Error: %v", err)
}
+ if match {
+ t.Fatalf("Expected no match in negative slope scenario. Reason: %s", reason)
+ }
+}
- regression := newSimpleLinearRegression()
- for _, v := range variables {
- regression.AddPoint(v[0], v[1])
+func TestLargeNumberOfRequests(t *testing.T) {
+ baseline := 1.0
+ // 20 requests, slope=1.0, no noise. Should be very stable and produce a very high correlation.
+ match, reason, err := checkTimingDependency(
+ 20,
+ 5,
+ 0.05, // very strict correlation requirement
+ 0.1, // very strict slope range
+ baseline,
+ linearSender(baseline, 1.0, 0.0),
+ )
+ if err != nil {
+ t.Fatalf("Error: %v", err)
+ }
+ if !match {
+ t.Fatalf("Expected a strong match with many requests and perfect linearity. Reason: %s", reason)
}
- require.Less(t, regression.correlation, 0.9)
}
-const float64EqualityThreshold = 1e-8
+func TestHighBaselineLowSlope(t *testing.T) {
+ baseline := 15.0
+ match, reason, err := checkTimingDependency(
+ 10,
+ 5,
+ 0.2,
+ 0.2, // expecting slope around 0.85, allow range ~0.65 to 1.05
+ baseline,
+ linearSender(baseline, 0.85, 0.0),
+ )
+ if err != nil {
+ t.Fatalf("Error: %v", err)
+ }
+ if !match {
+ t.Fatalf("Expected a match for slope=0.5 linear scenario. Reason: %s", reason)
+ }
+}
-func almostEqual(a, b float64) bool {
- return math.Abs(a-b) <= float64EqualityThreshold
+func TestHighNoiseConcealsSlope(t *testing.T) {
+ baseline := 5.0
+ // slope=1, but noise=5 seconds is huge and might conceal the correlation.
+ // With large noise, the test may fail to detect correlation.
+ match, reason, err := checkTimingDependency(
+ 12,
+ 5,
+ 0.1, // still strict
+ 0.2, // still strict
+ baseline,
+ linearSender(baseline, 1.0, 5.0),
+ )
+ if err != nil {
+ t.Fatalf("Error: %v", err)
+ }
+ // Expect no match because the noise level is too high to establish a reliable correlation.
+ if match {
+ t.Fatalf("Expected no match due to extreme noise. Reason: %s", reason)
+ }
+}
+
+func TestAlternatingSequences(t *testing.T) {
+ baseline := 0.0
+ var generatedDelays []float64
+ reqSender := func(delay int) (float64, error) {
+ generatedDelays = append(generatedDelays, float64(delay))
+ return float64(delay), nil
+ }
+ match, reason, err := checkTimingDependency(
+ 4, // requestsLimit
+ 15, // highSleepTimeSeconds
+ 0.1, // correlationErrorRange
+ 0.2, // slopeErrorRange
+ baseline,
+ reqSender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if !match {
+ t.Fatalf("Expected a match but got none. Reason: %s", reason)
+ }
+ // Verify alternating sequence of delays
+ expectedDelays := []float64{15, 3, 15, 3}
+ if !reflect.DeepEqual(generatedDelays, expectedDelays) {
+ t.Fatalf("Expected delays %v but got %v", expectedDelays, generatedDelays)
+ }
+}
+
+func TestNonInjectableQuickFail(t *testing.T) {
+ baseline := 0.5
+ var timesCalled int
+ reqSender := func(delay int) (float64, error) {
+ timesCalled++
+ return 0.5, nil // Return value less than delay
+ }
+ match, _, err := checkTimingDependency(
+ 4, // requestsLimit
+ 15, // highSleepTimeSeconds
+ 0.1, // correlationErrorRange
+ 0.2, // slopeErrorRange
+ baseline,
+ reqSender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if match {
+ t.Fatal("Expected no match for non-injectable case")
+ }
+ if timesCalled != 1 {
+ t.Fatalf("Expected quick fail after 1 call, got %d calls", timesCalled)
+ }
+}
+
+func TestSlowNonInjectableCase(t *testing.T) {
+ baseline := 10.0
+ rng := rand.New(rand.NewSource(time.Now().UnixNano()))
+ var timesCalled int
+ reqSender := func(delay int) (float64, error) {
+ timesCalled++
+ return 10 + rng.Float64()*0.5, nil
+ }
+ match, _, err := checkTimingDependency(
+ 4, // requestsLimit
+ 15, // highSleepTimeSeconds
+ 0.1, // correlationErrorRange
+ 0.2, // slopeErrorRange
+ baseline,
+ reqSender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if match {
+ t.Fatal("Expected no match for slow non-injectable case")
+ }
+ if timesCalled > 3 {
+ t.Fatalf("Expected early termination (≤3 calls), got %d calls", timesCalled)
+ }
+}
+
+func TestRealWorldNonInjectableCase(t *testing.T) {
+ baseline := 0.0
+ var iteration int
+ counts := []float64{11, 21, 11, 21, 11}
+ reqSender := func(delay int) (float64, error) {
+ iteration++
+ return counts[iteration-1], nil
+ }
+ match, _, err := checkTimingDependency(
+ 4, // requestsLimit
+ 15, // highSleepTimeSeconds
+ 0.1, // correlationErrorRange
+ 0.2, // slopeErrorRange
+ baseline,
+ reqSender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if match {
+ t.Fatal("Expected no match for real-world non-injectable case")
+ }
+ if iteration > 4 {
+ t.Fatalf("Expected ≤4 iterations, got %d", iteration)
+ }
+}
+
+func TestSmallErrorDependence(t *testing.T) {
+ baseline := 0.0
+ rng := rand.New(rand.NewSource(time.Now().UnixNano()))
+ reqSender := func(delay int) (float64, error) {
+ return float64(delay) + rng.Float64()*0.5, nil
+ }
+ match, reason, err := checkTimingDependency(
+ 4, // requestsLimit
+ 15, // highSleepTimeSeconds
+ 0.1, // correlationErrorRange
+ 0.2, // slopeErrorRange
+ baseline,
+ reqSender,
+ )
+ if err != nil {
+ t.Fatalf("Unexpected error: %v", err)
+ }
+ if !match {
+ t.Fatalf("Expected match for small error case. Reason: %s", reason)
+ }
}
diff --git a/pkg/fuzz/component/component.go b/pkg/fuzz/component/component.go
index a15ac2856a..c3500048b1 100644
--- a/pkg/fuzz/component/component.go
+++ b/pkg/fuzz/component/component.go
@@ -67,8 +67,8 @@ const (
var Components = []string{
RequestBodyComponent,
RequestQueryComponent,
- RequestPathComponent,
RequestHeaderComponent,
+ RequestPathComponent,
RequestCookieComponent,
}
diff --git a/pkg/fuzz/component/cookie.go b/pkg/fuzz/component/cookie.go
index 77667c7479..25f29e794a 100644
--- a/pkg/fuzz/component/cookie.go
+++ b/pkg/fuzz/component/cookie.go
@@ -52,10 +52,6 @@ func (c *Cookie) Parse(req *retryablehttp.Request) (bool, error) {
// Iterate iterates through the component
func (c *Cookie) Iterate(callback func(key string, value interface{}) error) (err error) {
c.value.parsed.Iterate(func(key string, value any) bool {
- // Skip ignored cookies
- if _, ok := defaultIgnoredCookieKeys[key]; ok {
- return ok
- }
if errx := callback(key, value); errx != nil {
err = errx
return false
@@ -85,6 +81,7 @@ func (c *Cookie) Delete(key string) error {
// Rebuild returns a new request with the
// component rebuilt
func (c *Cookie) Rebuild() (*retryablehttp.Request, error) {
+ // TODO: Fix cookie duplication with auth-file
cloned := c.req.Clone(context.Background())
cloned.Header.Del("Cookie")
@@ -106,47 +103,3 @@ func (c *Cookie) Clone() Component {
req: c.req.Clone(context.Background()),
}
}
-
-// A list of cookies that are essential to the request and
-// must not be fuzzed.
-var defaultIgnoredCookieKeys = map[string]struct{}{
- "awsELB": {},
- "AWSALB": {},
- "AWSALBCORS": {},
- "__utma": {},
- "__utmb": {},
- "__utmc": {},
- "__utmt": {},
- "__utmz": {},
- "_ga": {},
- "_gat": {},
- "_gid": {},
- "_gcl_au": {},
- "_fbp": {},
- "fr": {},
- "__hstc": {},
- "hubspotutk": {},
- "__hssc": {},
- "__hssrc": {},
- "mp_mixpanel__c": {},
- "JSESSIONID": {},
- "NREUM": {},
- "_pk_id": {},
- "_pk_ref": {},
- "_pk_ses": {},
- "_pk_cvar": {},
- "_pk_hsr": {},
- "_hjIncludedInSample": {},
- "__cfduid": {},
- "cf_use_ob": {},
- "cf_ob_info": {},
- "intercom-session": {},
- "optimizelyEndUserId": {},
- "optimizelySegments": {},
- "optimizelyBuckets": {},
- "optimizelyPendingLogEvents": {},
- "YSC": {},
- "VISITOR_INFO1_LIVE": {},
- "PREF": {},
- "GPS": {},
-}
diff --git a/pkg/fuzz/execute.go b/pkg/fuzz/execute.go
index ff9881ce92..ea4a3e0fbf 100644
--- a/pkg/fuzz/execute.go
+++ b/pkg/fuzz/execute.go
@@ -9,6 +9,7 @@ import (
"github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
"github.com/projectdiscovery/nuclei/v3/pkg/fuzz/component"
+ fuzzStats "github.com/projectdiscovery/nuclei/v3/pkg/fuzz/stats"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs"
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/expressions"
@@ -122,6 +123,18 @@ func (rule *Rule) Execute(input *ExecuteRuleInput) (err error) {
return nil
})
}
+
+ if rule.options.FuzzStatsDB != nil {
+ _ = component.Iterate(func(key string, value interface{}) error {
+ rule.options.FuzzStatsDB.RecordComponentEvent(fuzzStats.ComponentEvent{
+ URL: input.Input.MetaInput.Target(),
+ ComponentType: componentName,
+ ComponentName: fmt.Sprintf("%v", value),
+ })
+ return nil
+ })
+ }
+
finalComponentList = append(finalComponentList, component)
}
if len(displayDebugFuzzPoints) > 0 {
diff --git a/pkg/fuzz/stats/db.go b/pkg/fuzz/stats/db.go
new file mode 100644
index 0000000000..d5caf9a75d
--- /dev/null
+++ b/pkg/fuzz/stats/db.go
@@ -0,0 +1,15 @@
+package stats
+
+import (
+ _ "embed"
+
+ _ "github.com/mattn/go-sqlite3"
+)
+
+type StatsDatabase interface {
+ Close()
+
+ InsertComponent(event ComponentEvent) error
+ InsertMatchedRecord(event FuzzingEvent) error
+ InsertError(event ErrorEvent) error
+}
diff --git a/pkg/fuzz/stats/db_test.go b/pkg/fuzz/stats/db_test.go
new file mode 100644
index 0000000000..e8a5c1e313
--- /dev/null
+++ b/pkg/fuzz/stats/db_test.go
@@ -0,0 +1,24 @@
+package stats
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/require"
+)
+
+func Test_NewStatsDatabase(t *testing.T) {
+ db, err := NewSimpleStats()
+ require.NoError(t, err)
+
+ err = db.InsertMatchedRecord(FuzzingEvent{
+ URL: "http://localhost:8080/login",
+ TemplateID: "apache-struts2-001",
+ ComponentType: "path",
+ ComponentName: "/login",
+ PayloadSent: "/login'\"><",
+ StatusCode: 401,
+ })
+ require.NoError(t, err)
+
+ //os.Remove("test.stats.db")
+}
diff --git a/pkg/fuzz/stats/simple.go b/pkg/fuzz/stats/simple.go
new file mode 100644
index 0000000000..4a93aaaa42
--- /dev/null
+++ b/pkg/fuzz/stats/simple.go
@@ -0,0 +1,164 @@
+package stats
+
+import (
+ "fmt"
+ "net/http"
+ "net/url"
+ "strings"
+ "sync"
+ "sync/atomic"
+)
+
+type simpleStats struct {
+ totalComponentsTested atomic.Int64
+ totalEndpointsTested atomic.Int64
+ totalFuzzedRequests atomic.Int64
+ totalMatchedResults atomic.Int64
+ totalTemplatesTested atomic.Int64
+ totalErroredRequests atomic.Int64
+
+ statusCodes sync.Map
+ severityCounts sync.Map
+
+ componentsUniqueMap sync.Map
+ endpointsUniqueMap sync.Map
+ templatesUniqueMap sync.Map
+ errorGroupedStats sync.Map
+}
+
+func NewSimpleStats() (*simpleStats, error) {
+ return &simpleStats{
+ totalComponentsTested: atomic.Int64{},
+ totalEndpointsTested: atomic.Int64{},
+ totalMatchedResults: atomic.Int64{},
+ totalFuzzedRequests: atomic.Int64{},
+ totalTemplatesTested: atomic.Int64{},
+ totalErroredRequests: atomic.Int64{},
+ statusCodes: sync.Map{},
+ severityCounts: sync.Map{},
+ componentsUniqueMap: sync.Map{},
+ endpointsUniqueMap: sync.Map{},
+ templatesUniqueMap: sync.Map{},
+ errorGroupedStats: sync.Map{},
+ }, nil
+}
+
+func (s *simpleStats) Close() {}
+
+func (s *simpleStats) InsertComponent(event ComponentEvent) error {
+ componentKey := fmt.Sprintf("%s_%s", event.ComponentName, event.ComponentType)
+ if _, ok := s.componentsUniqueMap.Load(componentKey); !ok {
+ s.componentsUniqueMap.Store(componentKey, true)
+ s.totalComponentsTested.Add(1)
+ }
+
+ parsedURL, err := url.Parse(event.URL)
+ if err != nil {
+ return err
+ }
+
+ endpointsKey := fmt.Sprintf("%s_%s", event.siteName, parsedURL.Path)
+ if _, ok := s.endpointsUniqueMap.Load(endpointsKey); !ok {
+ s.endpointsUniqueMap.Store(endpointsKey, true)
+ s.totalEndpointsTested.Add(1)
+ }
+
+ return nil
+}
+
+func (s *simpleStats) InsertMatchedRecord(event FuzzingEvent) error {
+ s.totalFuzzedRequests.Add(1)
+
+ s.incrementStatusCode(event.StatusCode)
+ if event.Matched {
+ s.totalMatchedResults.Add(1)
+
+ s.incrementSeverityCount(event.Severity)
+ }
+
+ if _, ok := s.templatesUniqueMap.Load(event.TemplateID); !ok {
+ s.templatesUniqueMap.Store(event.TemplateID, true)
+ s.totalTemplatesTested.Add(1)
+ }
+ return nil
+}
+
+func (s *simpleStats) InsertError(event ErrorEvent) error {
+ s.totalErroredRequests.Add(1)
+
+ value, _ := s.errorGroupedStats.LoadOrStore(event.Error, &atomic.Int64{})
+ if counter, ok := value.(*atomic.Int64); ok {
+ counter.Add(1)
+ }
+ return nil
+}
+
+type SimpleStatsResponse struct {
+ TotalMatchedResults int64
+ TotalComponentsTested int64
+ TotalEndpointsTested int64
+ TotalFuzzedRequests int64
+ TotalTemplatesTested int64
+ TotalErroredRequests int64
+ StatusCodes map[string]int64
+ SeverityCounts map[string]int64
+ ErrorGroupedStats map[string]int64
+}
+
+func (s *simpleStats) GetStatistics() SimpleStatsResponse {
+ statusStats := make(map[string]int64)
+ s.statusCodes.Range(func(key, value interface{}) bool {
+ if count, ok := value.(*atomic.Int64); ok {
+ statusStats[formatStatusCode(key.(int))] = count.Load()
+ }
+ return true
+ })
+
+ severityStats := make(map[string]int64)
+ s.severityCounts.Range(func(key, value interface{}) bool {
+ if count, ok := value.(*atomic.Int64); ok {
+ severityStats[key.(string)] = count.Load()
+ }
+ return true
+ })
+
+ errorStats := make(map[string]int64)
+ s.errorGroupedStats.Range(func(key, value interface{}) bool {
+ if count, ok := value.(*atomic.Int64); ok {
+ errorStats[key.(string)] = count.Load()
+ }
+ return true
+ })
+
+ return SimpleStatsResponse{
+ TotalMatchedResults: s.totalMatchedResults.Load(),
+ StatusCodes: statusStats,
+ SeverityCounts: severityStats,
+ TotalComponentsTested: s.totalComponentsTested.Load(),
+ TotalEndpointsTested: s.totalEndpointsTested.Load(),
+ TotalFuzzedRequests: s.totalFuzzedRequests.Load(),
+ TotalTemplatesTested: s.totalTemplatesTested.Load(),
+ TotalErroredRequests: s.totalErroredRequests.Load(),
+ ErrorGroupedStats: errorStats,
+ }
+}
+
+func (s *simpleStats) incrementStatusCode(statusCode int) {
+ value, _ := s.statusCodes.LoadOrStore(statusCode, &atomic.Int64{})
+ if counter, ok := value.(*atomic.Int64); ok {
+ counter.Add(1)
+ }
+}
+
+func (s *simpleStats) incrementSeverityCount(severity string) {
+ value, _ := s.severityCounts.LoadOrStore(severity, &atomic.Int64{})
+ if counter, ok := value.(*atomic.Int64); ok {
+ counter.Add(1)
+ }
+}
+
+func formatStatusCode(code int) string {
+ escapedText := strings.ToTitle(strings.ReplaceAll(http.StatusText(code), " ", "_"))
+ formatted := fmt.Sprintf("%d_%s", code, escapedText)
+ return formatted
+}
diff --git a/pkg/fuzz/stats/stats.go b/pkg/fuzz/stats/stats.go
new file mode 100644
index 0000000000..87ed5c379a
--- /dev/null
+++ b/pkg/fuzz/stats/stats.go
@@ -0,0 +1,106 @@
+// Package stats implements a statistics recording module for
+// nuclei fuzzing.
+package stats
+
+import (
+ "fmt"
+ "log"
+ "net/url"
+
+ "github.com/pkg/errors"
+)
+
+// Tracker is a stats tracker module for fuzzing server
+type Tracker struct {
+ database *simpleStats
+}
+
+// NewTracker creates a new tracker instance
+func NewTracker() (*Tracker, error) {
+ db, err := NewSimpleStats()
+ if err != nil {
+ return nil, errors.Wrap(err, "could not create new tracker")
+ }
+
+ tracker := &Tracker{
+ database: db,
+ }
+ return tracker, nil
+}
+
+func (t *Tracker) GetStats() SimpleStatsResponse {
+ return t.database.GetStatistics()
+}
+
+// Close closes the tracker
+func (t *Tracker) Close() {
+ t.database.Close()
+}
+
+// FuzzingEvent is a fuzzing event
+type FuzzingEvent struct {
+ URL string
+ ComponentType string
+ ComponentName string
+ TemplateID string
+ PayloadSent string
+ StatusCode int
+ Matched bool
+ RawRequest string
+ RawResponse string
+ Severity string
+
+ siteName string
+}
+
+func (t *Tracker) RecordResultEvent(event FuzzingEvent) {
+ event.siteName = getCorrectSiteName(event.URL)
+ if err := t.database.InsertMatchedRecord(event); err != nil {
+ log.Printf("could not insert matched record: %s", err)
+ }
+}
+
+type ComponentEvent struct {
+ URL string
+ ComponentType string
+ ComponentName string
+
+ siteName string
+}
+
+func (t *Tracker) RecordComponentEvent(event ComponentEvent) {
+ event.siteName = getCorrectSiteName(event.URL)
+ if err := t.database.InsertComponent(event); err != nil {
+ log.Printf("could not insert component record: %s", err)
+ }
+}
+
+type ErrorEvent struct {
+ TemplateID string
+ URL string
+ Error string
+}
+
+func (t *Tracker) RecordErrorEvent(event ErrorEvent) {
+ if err := t.database.InsertError(event); err != nil {
+ log.Printf("could not insert error record: %s", err)
+ }
+}
+
+func getCorrectSiteName(originalURL string) string {
+ parsed, err := url.Parse(originalURL)
+ if err != nil {
+ return ""
+ }
+
+ // Site is the host:port combo
+ siteName := parsed.Host
+ if parsed.Port() == "" {
+ if parsed.Scheme == "https" {
+ siteName = fmt.Sprintf("%s:443", siteName)
+ } else if parsed.Scheme == "http" {
+ siteName = fmt.Sprintf("%s:80", siteName)
+ }
+ }
+ return siteName
+}
diff --git a/pkg/input/formats/burp/burp.go b/pkg/input/formats/burp/burp.go
index 6ad5f548b5..9b2a362dfe 100644
--- a/pkg/input/formats/burp/burp.go
+++ b/pkg/input/formats/burp/burp.go
@@ -2,7 +2,7 @@ package burp
import (
"encoding/base64"
- "os"
+ "io"
"strings"
"github.com/pkg/errors"
@@ -35,14 +35,8 @@ func (j *BurpFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
-func (j *BurpFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
- file, err := os.Open(input)
- if err != nil {
- return errors.Wrap(err, "could not open data file")
- }
- defer file.Close()
-
- items, err := burpxml.Parse(file, true)
+func (j *BurpFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
+ items, err := burpxml.Parse(input, true)
if err != nil {
return errors.Wrap(err, "could not decode burp xml schema")
}
diff --git a/pkg/input/formats/burp/burp_test.go b/pkg/input/formats/burp/burp_test.go
index 330218a9e5..97e80c534f 100644
--- a/pkg/input/formats/burp/burp_test.go
+++ b/pkg/input/formats/burp/burp_test.go
@@ -1,6 +1,7 @@
package burp
import (
+ "os"
"testing"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
@@ -14,10 +15,14 @@ func TestBurpParse(t *testing.T) {
var gotMethodsToURLs []string
- err := format.Parse(proxifyInputFile, func(request *types.RequestResponse) bool {
+ file, err := os.Open(proxifyInputFile)
+ require.Nilf(t, err, "error opening proxify input file: %v", err)
+ defer file.Close()
+
+ err = format.Parse(file, func(request *types.RequestResponse) bool {
gotMethodsToURLs = append(gotMethodsToURLs, request.URL.String())
return false
- })
+ }, proxifyInputFile)
if err != nil {
t.Fatal(err)
}
diff --git a/pkg/input/formats/formats.go b/pkg/input/formats/formats.go
index af2b4569c6..03c65d3fea 100644
--- a/pkg/input/formats/formats.go
+++ b/pkg/input/formats/formats.go
@@ -2,6 +2,7 @@ package formats
import (
"errors"
+ "io"
"os"
"strings"
@@ -35,7 +36,7 @@ type Format interface {
Name() string
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
- Parse(input string, resultsCb ParseReqRespCallback) error
+ Parse(input io.Reader, resultsCb ParseReqRespCallback, filePath string) error
// SetOptions sets the options for the input format
SetOptions(options InputFormatOptions)
}
diff --git a/pkg/input/formats/json/json.go b/pkg/input/formats/json/json.go
index 9296a1c8aa..fdf05aa152 100644
--- a/pkg/input/formats/json/json.go
+++ b/pkg/input/formats/json/json.go
@@ -2,7 +2,6 @@ package json
import (
"io"
- "os"
"github.com/pkg/errors"
"github.com/projectdiscovery/gologger"
@@ -46,14 +45,8 @@ func (j *JSONFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
-func (j *JSONFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
- file, err := os.Open(input)
- if err != nil {
- return errors.Wrap(err, "could not open json file")
- }
- defer file.Close()
-
- decoder := json.NewDecoder(file)
+func (j *JSONFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
+ decoder := json.NewDecoder(input)
for {
var request proxifyRequest
err := decoder.Decode(&request)
diff --git a/pkg/input/formats/json/json_test.go b/pkg/input/formats/json/json_test.go
index b72bf4c197..a6734f083e 100644
--- a/pkg/input/formats/json/json_test.go
+++ b/pkg/input/formats/json/json_test.go
@@ -1,6 +1,7 @@
package json
import (
+ "os"
"testing"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
@@ -41,11 +42,15 @@ func TestJSONFormatterParse(t *testing.T) {
proxifyInputFile := "../testdata/ginandjuice.proxify.json"
+ file, err := os.Open(proxifyInputFile)
+ require.Nilf(t, err, "error opening proxify input file: %v", err)
+ defer file.Close()
+
var urls []string
- err := format.Parse(proxifyInputFile, func(request *types.RequestResponse) bool {
+ err = format.Parse(file, func(request *types.RequestResponse) bool {
urls = append(urls, request.URL.String())
return false
- })
+ }, proxifyInputFile)
if err != nil {
t.Fatal(err)
}
diff --git a/pkg/input/formats/openapi/openapi.go b/pkg/input/formats/openapi/openapi.go
index afbe379fd2..c2086636b4 100644
--- a/pkg/input/formats/openapi/openapi.go
+++ b/pkg/input/formats/openapi/openapi.go
@@ -1,6 +1,8 @@
package openapi
import (
+ "io"
+
"github.com/getkin/kin-openapi/openapi3"
"github.com/pkg/errors"
"github.com/projectdiscovery/nuclei/v3/pkg/input/formats"
@@ -29,9 +31,9 @@ func (j *OpenAPIFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
-func (j *OpenAPIFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
+func (j *OpenAPIFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
loader := openapi3.NewLoader()
- schema, err := loader.LoadFromFile(input)
+ schema, err := loader.LoadFromIoReader(input)
if err != nil {
return errors.Wrap(err, "could not decode openapi 3.0 schema")
}
diff --git a/pkg/input/formats/openapi/openapi_test.go b/pkg/input/formats/openapi/openapi_test.go
index f48385a808..c202bdcbee 100644
--- a/pkg/input/formats/openapi/openapi_test.go
+++ b/pkg/input/formats/openapi/openapi_test.go
@@ -1,6 +1,7 @@
package openapi
import (
+ "os"
"strings"
"testing"
@@ -41,11 +42,15 @@ func TestOpenAPIParser(t *testing.T) {
gotMethodsToURLs := make(map[string][]string)
- err := format.Parse(proxifyInputFile, func(rr *types.RequestResponse) bool {
+ file, err := os.Open(proxifyInputFile)
+ require.Nilf(t, err, "error opening proxify input file: %v", err)
+ defer file.Close()
+
+ err = format.Parse(file, func(rr *types.RequestResponse) bool {
gotMethodsToURLs[rr.Request.Method] = append(gotMethodsToURLs[rr.Request.Method],
strings.Replace(rr.URL.String(), baseURL, "{{baseUrl}}", 1))
return false
- })
+ }, proxifyInputFile)
if err != nil {
t.Fatal(err)
}
diff --git a/pkg/input/formats/swagger/swagger.go b/pkg/input/formats/swagger/swagger.go
index 35db19d8da..e33ae931ce 100644
--- a/pkg/input/formats/swagger/swagger.go
+++ b/pkg/input/formats/swagger/swagger.go
@@ -2,7 +2,6 @@ package swagger
import (
"io"
- "os"
"path"
"github.com/getkin/kin-openapi/openapi2"
@@ -12,6 +11,7 @@ import (
"github.com/pkg/errors"
"github.com/projectdiscovery/nuclei/v3/pkg/input/formats"
"github.com/projectdiscovery/nuclei/v3/pkg/input/formats/openapi"
+
"github.com/projectdiscovery/nuclei/v3/pkg/utils/json"
)
@@ -38,24 +38,19 @@ func (j *SwaggerFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
-func (j *SwaggerFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
- file, err := os.Open(input)
- if err != nil {
- return errors.Wrap(err, "could not open data file")
- }
- defer file.Close()
-
+func (j *SwaggerFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
schemav2 := &openapi2.T{}
- ext := path.Ext(input)
-
+ ext := path.Ext(filePath)
+ var err error
if ext == ".yaml" || ext == ".yml" {
- data, err_data := io.ReadAll(file)
- if err_data != nil {
+ var data []byte
+ data, err = io.ReadAll(input)
+ if err != nil {
return errors.Wrap(err, "could not read data file")
}
err = yaml.Unmarshal(data, schemav2)
} else {
- err = json.NewDecoder(file).Decode(schemav2)
+ err = json.NewDecoder(input).Decode(schemav2)
}
if err != nil {
return errors.Wrap(err, "could not decode openapi 2.0 schema")
diff --git a/pkg/input/formats/swagger/swagger_test.go b/pkg/input/formats/swagger/swagger_test.go
index 065ae78f63..caed82a13b 100644
--- a/pkg/input/formats/swagger/swagger_test.go
+++ b/pkg/input/formats/swagger/swagger_test.go
@@ -1,6 +1,7 @@
package swagger
import (
+ "os"
"testing"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
@@ -14,10 +15,14 @@ func TestSwaggerAPIParser(t *testing.T) {
var gotMethodsToURLs []string
- err := format.Parse(proxifyInputFile, func(request *types.RequestResponse) bool {
+ file, err := os.Open(proxifyInputFile)
+ require.Nilf(t, err, "error opening proxify input file: %v", err)
+ defer file.Close()
+
+ err = format.Parse(file, func(request *types.RequestResponse) bool {
gotMethodsToURLs = append(gotMethodsToURLs, request.URL.String())
return false
- })
+ }, proxifyInputFile)
if err != nil {
t.Fatal(err)
}
diff --git a/pkg/input/formats/yaml/multidoc.go b/pkg/input/formats/yaml/multidoc.go
index dc258408c1..6d75e0334a 100644
--- a/pkg/input/formats/yaml/multidoc.go
+++ b/pkg/input/formats/yaml/multidoc.go
@@ -2,7 +2,6 @@ package yaml
import (
"io"
- "os"
"strings"
"github.com/pkg/errors"
@@ -46,14 +45,8 @@ func (j *YamlMultiDocFormat) SetOptions(options formats.InputFormatOptions) {
// Parse parses the input and calls the provided callback
// function for each RawRequest it discovers.
-func (j *YamlMultiDocFormat) Parse(input string, resultsCb formats.ParseReqRespCallback) error {
- file, err := os.Open(input)
- if err != nil {
- return errors.Wrap(err, "could not open json file")
- }
- defer file.Close()
-
- decoder := YamlUtil.NewDecoder(file)
+func (j *YamlMultiDocFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallback, filePath string) error {
+ decoder := YamlUtil.NewDecoder(input)
for {
var request proxifyRequest
err := decoder.Decode(&request)
diff --git a/pkg/input/formats/yaml/multidoc_test.go b/pkg/input/formats/yaml/multidoc_test.go
index 6275eae593..0b91e774a3 100644
--- a/pkg/input/formats/yaml/multidoc_test.go
+++ b/pkg/input/formats/yaml/multidoc_test.go
@@ -1,6 +1,7 @@
package yaml
import (
+ "os"
"testing"
"github.com/projectdiscovery/nuclei/v3/pkg/input/types"
@@ -17,11 +18,15 @@ func TestYamlFormatterParse(t *testing.T) {
"https://ginandjuice.shop/users/3",
}
+ file, err := os.Open(proxifyInputFile)
+ require.Nilf(t, err, "error opening proxify input file: %v", err)
+ defer file.Close()
+
var urls []string
- err := format.Parse(proxifyInputFile, func(request *types.RequestResponse) bool {
+ err = format.Parse(file, func(request *types.RequestResponse) bool {
urls = append(urls, request.URL.String())
return false
- })
+ }, proxifyInputFile)
require.Nilf(t, err, "error parsing yaml file: %v", err)
require.Len(t, urls, len(expectedUrls), "invalid number of urls")
require.ElementsMatch(t, urls, expectedUrls, "invalid urls")
diff --git a/pkg/input/provider/http/multiformat.go b/pkg/input/provider/http/multiformat.go
index d58970fec5..a534879c17 100644
--- a/pkg/input/provider/http/multiformat.go
+++ b/pkg/input/provider/http/multiformat.go
@@ -1,6 +1,9 @@
package http
import (
+ "bytes"
+ "io"
+ "os"
"strings"
"github.com/pkg/errors"
@@ -23,17 +26,25 @@ type HttpMultiFormatOptions struct {
InputFile string
// InputMode is the mode of input
InputMode string
+
+	// InputContents is optional raw input contents, used when InputFile is empty
+ InputContents string
}
// HttpInputProvider implements an input provider for nuclei that loads
// inputs from multiple formats like burp, openapi, postman,proxify, etc.
type HttpInputProvider struct {
format formats.Format
+ inputData []byte
inputFile string
count int64
}
// NewHttpInputProvider creates a new input provider for nuclei from a file
+// or from an input string.
+//
+// If an input file is provided it takes precedence;
+// otherwise the input string is used.
func NewHttpInputProvider(opts *HttpMultiFormatOptions) (*HttpInputProvider, error) {
var format formats.Format
for _, provider := range providersList {
@@ -48,14 +59,40 @@ func NewHttpInputProvider(opts *HttpMultiFormatOptions) (*HttpInputProvider, err
// Do a first pass over the input to identify any errors
// and get the count of the input file as well
count := int64(0)
- parseErr := format.Parse(opts.InputFile, func(request *types.RequestResponse) bool {
+ var inputFile *os.File
+ var inputReader io.Reader
+ if opts.InputFile != "" {
+ file, err := os.Open(opts.InputFile)
+ if err != nil {
+ return nil, errors.Wrap(err, "could not open input file")
+ }
+ inputFile = file
+ inputReader = file
+ } else {
+ inputReader = strings.NewReader(opts.InputContents)
+ }
+ defer func() {
+ if inputFile != nil {
+ inputFile.Close()
+ }
+ }()
+
+ data, err := io.ReadAll(inputReader)
+ if err != nil {
+ return nil, errors.Wrap(err, "could not read input file")
+ }
+ if len(data) == 0 {
+ return nil, errors.New("input file is empty")
+ }
+
+ parseErr := format.Parse(bytes.NewReader(data), func(request *types.RequestResponse) bool {
count++
return false
- })
+ }, opts.InputFile)
if parseErr != nil {
return nil, errors.Wrap(parseErr, "could not parse input file")
}
- return &HttpInputProvider{format: format, inputFile: opts.InputFile, count: count}, nil
+ return &HttpInputProvider{format: format, inputData: data, inputFile: opts.InputFile, count: count}, nil
}
// Count returns the number of items for input provider
@@ -65,12 +102,12 @@ func (i *HttpInputProvider) Count() int64 {
// Iterate over all inputs in order
func (i *HttpInputProvider) Iterate(callback func(value *contextargs.MetaInput) bool) {
- err := i.format.Parse(i.inputFile, func(request *types.RequestResponse) bool {
+ err := i.format.Parse(bytes.NewReader(i.inputData), func(request *types.RequestResponse) bool {
metaInput := contextargs.NewMetaInput()
metaInput.ReqResp = request
metaInput.Input = request.URL.String()
return callback(metaInput)
- })
+ }, i.inputFile)
if err != nil {
gologger.Warning().Msgf("Could not parse input file while iterating: %s\n", err)
}
diff --git a/pkg/output/multi_writer.go b/pkg/output/multi_writer.go
index 997b00bc07..8ea729b4b8 100644
--- a/pkg/output/multi_writer.go
+++ b/pkg/output/multi_writer.go
@@ -8,6 +8,8 @@ type MultiWriter struct {
writers []Writer
}
+var _ Writer = &MultiWriter{}
+
// NewMultiWriter creates a new MultiWriter instance
func NewMultiWriter(writers ...Writer) *MultiWriter {
return &MultiWriter{writers: writers}
@@ -57,3 +59,9 @@ func (mw *MultiWriter) WriteStoreDebugData(host, templateID, eventType string, d
writer.WriteStoreDebugData(host, templateID, eventType, data)
}
}
+
+func (mw *MultiWriter) RequestStatsLog(statusCode, response string) {
+ for _, writer := range mw.writers {
+ writer.RequestStatsLog(statusCode, response)
+ }
+}
diff --git a/pkg/output/output.go b/pkg/output/output.go
index 84201c0d73..2ccbd2c1d9 100644
--- a/pkg/output/output.go
+++ b/pkg/output/output.go
@@ -50,6 +50,8 @@ type Writer interface {
WriteFailure(*InternalWrappedEvent) error
// Request logs a request in the trace log
Request(templateID, url, requestType string, err error)
+ // RequestStatsLog logs a request stats log
+ RequestStatsLog(statusCode, response string)
// WriteStoreDebugData writes the request/response debug data to file
WriteStoreDebugData(host, templateID, eventType string, data string)
}
@@ -73,8 +75,14 @@ type StandardWriter struct {
DisableStdout bool
AddNewLinesOutputFile bool // by default this is only done for stdout
KeysToRedact []string
+
+ // JSONLogRequestHook is a hook that can be used to log request/response
+ // when using custom server code with output
+ JSONLogRequestHook func(*JSONLogRequest)
}
+var _ Writer = &StandardWriter{}
+
var decolorizerRegex = regexp.MustCompile(`\x1B\[[0-9;]*[a-zA-Z]`)
// InternalEvent is an internal output generation structure for nuclei.
@@ -348,18 +356,40 @@ type JSONLogRequest struct {
// Request writes a log the requests trace log
func (w *StandardWriter) Request(templatePath, input, requestType string, requestErr error) {
- if w.traceFile == nil && w.errorFile == nil {
+ if w.traceFile == nil && w.errorFile == nil && w.JSONLogRequestHook == nil {
+ return
+ }
+
+ request := getJSONLogRequestFromError(templatePath, input, requestType, requestErr)
+ if w.timestamp {
+ ts := time.Now()
+ request.Timestamp = &ts
+ }
+ data, err := jsoniter.Marshal(request)
+ if err != nil {
return
}
+
+ if w.JSONLogRequestHook != nil {
+ w.JSONLogRequestHook(request)
+ }
+
+ if w.traceFile != nil {
+ _, _ = w.traceFile.Write(data)
+ }
+
+ if requestErr != nil && w.errorFile != nil {
+ _, _ = w.errorFile.Write(data)
+ }
+}
+
+func getJSONLogRequestFromError(templatePath, input, requestType string, requestErr error) *JSONLogRequest {
request := &JSONLogRequest{
Template: templatePath,
Input: input,
Type: requestType,
}
- if w.timestamp {
- ts := time.Now()
- request.Timestamp = &ts
- }
+
parsed, _ := urlutil.ParseAbsoluteURL(input, false)
if parsed != nil {
request.Address = parsed.Hostname()
@@ -397,18 +427,7 @@ func (w *StandardWriter) Request(templatePath, input, requestType string, reques
if val := errkit.GetAttrValue(requestErr, "address"); val.Any() != nil {
request.Address = val.String()
}
- data, err := jsoniter.Marshal(request)
- if err != nil {
- return
- }
-
- if w.traceFile != nil {
- _, _ = w.traceFile.Write(data)
- }
-
- if requestErr != nil && w.errorFile != nil {
- _, _ = w.errorFile.Write(data)
- }
+ return request
}
// Colorizer returns the colorizer instance for writer
@@ -512,6 +531,13 @@ func sanitizeFileName(fileName string) string {
}
func (w *StandardWriter) WriteStoreDebugData(host, templateID, eventType string, data string) {
if w.storeResponse {
+ if len(host) > 60 {
+ host = host[:57] + "..."
+ }
+ if len(templateID) > 100 {
+ templateID = templateID[:97] + "..."
+ }
+
filename := sanitizeFileName(fmt.Sprintf("%s_%s", host, templateID))
subFolder := filepath.Join(w.storeResponseDir, sanitizeFileName(eventType))
if !fileutil.FolderExists(subFolder) {
@@ -526,7 +552,6 @@ func (w *StandardWriter) WriteStoreDebugData(host, templateID, eventType string,
_, _ = f.WriteString(fmt.Sprintln(data))
f.Close()
}
-
}
// tryParseCause tries to parse the cause of given error
@@ -540,12 +565,14 @@ func tryParseCause(err error) error {
if strings.HasPrefix(msg, "ReadStatusLine:") {
// last index is actual error (from rawhttp)
parts := strings.Split(msg, ":")
- return errkit.New("%s", strings.TrimSpace(parts[len(parts)-1]))
+ return errkit.New(strings.TrimSpace(parts[len(parts)-1]))
}
if strings.Contains(msg, "read ") {
// same here
parts := strings.Split(msg, ":")
- return errkit.New("%s", strings.TrimSpace(parts[len(parts)-1]))
+ return errkit.New(strings.TrimSpace(parts[len(parts)-1]))
}
return err
}
+
+func (w *StandardWriter) RequestStatsLog(statusCode, response string) {}
diff --git a/pkg/output/output_stats.go b/pkg/output/output_stats.go
new file mode 100644
index 0000000000..7b0d509cdc
--- /dev/null
+++ b/pkg/output/output_stats.go
@@ -0,0 +1,51 @@
+package output
+
+import (
+ "github.com/logrusorgru/aurora"
+ "github.com/projectdiscovery/nuclei/v3/pkg/output/stats"
+)
+
+// StatsOutputWriter implements writer interface for stats observation
+type StatsOutputWriter struct {
+ colorizer aurora.Aurora
+ Tracker *stats.Tracker
+}
+
+var _ Writer = &StatsOutputWriter{}
+
+// NewTrackerWriter returns a new StatsOutputWriter that records stats into t.
+func NewTrackerWriter(t *stats.Tracker) *StatsOutputWriter {
+ return &StatsOutputWriter{
+ colorizer: aurora.NewAurora(true),
+ Tracker: t,
+ }
+}
+
+func (tw *StatsOutputWriter) Close() {}
+
+func (tw *StatsOutputWriter) Colorizer() aurora.Aurora {
+ return tw.colorizer
+}
+
+func (tw *StatsOutputWriter) Write(event *ResultEvent) error {
+ return nil
+}
+
+func (tw *StatsOutputWriter) WriteFailure(event *InternalWrappedEvent) error {
+ return nil
+}
+
+func (tw *StatsOutputWriter) Request(templateID, url, requestType string, err error) {
+ if err == nil {
+ return
+ }
+ jsonReq := getJSONLogRequestFromError(templateID, url, requestType, err)
+ tw.Tracker.TrackErrorKind(jsonReq.Error)
+}
+
+func (tw *StatsOutputWriter) WriteStoreDebugData(host, templateID, eventType string, data string) {}
+
+func (tw *StatsOutputWriter) RequestStatsLog(statusCode, response string) {
+ tw.Tracker.TrackStatusCode(statusCode)
+ tw.Tracker.TrackWAFDetected(response)
+}
diff --git a/pkg/output/stats/stats.go b/pkg/output/stats/stats.go
new file mode 100644
index 0000000000..1e030a88da
--- /dev/null
+++ b/pkg/output/stats/stats.go
@@ -0,0 +1,181 @@
+// Package stats provides a stats tracker for tracking Status Codes,
+// Errors & WAF detection events.
+//
+// It is wrapped and called by output.Writer interface.
+package stats
+
+import (
+ _ "embed"
+ "fmt"
+ "sort"
+ "strconv"
+ "sync/atomic"
+
+ "github.com/logrusorgru/aurora"
+ "github.com/projectdiscovery/nuclei/v3/pkg/output/stats/waf"
+ mapsutil "github.com/projectdiscovery/utils/maps"
+)
+
+// Tracker is a stats tracker instance for nuclei scans
+type Tracker struct {
+ // counters for various stats
+ statusCodes *mapsutil.SyncLockMap[string, *atomic.Int32]
+ errorCodes *mapsutil.SyncLockMap[string, *atomic.Int32]
+ wafDetected *mapsutil.SyncLockMap[string, *atomic.Int32]
+
+ // internal stuff
+ wafDetector *waf.WafDetector
+}
+
+// NewTracker creates a new Tracker instance.
+func NewTracker() *Tracker {
+ return &Tracker{
+ statusCodes: mapsutil.NewSyncLockMap[string, *atomic.Int32](),
+ errorCodes: mapsutil.NewSyncLockMap[string, *atomic.Int32](),
+ wafDetected: mapsutil.NewSyncLockMap[string, *atomic.Int32](),
+ wafDetector: waf.NewWafDetector(),
+ }
+}
+
+// TrackStatusCode tracks the status code of a request
+func (t *Tracker) TrackStatusCode(statusCode string) {
+ t.incrementCounter(t.statusCodes, statusCode)
+}
+
+// TrackErrorKind tracks the error kind of a request
+func (t *Tracker) TrackErrorKind(errKind string) {
+ t.incrementCounter(t.errorCodes, errKind)
+}
+
+// TrackWAFDetected tracks WAF detection for a response
+//
+// First it detects if a waf is running and if so, it increments
+// the counter for the waf.
+func (t *Tracker) TrackWAFDetected(httpResponse string) {
+ waf, ok := t.wafDetector.DetectWAF(httpResponse)
+ if !ok {
+ return
+ }
+
+ t.incrementCounter(t.wafDetected, waf)
+}
+
+func (t *Tracker) incrementCounter(m *mapsutil.SyncLockMap[string, *atomic.Int32], key string) {
+ if counter, ok := m.Get(key); ok {
+ counter.Add(1)
+ } else {
+ newCounter := new(atomic.Int32)
+ newCounter.Store(1)
+ _ = m.Set(key, newCounter)
+ }
+}
+
+type StatsOutput struct {
+ StatusCodeStats map[string]int `json:"status_code_stats"`
+ ErrorStats map[string]int `json:"error_stats"`
+ WAFStats map[string]int `json:"waf_stats"`
+}
+
+func (t *Tracker) GetStats() *StatsOutput {
+ stats := &StatsOutput{
+ StatusCodeStats: make(map[string]int),
+ ErrorStats: make(map[string]int),
+ WAFStats: make(map[string]int),
+ }
+ _ = t.errorCodes.Iterate(func(k string, v *atomic.Int32) error {
+ stats.ErrorStats[k] = int(v.Load())
+ return nil
+ })
+ _ = t.statusCodes.Iterate(func(k string, v *atomic.Int32) error {
+ stats.StatusCodeStats[k] = int(v.Load())
+ return nil
+ })
+ _ = t.wafDetected.Iterate(func(k string, v *atomic.Int32) error {
+ waf, ok := t.wafDetector.GetWAF(k)
+ if !ok {
+ return nil
+ }
+ stats.WAFStats[waf.Name] = int(v.Load())
+ return nil
+ })
+ return stats
+}
+
+// DisplayTopStats prints the most relevant statistics for CLI
+func (t *Tracker) DisplayTopStats(noColor bool) {
+ stats := t.GetStats()
+
+ fmt.Printf("\n%s\n", aurora.Bold(aurora.Blue("Top Status Codes:")))
+ topStatusCodes := getTopN(stats.StatusCodeStats, 6)
+ for _, item := range topStatusCodes {
+ if noColor {
+ fmt.Printf(" %s: %d\n", item.Key, item.Value)
+ } else {
+ color := getStatusCodeColor(item.Key)
+ fmt.Printf(" %s: %d\n", aurora.Colorize(item.Key, color), item.Value)
+ }
+ }
+
+ if len(stats.ErrorStats) > 0 {
+ fmt.Printf("\n%s\n", aurora.Bold(aurora.Red("Top Errors:")))
+ topErrors := getTopN(stats.ErrorStats, 5)
+ for _, item := range topErrors {
+ if noColor {
+ fmt.Printf(" %s: %d\n", item.Key, item.Value)
+ } else {
+ fmt.Printf(" %s: %d\n", aurora.Red(item.Key), item.Value)
+ }
+ }
+ }
+
+ if len(stats.WAFStats) > 0 {
+ fmt.Printf("\n%s\n", aurora.Bold(aurora.Yellow("WAF Detections:")))
+ for name, count := range stats.WAFStats {
+ if noColor {
+ fmt.Printf(" %s: %d\n", name, count)
+ } else {
+ fmt.Printf(" %s: %d\n", aurora.Yellow(name), count)
+ }
+ }
+ }
+}
+
+// Helper struct for sorting
+type kv struct {
+ Key string
+ Value int
+}
+
+// getTopN returns top N items from a map, sorted by value
+func getTopN(m map[string]int, n int) []kv {
+ var items []kv
+ for k, v := range m {
+ items = append(items, kv{k, v})
+ }
+
+ sort.Slice(items, func(i, j int) bool {
+ return items[i].Value > items[j].Value
+ })
+
+ if len(items) > n {
+ items = items[:n]
+ }
+ return items
+}
+
+// getStatusCodeColor returns appropriate color for status code
+func getStatusCodeColor(statusCode string) aurora.Color {
+ code, _ := strconv.Atoi(statusCode)
+ switch {
+ case code >= 200 && code < 300:
+ return aurora.GreenFg
+ case code >= 300 && code < 400:
+ return aurora.BlueFg
+ case code >= 400 && code < 500:
+ return aurora.YellowFg
+ case code >= 500:
+ return aurora.RedFg
+ default:
+ return aurora.WhiteFg
+ }
+}
diff --git a/pkg/output/stats/stats_test.go b/pkg/output/stats/stats_test.go
new file mode 100644
index 0000000000..2eec59f300
--- /dev/null
+++ b/pkg/output/stats/stats_test.go
@@ -0,0 +1,36 @@
+package stats
+
+import (
+ "testing"
+)
+
+func TestTrackErrorKind(t *testing.T) {
+ tracker := NewTracker()
+
+ // Test single increment
+ tracker.TrackErrorKind("timeout")
+ if count, _ := tracker.errorCodes.Get("timeout"); count == nil || count.Load() != 1 {
+ t.Errorf("expected error kind timeout count to be 1, got %v", count)
+ }
+
+ // Test multiple increments
+ tracker.TrackErrorKind("timeout")
+ if count, _ := tracker.errorCodes.Get("timeout"); count == nil || count.Load() != 2 {
+ t.Errorf("expected error kind timeout count to be 2, got %v", count)
+ }
+
+ // Test different error kind
+ tracker.TrackErrorKind("connection-refused")
+ if count, _ := tracker.errorCodes.Get("connection-refused"); count == nil || count.Load() != 1 {
+ t.Errorf("expected error kind connection-refused count to be 1, got %v", count)
+ }
+}
+
+func TestTrackWaf_Detect(t *testing.T) {
+ tracker := NewTracker()
+
+ tracker.TrackWAFDetected("Attention Required! | Cloudflare")
+ if count, _ := tracker.wafDetected.Get("cloudflare"); count == nil || count.Load() != 1 {
+ t.Errorf("expected waf detected count to be 1, got %v", count)
+ }
+}
diff --git a/pkg/output/stats/waf/regexes.json b/pkg/output/stats/waf/regexes.json
new file mode 100644
index 0000000000..989334c0f6
--- /dev/null
+++ b/pkg/output/stats/waf/regexes.json
@@ -0,0 +1,903 @@
+{
+ "__copyright__": "Copyright (c) 2019-2021 Miroslav Stampar (@stamparm), MIT. See the file 'LICENSE' for copying permission",
+ "__notice__": "The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software",
+ "__url__": "Taken from: https://raw.githubusercontent.com/stamparm/identYwaf/refs/heads/master/data.json",
+
+ "wafs": {
+ "360": {
+ "company": "360",
+ "name": "360",
+ "regex": "493|/wzws-waf-cgi/",
+ "signatures": [
+ "9778:RVZXum61OEhCWapBYKcPk4JzWOpohM4JiUcMr2RXg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tKaK99n+i7c4VmkwI3FZjxtDtAeq+c36A5chW1XaTC",
+ "9ccc:RVZXum61OEhCWapBYKcPk4JzWOpohM4JiUcMr2RXg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tKaK99n+i7c4VmkwI3FZjxtDtAeq+c36A4chW1XaTC"
+ ]
+ },
+ "aesecure": {
+ "company": "aeSecure",
+ "name": "aeSecure",
+ "regex": "aesecure_denied\\.png|aesecure-code: \\d+",
+ "signatures": [
+ "8a4b:RVdXu260OEhCWapBYKcPk4JzWOtohM4JiUcMrmRXg1uQJbX3uhdOn9htOj+hX7AB16FcPxJOdLsXo2tKaK99n+i7c4RmkgI2FZnxtDtBeq+c36A4chW1XaTD"
+ ]
+ },
+ "airlock": {
+ "company": "Phion/Ergon",
+ "name": "Airlock",
+ "regex": "The server detected a syntax error in your request",
+ "signatures": [
+ "3e2c:RVZXu261OEhCWapBYKcPk4JzWOtohM4IiUcMr2RXg1uQJbX3uhdOn9htOj+hX7AB16FcPxJPdLsXomtKaK59n+i6c4RmkwI2FZjxtDtAeq6c36A5chW1XaTD"
+ ]
+ },
+ "alertlogic": {
+ "company": "Alert Logic",
+ "name": "Alert Logic",
+ "regex": "(?s)timed_redirect\\(seconds, url\\).+?Reference ID:",
+ "signatures": []
+ },
+ "aliyundun": {
+ "company": "Alibaba Cloud Computing",
+ "name": "AliYunDun",
+ "regex": "Sorry, your request has been blocked as it may cause potential threats to the server's security|//errors\\.aliyun\\.com/",
+ "signatures": [
+ "e082:RVZXum61OElCWapAYKYPkoJzWOpohM4JiUYMr2RXg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tLaK99n+i7c4RmkgI2FZjxtDtAeq+c3qA4chW1XaTC"
+ ]
+ },
+ "anquanbao": {
+ "company": "Anquanbao",
+ "name": "Anquanbao",
+ "regex": "/aqb_cc/error/",
+ "signatures": [
+ "c790:RVZXum61OElCWapAYKYPk4JzWOpohM4JiUYMr2RXg1uQJbX3uhdOn9hsOj+hXrAB16FcPxJPdLsXo2tLaK99n+i7c4RmkgI2FZjxtDtAeq+c36A4chW1XaTC",
+ "d3d3:RVZXum61OElCWapAYKYPk4JzWOpohM4JiUYMr2RXg1uQJbX3uhdOn9hsOj+hXrAB16FcPxJPdLsXo2tLaK99n+i7c4RmkgI2FZjxtDtAeq+c3qA4chW1XaTC"
+ ]
+ },
+ "approach": {
+ "company": "Approach",
+ "name": "Approach",
+ "regex": "Approach.+?Web Application (Firewall|Filtering)",
+ "signatures": [
+ "fef0:RVZXum60OEhCWKpAYKYPkoJyWOpohM4IiUYMrmRWg1qQJLX2uhZOnthsOj6hXrAA16BcPhJOdLoXomtKaK59nui7c4RmkgI2FZjxtDtAeq+c36A5chW1XKTD"
+ ]
+ },
+ "armor": {
+ "company": "Armor Defense",
+ "name": "Armor Protection",
+ "regex": "This request has been blocked by website protection from Armor",
+ "signatures": [
+ "03ec:RVZXum60OEhCWapBYKYPk4JzWOtohM4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tKaK99n+i6c4RmkgI2FZjxtDtAeq6c36A4chS1XaTC",
+ "1160:RVZXum60OEhCWapBYKYPk4JyWOtohM4IiUcMr2RWg1qQJbX3uhZOnthsOj6hXrAA16BcPhJOdLoXo2tKaK99n+i6c4RmkgI2FZjxtDtAeq6c3qA4chS1XKTC"
+ ],
+ "note": "Uses SecureSphere (Imperva) (Reference: https://www.imperva.com/resources/case_studies/CS_Armor.pdf)"
+ },
+ "asm": {
+ "company": "F5 Networks",
+ "name": "Application Security Manager",
+ "regex": "The requested URL was rejected\\. Please consult with your administrator|security\\.f5aas\\.com",
+ "signatures": [
+ "2f81:RVZXum60OEhCWapBYKcPk4JzWOtohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hXrAB16FcPxJPdLsXo2tLaK99n+i7c4RmkgI3FZjxtDtAeq+c36A4chS1XaTC",
+ "4fd0:RVZXum60OEhCWapBYKcPk4JzWOtohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tLaK99n+i7c4RmkwI3FZjxtDtAeq6c3qA4chS1XaTC",
+ "5904:RVZXum60OEhCWapBYKcPk4JzWOpohc4IiUcMr2RWg1uQJbX3uhdOnthtOj+hXrAB16FcPxJPdLsXo2tLaK99n+i7c4RmkwI3FZjxtTtAeq+c3qA4chS1XaTC",
+ "8bcf:RVZXum60OEhCWapBYKcPk4JzWOtohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tLaK99n+i7c4RmkwI3FZjxtTtAeq6c36A5chS1XaTC",
+ "540f:RVZXum60OEhCWapBYKcPk4JzWOtohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tLaK99n+i7c4RmkwI3FZjxtTtAeq+c36A5chS1XaTC",
+ "c7ba:RVZXum60OEhCWKpAYKYPkoJzWOpohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXomtLaK99n+i7c4VmkwI3FZjxtDtAeq6c3qA4chS1XaTC",
+ "fb21:RVZXum60OEhCWapBYKcPk4JzWOpohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tLaK99n+i7c4RmkgI3FZjxtDtAeq+c36A5chW1XaTC",
+ "b6ff:RVZXum61OEhCWapBYKcPkoJzWOtohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tLaK99n+i7c4RmkwI3FZjxtDtAeq+c36A4chW1XaTC",
+ "3b1e:RVZXum60OEhCWapBYKcPk4JyWOpohM4IiUcMr2RWg1qQJLX3uhdOnthtOj+hXrAB16FcPxJPdLsXo2tKaK99nui7c4RmkgI2FZjxtDtAeq6c3qA5chS1XKTC",
+ "620c:RVZXum60OEhCWapBYKcPkoJzWOtohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tLaK99n+i7c4RmkgI2FZjxtDtAeq+c36A5chW1XaTC",
+ "b9a0:RVZXum60OEhCWapBYKcPk4JzWOtohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tLaK99n+i7c4RmkwI3FZjxtDtAeq+c3qA4chW1XaTC",
+ "ccb6:RVdXum61OEhCWapBYKcPk4JzWOtohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tLaK99n+i7c4RmkwI3FZjxtTtAeq+c36A5chW1XaTC",
+ "9138:RVZXum60OEhCWapBYKcPk4JzWOpohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tLaK99n+i7c4RmkwI3FZjxtDtAeq6c3qA4chS1XaTC",
+ "54cc:RVZXum61OEhCWapBYKcPkoJzWOtohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tLaK99n+i7c4RmkwI3FZjxtDtAeq6c3qA4chS1XaTC",
+ "4c83:RVZXum60OEhCWapBYKcPk4JzWOtohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tLaK99n+i7c4VmkwI3FZjxtDtAeq+c36A5chW1XaTC",
+ "8453:RVZXum60OEhCWapBYKcPk4JzWOtohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPxJPdLsXo2tLaK99n+i7c4RmkwI3FZjxtDtAeq+c36A4chS1XaTC"
+ ]
+ },
+ "astra": {
+ "company": "Czar Securities",
+ "name": "Astra",
+ "regex": "(?s)unfortunately our website protection system.+?//www\\.getastra\\.com",
+ "signatures": []
+ },
+ "aws": {
+ "company": "Amazon",
+ "name": "AWS WAF",
+ "regex": "(?i)HTTP/1.+\\b403\\b.+\\s+Server: aws|(?s)Request blocked.+?Generated by cloudfront",
+ "signatures": [
+ "2998:RVZXu261OEhCWapBYKcPk4JzWOpohM4IiUcMr2RWg1uQJbX3uhZOnthsOj6hXrAA16BcPhJOdLoXo2tKaK99n+i6c4RmkgI2FZjxtDtAeq6c3qA4chS1XKTC",
+ "fffa:RVZXum60OEhCWapAYKYPk4JyWOpohc4JiUcMr2RWg1uQJbX3uhdOnthtOj+hX7AB16FcPhJPdLsXo2tKaK99n+i6c4RmkgI2FZjxtDtAeq6c3qA4chS1XKTC",
+ "9de0:RVZXu261OEhCWapBYKcPk4JzWOpohM4IiUcMr2RWg1uQJbX3uhZOnthtOj+hXrAA16BcPhJOdLoXo2tKaK99n+i7c4RmkgI2FZjxtDtAeq6c3qA4chS1XKTC",
+ "34a8:RVZXu261OEhCWapBYKcPk4JzWOpohM4IiUcMr2RWg1uQJbX3uhdOn9htOj+hXrAB16BcPxJOdLsXo2tKaK99n+i7c4RmkgI2FZjxtDtAeq6c3qA4chS1XKTC",
+ "1104:RVZXum61OEhCWapBYKcPk4JzWOpohM4IiUcMr2RXg1uQJbX3uhZOnthsOj6hXrAA16BcPhJOdLoXomtKaK59n+i6c4RmkgI2FZjxtDtAeq6c3qA4chS1XKTC",
+ "ea40:RVZXu261OEhCWapBYKcPk4JzWOtohM4IiUcMr2RWg1uQJbX3uhdOn9htOj+hXrAB16BcPxJOdLsXo2tKaK99n+i7c4RmkgI2FZjxtDtAeq6c3qA4chS1XKTC"
+ ]
+ },
+ "barracuda": {
+ "company": "Barracuda Networks",
+ "name": "Barracuda",
+ "regex": "\\bbarracuda_|barra_counter_session=|when this page occurred and the event ID found at the bottom of the page|