diff --git a/.github/workflows/auto-merge.yaml b/.github/workflows/auto-merge.yaml index ad2890ddaf..f6bb2c5c25 100644 --- a/.github/workflows/auto-merge.yaml +++ b/.github/workflows/auto-merge.yaml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-latest if: github.actor == 'dependabot[bot]' steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 with: token: ${{ secrets.DEPENDABOT_PAT }} diff --git a/.github/workflows/compat-checks.yaml b/.github/workflows/compat-checks.yaml index 8a9080b904..093bd6ba01 100644 --- a/.github/workflows/compat-checks.yaml +++ b/.github/workflows/compat-checks.yaml @@ -13,7 +13,7 @@ jobs: permissions: contents: write steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go/compat-checks@v1 with: release-test: true diff --git a/.github/workflows/generate-docs.yaml b/.github/workflows/generate-docs.yaml index a68ff7d972..365ad32f93 100644 --- a/.github/workflows/generate-docs.yaml +++ b/.github/workflows/generate-docs.yaml @@ -11,7 +11,7 @@ jobs: if: "${{ !endsWith(github.actor, '[bot]') }}" runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/setup/git@v1 - run: make syntax-docs diff --git a/.github/workflows/generate-pgo.yaml b/.github/workflows/generate-pgo.yaml index 322467e704..39fc7e6a10 100644 --- a/.github/workflows/generate-pgo.yaml +++ b/.github/workflows/generate-pgo.yaml @@ -28,9 +28,10 @@ jobs: LIST_FILE: "/tmp/targets-${{ matrix.targets }}.txt" PROFILE_MEM: "/tmp/nuclei-profile-${{ matrix.targets }}-targets" steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/git@v1 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/nuclei@v1 - name: Generate list run: for i in {1..${{ matrix.targets }}}; do echo "https://honey.scanme.sh/?_=${i}" >> "${LIST_FILE}"; done # 
NOTE(dwisiswant0): use `-no-mhe` flag to get better samples. diff --git a/.github/workflows/govulncheck.yaml b/.github/workflows/govulncheck.yaml index 11898380ec..38edae2487 100644 --- a/.github/workflows/govulncheck.yaml +++ b/.github/workflows/govulncheck.yaml @@ -16,7 +16,7 @@ jobs: env: OUTPUT: "/tmp/results.sarif" steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - run: go install golang.org/x/vuln/cmd/govulncheck@latest - run: govulncheck -scan package -format sarif ./... > $OUTPUT diff --git a/.github/workflows/perf-regression.yaml b/.github/workflows/perf-regression.yaml index 8e7e7eed5f..7856067243 100644 --- a/.github/workflows/perf-regression.yaml +++ b/.github/workflows/perf-regression.yaml @@ -11,8 +11,10 @@ jobs: env: BENCH_OUT: "/tmp/bench.out" steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/go-rod-browser@v1 + - uses: projectdiscovery/actions/cache/nuclei@v1 - run: make build-test - run: ./bin/nuclei.test -test.run - -test.bench=. 
-test.benchmem ./cmd/nuclei/ | tee $BENCH_OUT env: diff --git a/.github/workflows/perf-test.yaml b/.github/workflows/perf-test.yaml index 4ee8408c9d..ff40b824b9 100644 --- a/.github/workflows/perf-test.yaml +++ b/.github/workflows/perf-test.yaml @@ -16,7 +16,7 @@ jobs: LIST_FILE: "/tmp/targets-${{ matrix.count }}.txt" PROFILE_MEM: "/tmp/nuclei-perf-test-${{ matrix.count }}" steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - run: make verify - name: Generate list diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 4d9d412dda..acaecb5969 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -10,7 +10,7 @@ jobs: release: runs-on: ubuntu-latest-16-cores steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 with: fetch-depth: 0 - uses: projectdiscovery/actions/setup/go@v1 diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml index efa88506da..43e4f0fb45 100644 --- a/.github/workflows/stale.yaml +++ b/.github/workflows/stale.yaml @@ -37,5 +37,4 @@ jobs: it, please comment or feel free to reopen it. 
close-issue-label: "Status: Abandoned" close-pr-label: "Status: Abandoned" - exempt-issue-labels: "Status: Abandoned" - exempt-pr-labels: "Status: Abandoned" + exempt-issue-labels: "Type: Enhancement" diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index d616e8b71c..42a46a67db 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -22,8 +22,9 @@ jobs: if: "${{ !endsWith(github.actor, '[bot]') }}" runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - uses: projectdiscovery/actions/golangci-lint/v2@v1 tests: @@ -35,8 +36,19 @@ jobs: os: [ubuntu-latest, windows-latest, macOS-latest] runs-on: "${{ matrix.os }}" steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/go-rod-browser@v1 + - uses: projectdiscovery/actions/cache/nuclei@v1 + - uses: projectdiscovery/actions/free-disk-space@v1 + with: + llvm: 'false' + php: 'false' + mongodb: 'false' + mysql: 'false' + misc-packages: 'false' + docker-images: 'false' + tools-cache: 'false' - run: make vet - run: make build - run: make test @@ -52,8 +64,10 @@ jobs: needs: ["tests"] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/go-rod-browser@v1 + - uses: projectdiscovery/actions/cache/nuclei@v1 - name: "Simple" run: go run . 
working-directory: examples/simple/ @@ -74,9 +88,11 @@ jobs: os: [ubuntu-latest, windows-latest, macOS-latest] runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/nuclei@v1 - uses: projectdiscovery/actions/setup/python@v1 + - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - run: bash run.sh "${{ matrix.os }}" env: GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" @@ -93,9 +109,11 @@ jobs: os: [ubuntu-latest, windows-latest, macOS-latest] runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/nuclei@v1 - uses: projectdiscovery/actions/setup/python@v1 + - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - run: bash run.sh env: GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" @@ -106,8 +124,9 @@ jobs: needs: ["tests"] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - run: make template-validate codeql: @@ -119,7 +138,7 @@ jobs: contents: read security-events: write steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: github/codeql-action/init@v4 with: languages: 'go' @@ -131,7 +150,7 @@ jobs: needs: ["tests"] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/goreleaser@v1 @@ -143,7 +162,7 @@ jobs: TARGET_URL: "http://scanme.sh/a/?b=c" runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - run: make build - name: "Setup environment (push)" if: ${{ github.event_name == 'push' }} diff --git a/README.md b/README.md index a422e5dad9..ae4f3eab32 100644 --- a/README.md +++ b/README.md @@ -140,7 +140,7 @@ 
TARGET: -u, -target string[] target URLs/hosts to scan -l, -list string path to file containing a list of target URLs/hosts to scan (one per line) -eh, -exclude-hosts string[] hosts to exclude to scan from the input list (ip, cidr, hostname) - -resume string resume scan using resume.cfg (clustering will be disabled) + -resume string resume scan from and save to specified file (clustering will be disabled) -sa, -scan-all-ips scan all the IP's associated with dns record -iv, -ip-version string[] IP version to scan of hostname (4,6) - (default 4) diff --git a/README_CN.md b/README_CN.md index 5fb4dd14e9..0396b0ba6e 100644 --- a/README_CN.md +++ b/README_CN.md @@ -119,7 +119,7 @@ Nuclei是一款注重于可配置性、可扩展性和易用性的基于模板 目标: -u, -target string[] 指定扫描的目标URL/主机(多个目标则指定多个-u参数) -l, -list string 指定包含要扫描的目标URL/主机列表的文件路径(一行一个) - -resume string 使用指定的resume.cfg文件恢复扫描(将禁用请求聚类) + -resume string 从指定文件恢复扫描并保存到指定文件(将禁用请求聚类) -sa, -scan-all-ips 扫描由目标解析出来的所有IP(针对域名对应多个IP的情况) -iv, -ip-version string[] 要扫描的主机名的IP版本(4,6)-(默认为4) diff --git a/README_ES.md b/README_ES.md index ec7949efef..4432699dc0 100644 --- a/README_ES.md +++ b/README_ES.md @@ -118,7 +118,7 @@ TARGET: -u, -target string[] URLs/hosts a escanear -l, -list string ruta al archivo que contiene la lista de URLs/hosts a escanear (uno por línea) -eh, -exclude-hosts string[] hosts a excluir para escanear de la lista de entrada (ip, cidr, hostname) - -resume string reanudar el escaneo usando resume.cfg (la clusterización quedará inhabilitada) + -resume string reanudar el escaneo desde y guardar en el archivo especificado (la clusterización quedará inhabilitada) -sa, -scan-all-ips escanear todas las IP asociadas al registro dns -iv, -ip-version string[] versión IP a escanear del nombre de host (4,6) - (por defecto 4) diff --git a/README_ID.md b/README_ID.md index 459352b4d5..db18e1db62 100644 --- a/README_ID.md +++ b/README_ID.md @@ -98,7 +98,7 @@ Flags: TARGET: -u, -target string[] target URLs/hosts to scan -l, -list string path to file 
containing a list of target URLs/hosts to scan (one per line) - -resume string resume scan using resume.cfg (clustering will be disabled) + -resume string resume scan from and save to specified file (clustering will be disabled) -sa, -scan-all-ips scan all the IP's associated with dns record -iv, -ip-version string[] IP version to scan of hostname (4,6) - (default 4) diff --git a/README_JP.md b/README_JP.md index d80fb4dfcb..33e5c282e7 100644 --- a/README_JP.md +++ b/README_JP.md @@ -113,7 +113,7 @@ Nucleiは、広範な設定可能性、大規模な拡張性、および使い ターゲット: -u, -target string[] スキャンする対象のURL/ホスト -l, -list string スキャンする対象のURL/ホストのリストが含まれているファイルへのパス(1行に1つ) - -resume string resume.cfgを使用してスキャンを再開(クラスタリングは無効になります) + -resume string 指定されたファイルからスキャンを再開し、指定されたファイルに保存(クラスタリングは無効になります) -sa, -scan-all-ips DNSレコードに関連付けられているすべてのIPをスキャン -iv, -ip-version string[] ホスト名のスキャンするIPバージョン(4,6)-(デフォルトは4) diff --git a/README_KR.md b/README_KR.md index d0828564ab..2b137443c7 100644 --- a/README_KR.md +++ b/README_KR.md @@ -96,7 +96,7 @@ Nuclei는 빠르고, 템플릿 기반의 취약점 스캐너로 TARGET: -u, -target string[] 스캔할 대상 URL/호스트 -l, -list string 스캔할 대상 URL/호스트 목록이 있는 파일 경로 (한 줄에 하나씩) - -resume string resume.cfg를 사용하여 스캔 재개 (클러스터링은 비활성화됨) + -resume string 지정된 파일에서 스캔을 재개하고 지정된 파일에 저장 (클러스터링은 비활성화됨) -sa, -scan-all-ips dns 레코드와 관련된 모든 IP 스캔 -iv, -ip-version string[] 스캔할 호스트의 IP 버전 (4,6) - (기본값 4) diff --git a/README_PT-BR.md b/README_PT-BR.md index e63a3d8a1e..64e4f4e307 100644 --- a/README_PT-BR.md +++ b/README_PT-BR.md @@ -118,7 +118,7 @@ TARGET: -u, -target string[] URLs/hosts a serem escaneados -l, -list string caminho do arquivo contendo a lista de URLs/hosts a serem escaneados (um por linha) -eh, -exclude-hosts string[] hosts a serem excluídos do escaneamento na lista de entrada (ip, cidr, hostname) - -resume string retomar o escaneamento usando resume.cfg (a clusterização será desabilitada) + -resume string retomar o escaneamento a partir de e salvar no arquivo especificado (a clusterização será desabilitada) -sa, 
-scan-all-ips escanear todos os IPs associados ao registro DNS -iv, -ip-version string[] versão de IP a escanear do nome do host (4,6) - (padrão 4) diff --git a/cmd/integration-test/integration-test.go b/cmd/integration-test/integration-test.go index 82ca1bc8ae..d64d242f5b 100644 --- a/cmd/integration-test/integration-test.go +++ b/cmd/integration-test/integration-test.go @@ -6,6 +6,7 @@ import ( "os" "regexp" "runtime" + "slices" "strings" "github.com/kitabisa/go-ci" @@ -24,7 +25,7 @@ type TestCaseInfo struct { } var ( - debug = os.Getenv("DEBUG") == "true" + debug = isDebugMode() customTests = os.Getenv("TESTS") protocol = os.Getenv("PROTO") @@ -60,6 +61,7 @@ var ( "matcher-status": matcherStatusTestcases, "exporters": exportersTestCases, } + // flakyTests are run with a retry count of 3 flakyTests = map[string]bool{ "protocols/http/self-contained-file-input.yaml": true, @@ -90,11 +92,12 @@ func main() { } // start fuzz playground server - defer fuzzplayground.Cleanup() server := fuzzplayground.GetPlaygroundServer() defer func() { + fuzzplayground.Cleanup() _ = server.Close() }() + go func() { if err := server.Start("localhost:8082"); err != nil { if !strings.Contains(err.Error(), "Server closed") { @@ -104,7 +107,6 @@ func main() { }() customTestsList := normalizeSplit(customTests) - failedTestTemplatePaths := runTests(customTestsList) if len(failedTestTemplatePaths) > 0 { @@ -131,6 +133,27 @@ func main() { } } +// isDebugMode checks if debug mode is enabled via any of the supported debug +// environment variables. 
+func isDebugMode() bool { + debugEnvVars := []string{ + "DEBUG", + "ACTIONS_RUNNER_DEBUG", // GitHub Actions runner debug + // Add more debug environment variables here as needed + } + + truthyValues := []string{"true", "1", "yes", "on", "enabled"} + + for _, envVar := range debugEnvVars { + envValue := strings.ToLower(strings.TrimSpace(os.Getenv(envVar))) + if slices.Contains(truthyValues, envValue) { + return true + } + } + + return false +} + // execute a testcase with retry and consider best of N // intended for flaky tests like interactsh func executeWithRetry(testCase testutils.TestCase, templatePath string, retryCount int) (string, error) { diff --git a/cmd/integration-test/javascript.go b/cmd/integration-test/javascript.go index 6e99b7f844..c0b8c19900 100644 --- a/cmd/integration-test/javascript.go +++ b/cmd/integration-test/javascript.go @@ -17,6 +17,7 @@ var jsTestcases = []TestCaseInfo{ {Path: "protocols/javascript/net-https.yaml", TestCase: &javascriptNetHttps{}}, {Path: "protocols/javascript/oracle-auth-test.yaml", TestCase: &javascriptOracleAuthTest{}, DisableOn: func() bool { return osutils.IsWindows() || osutils.IsOSX() }}, {Path: "protocols/javascript/vnc-pass-brute.yaml", TestCase: &javascriptVncPassBrute{}}, + {Path: "protocols/javascript/multi-ports.yaml", TestCase: &javascriptMultiPortsSSH{}}, } var ( @@ -167,6 +168,17 @@ func (j *javascriptVncPassBrute) Execute(filePath string) error { return multierr.Combine(errs...) 
} +type javascriptMultiPortsSSH struct{} + +func (j *javascriptMultiPortsSSH) Execute(filePath string) error { + // use scanme.sh as target to ensure we match on the 2nd default port 22 + results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "scanme.sh", debug) + if err != nil { + return err + } + return expectResultsCount(results, 1) +} + // purge any given resource if it is not nil func purge(resource *dockertest.Resource) { if resource != nil && pool != nil { diff --git a/cmd/integration-test/library.go b/cmd/integration-test/library.go index 3513b1d043..2c4cda5764 100644 --- a/cmd/integration-test/library.go +++ b/cmd/integration-test/library.go @@ -15,6 +15,7 @@ import ( "github.com/logrusorgru/aurora" "github.com/pkg/errors" "github.com/projectdiscovery/goflags" + "github.com/projectdiscovery/gologger" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/config" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/disk" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader" @@ -70,6 +71,7 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error) defaultOpts := types.DefaultOptions() defaultOpts.ExecutionId = "test" + defaultOpts.Logger = gologger.DefaultLogger mockProgress := &testutils.MockProgressClient{} reportingClient, err := reporting.New(&reporting.Options{ExecutionId: defaultOpts.ExecutionId}, "", false) diff --git a/cmd/nuclei/main.go b/cmd/nuclei/main.go index e2de5ff03b..1a51c9f010 100644 --- a/cmd/nuclei/main.go +++ b/cmd/nuclei/main.go @@ -194,8 +194,11 @@ func main() { }) } - // Setup graceful exits + // Setup filename for graceful exits resumeFileName := types.DefaultResumeFilePath() + if options.Resume != "" { + resumeFileName = options.Resume + } c := make(chan os.Signal, 1) signal.Notify(c, os.Interrupt) go func() { @@ -255,7 +258,7 @@ on extensive configurability, massive extensibility and ease of use.`) flagSet.StringSliceVarP(&options.Targets, "target", "u", nil, "target URLs/hosts to scan",
goflags.CommaSeparatedStringSliceOptions), flagSet.StringVarP(&options.TargetsFilePath, "list", "l", "", "path to file containing a list of target URLs/hosts to scan (one per line)"), flagSet.StringSliceVarP(&options.ExcludeTargets, "exclude-hosts", "eh", nil, "hosts to exclude to scan from the input list (ip, cidr, hostname)", goflags.FileCommaSeparatedStringSliceOptions), - flagSet.StringVar(&options.Resume, "resume", "", "resume scan using resume.cfg (clustering will be disabled)"), + flagSet.StringVar(&options.Resume, "resume", "", "resume scan from and save to specified file (clustering will be disabled)"), flagSet.BoolVarP(&options.ScanAllIPs, "scan-all-ips", "sa", false, "scan all the IP's associated with dns record"), flagSet.StringSliceVarP(&options.IPVersion, "ip-version", "iv", nil, "IP version to scan of hostname (4,6) - (default 4)", goflags.CommaSeparatedStringSliceOptions), ) diff --git a/cmd/nuclei/main_benchmark_test.go b/cmd/nuclei/main_benchmark_test.go index 04e17bf904..26d7a1965b 100644 --- a/cmd/nuclei/main_benchmark_test.go +++ b/cmd/nuclei/main_benchmark_test.go @@ -1,9 +1,13 @@ package main_test import ( + "fmt" "net/http" "net/http/httptest" "os" + "runtime" + "runtime/pprof" + "strings" "testing" "time" @@ -48,6 +52,31 @@ func TestMain(m *testing.M) { os.Exit(exitCode) } +// getUniqFilename generates a unique filename by appending .N if file exists +// Similar to wget's behavior: file.cpu.prof, file.cpu.1.prof, file.cpu.2.prof, etc. 
+func getUniqFilename(basePath string) string { + if _, err := os.Stat(basePath); os.IsNotExist(err) { + return basePath + } + + lastDot := strings.LastIndex(basePath, ".") + var name, ext string + if lastDot != -1 { + name = basePath[:lastDot] + ext = basePath[lastDot:] + } else { + name = basePath + ext = "" + } + + for i := 1; ; i++ { + newPath := fmt.Sprintf("%s.%d%s", name, i, ext) + if _, err := os.Stat(newPath); os.IsNotExist(err) { + return newPath + } + } +} + func getDefaultOptions() *types.Options { return &types.Options{ RemoteTemplateDomainList: []string{"cloud.projectdiscovery.io"}, @@ -106,24 +135,55 @@ func runEnumBenchmark(b *testing.B, options *types.Options) { } defer nucleiRunner.Close() - b.ResetTimer() + benchNameSlug := strings.ReplaceAll(b.Name(), "/", "-") + + // Start CPU profiling + cpuProfileBase := fmt.Sprintf("%s.cpu.prof", benchNameSlug) + cpuProfilePath := getUniqFilename(cpuProfileBase) + cpuProfile, err := os.Create(cpuProfilePath) + if err != nil { + b.Fatalf("failed to create CPU profile: %s", err) + } + defer func() { _ = cpuProfile.Close() }() + + if err := pprof.StartCPUProfile(cpuProfile); err != nil { + b.Fatalf("failed to start CPU profile: %s", err) + } + defer pprof.StopCPUProfile() + b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { if err := nucleiRunner.RunEnumeration(); err != nil { b.Fatalf("%s failed: %s", b.Name(), err) } } + + b.StopTimer() + + // Write heap profile + heapProfileBase := fmt.Sprintf("%s.heap.prof", benchNameSlug) + heapProfilePath := getUniqFilename(heapProfileBase) + heapProfile, err := os.Create(heapProfilePath) + if err != nil { + b.Fatalf("failed to create heap profile: %s", err) + } + defer func() { _ = heapProfile.Close() }() + + runtime.GC() // Force GC before heap profile + if err := pprof.WriteHeapProfile(heapProfile); err != nil { + b.Fatalf("failed to write heap profile: %s", err) + } } func BenchmarkRunEnumeration(b *testing.B) { // Default case: run enumeration with 
default options == all nuclei-templates - // b.Run("Default", func(b *testing.B) { - // options := getDefaultOptions() - // options.Targets = []string{targetURL} + b.Run("Default", func(b *testing.B) { + options := getDefaultOptions() + options.Targets = []string{targetURL} - // runEnumBenchmark(b, options) - // }) + runEnumBenchmark(b, options) + }) // Case: https://github.com/projectdiscovery/nuclei/pull/6258 b.Run("Multiproto", func(b *testing.B) { diff --git a/go.mod b/go.mod index c3a527351b..8bd2a75809 100644 --- a/go.mod +++ b/go.mod @@ -7,7 +7,7 @@ toolchain go1.24.4 require ( github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible github.com/andygrunwald/go-jira v1.16.1 - github.com/antchfx/htmlquery v1.3.4 + github.com/antchfx/htmlquery v1.3.5 github.com/bluele/gcache v0.0.2 github.com/go-playground/validator/v10 v10.26.0 github.com/go-rod/rod v0.116.2 @@ -22,12 +22,12 @@ require ( github.com/olekukonko/tablewriter v1.0.8 github.com/pkg/errors v0.9.1 github.com/projectdiscovery/clistats v0.1.1 - github.com/projectdiscovery/fastdialer v0.4.15 - github.com/projectdiscovery/hmap v0.0.95 + github.com/projectdiscovery/fastdialer v0.4.18 + github.com/projectdiscovery/hmap v0.0.97 github.com/projectdiscovery/interactsh v1.2.4 github.com/projectdiscovery/rawhttp v0.1.90 - github.com/projectdiscovery/retryabledns v1.0.108 - github.com/projectdiscovery/retryablehttp-go v1.0.131 + github.com/projectdiscovery/retryabledns v1.0.110 + github.com/projectdiscovery/retryablehttp-go v1.0.133 github.com/projectdiscovery/yamldoc-go v1.0.6 github.com/remeh/sizedwaitgroup v1.0.0 github.com/rs/xid v1.6.0 @@ -39,9 +39,9 @@ require ( github.com/valyala/fasttemplate v1.2.2 github.com/weppos/publicsuffix-go v0.50.0 go.uber.org/multierr v1.11.0 - golang.org/x/net v0.46.0 + golang.org/x/net v0.47.0 golang.org/x/oauth2 v0.30.0 - golang.org/x/text v0.30.0 + golang.org/x/text v0.31.0 gopkg.in/yaml.v2 v2.4.0 ) @@ -58,7 +58,7 @@ require ( github.com/alexsnet/go-vnc 
v0.1.0 github.com/alitto/pond v1.9.2 github.com/antchfx/xmlquery v1.4.4 - github.com/antchfx/xpath v1.3.3 + github.com/antchfx/xpath v1.3.5 github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 github.com/aws/aws-sdk-go-v2 v1.36.5 github.com/aws/aws-sdk-go-v2/config v1.29.17 @@ -87,30 +87,31 @@ require ( github.com/leslie-qiwa/flat v0.0.0-20230424180412-f9d1cf014baa github.com/lib/pq v1.10.9 github.com/mattn/go-sqlite3 v1.14.28 + github.com/maypok86/otter/v2 v2.2.1 github.com/mholt/archives v0.1.5 github.com/microsoft/go-mssqldb v1.9.2 github.com/ory/dockertest/v3 v3.12.0 github.com/praetorian-inc/fingerprintx v1.1.15 - github.com/projectdiscovery/dsl v0.8.4 + github.com/projectdiscovery/dsl v0.8.6 github.com/projectdiscovery/fasttemplate v0.0.2 github.com/projectdiscovery/gcache v0.0.0-20241015120333-12546c6e3f4c github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb github.com/projectdiscovery/goflags v0.1.74 - github.com/projectdiscovery/gologger v1.1.59 + github.com/projectdiscovery/gologger v1.1.62 github.com/projectdiscovery/gostruct v0.0.2 github.com/projectdiscovery/gozero v0.1.1-0.20251027191944-a4ea43320b81 - github.com/projectdiscovery/httpx v1.7.2-0.20250911192144-fc425deb041a + github.com/projectdiscovery/httpx v1.7.2 github.com/projectdiscovery/mapcidr v1.1.97 github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5 - github.com/projectdiscovery/networkpolicy v0.1.27 + github.com/projectdiscovery/networkpolicy v0.1.30 github.com/projectdiscovery/ratelimit v0.0.82 github.com/projectdiscovery/rdap v0.9.0 github.com/projectdiscovery/sarif v0.0.1 - github.com/projectdiscovery/tlsx v1.2.1 - github.com/projectdiscovery/uncover v1.1.0 - github.com/projectdiscovery/useragent v0.0.102 - github.com/projectdiscovery/utils v0.6.1-0.20251030144701-ce5c4b44e1e6 - github.com/projectdiscovery/wappalyzergo v0.2.54 + github.com/projectdiscovery/tlsx v1.2.2 + github.com/projectdiscovery/uncover v1.2.0 + 
github.com/projectdiscovery/useragent v0.0.104 + github.com/projectdiscovery/utils v0.7.3 + github.com/projectdiscovery/wappalyzergo v0.2.57 github.com/redis/go-redis/v9 v9.11.0 github.com/seh-msft/burpxml v1.0.1 github.com/shurcooL/graphql v0.0.0-20230722043721-ed46e5a46466 @@ -123,7 +124,7 @@ require ( github.com/zmap/zgrab2 v0.1.8 gitlab.com/gitlab-org/api/client-go v0.130.1 go.mongodb.org/mongo-driver v1.17.4 - golang.org/x/term v0.36.0 + golang.org/x/term v0.37.0 gopkg.in/yaml.v3 v3.0.1 moul.io/http2curl v1.0.0 ) @@ -181,6 +182,7 @@ require ( github.com/caddyserver/certmagic v0.19.2 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cenkalti/backoff/v5 v5.0.3 // indirect + github.com/censys/censys-sdk-go v0.19.1 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 // indirect @@ -211,6 +213,7 @@ require ( github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 // indirect github.com/ebitengine/purego v0.8.4 // indirect github.com/emirpasic/gods v1.18.1 // indirect + github.com/ericlagergren/decimal v0.0.0-20221120152707-495c53812d05 // indirect github.com/fatih/color v1.18.0 // indirect github.com/felixge/fgprof v0.9.5 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect @@ -275,7 +278,7 @@ require ( github.com/leodido/go-urn v1.4.0 // indirect github.com/libdns/libdns v0.2.1 // indirect github.com/logrusorgru/aurora/v4 v4.0.0 // indirect - github.com/lor00x/goldap v0.0.0-20180618054307-a546dffdd1a3 // indirect + github.com/lor00x/goldap v0.0.0-20240304151906-8d785c64d1c8 // indirect github.com/lucasb-eyer/go-colorful v1.2.0 // indirect github.com/lufia/plan9stats v0.0.0-20250821153705-5981dea3221d // indirect github.com/mackerelio/go-osstat v0.2.4 // indirect @@ -322,7 +325,7 @@ require ( github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // 
indirect github.com/projectdiscovery/asnmap v1.1.1 // indirect github.com/projectdiscovery/blackrock v0.0.1 // indirect - github.com/projectdiscovery/cdncheck v1.2.9 // indirect + github.com/projectdiscovery/cdncheck v1.2.12 // indirect github.com/projectdiscovery/freeport v0.0.7 // indirect github.com/projectdiscovery/ldapserver v1.0.2-0.20240219154113-dcc758ebc0cb // indirect github.com/projectdiscovery/machineid v0.0.0-20240226150047-2e2c51e35983 // indirect @@ -356,6 +359,7 @@ require ( github.com/vmihailenco/msgpack/v5 v5.3.4 // indirect github.com/vmihailenco/tagparser v0.1.2 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect + github.com/vulncheck-oss/go-exploit v1.51.0 // indirect github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect @@ -379,7 +383,7 @@ require ( go.opentelemetry.io/otel/trace v1.38.0 // indirect go4.org v0.0.0-20230225012048-214862532bf5 // indirect golang.org/x/arch v0.3.0 // indirect - golang.org/x/sync v0.17.0 // indirect + golang.org/x/sync v0.18.0 // indirect gopkg.in/djherbis/times.v1 v1.3.0 // indirect mellium.im/sasl v0.3.2 // indirect ) @@ -403,12 +407,12 @@ require ( go.etcd.io/bbolt v1.4.0 // indirect go.uber.org/zap v1.27.0 // indirect goftp.io/server/v2 v2.0.1 // indirect - golang.org/x/crypto v0.43.0 // indirect + golang.org/x/crypto v0.45.0 // indirect golang.org/x/exp v0.0.0-20250911091902-df9299821621 - golang.org/x/mod v0.28.0 // indirect - golang.org/x/sys v0.37.0 // indirect + golang.org/x/mod v0.29.0 // indirect + golang.org/x/sys v0.38.0 // indirect golang.org/x/time v0.14.0 // indirect - golang.org/x/tools v0.37.0 + golang.org/x/tools v0.38.0 google.golang.org/protobuf v1.36.6 // indirect gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect gopkg.in/corvus-ch/zbase32.v1 v1.0.0 // indirect @@ -421,4 +425,4 @@ require ( ) // https://go.dev/ref/mod#go-mod-file-retract -retract v3.2.0 // retract due to broken js protocol 
issue \ No newline at end of file +retract v3.2.0 // retract due to broken js protocol issue diff --git a/go.sum b/go.sum index a1063e60e0..aa5d566cdd 100644 --- a/go.sum +++ b/go.sum @@ -140,12 +140,13 @@ github.com/andygrunwald/go-jira v1.16.1 h1:WoQEar5XoDRAibOgKzTFELlPNlKAtnfWr296R github.com/andygrunwald/go-jira v1.16.1/go.mod h1:UQH4IBVxIYWbgagc0LF/k9FRs9xjIiQ8hIcC6HfLwFU= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= -github.com/antchfx/htmlquery v1.3.4 h1:Isd0srPkni2iNTWCwVj/72t7uCphFeor5Q8nCzj1jdQ= -github.com/antchfx/htmlquery v1.3.4/go.mod h1:K9os0BwIEmLAvTqaNSua8tXLWRWZpocZIH73OzWQbwM= +github.com/antchfx/htmlquery v1.3.5 h1:aYthDDClnG2a2xePf6tys/UyyM/kRcsFRm+ifhFKoU0= +github.com/antchfx/htmlquery v1.3.5/go.mod h1:5oyIPIa3ovYGtLqMPNjBF2Uf25NPCKsMjCnQ8lvjaoA= github.com/antchfx/xmlquery v1.4.4 h1:mxMEkdYP3pjKSftxss4nUHfjBhnMk4imGoR96FRY2dg= github.com/antchfx/xmlquery v1.4.4/go.mod h1:AEPEEPYE9GnA2mj5Ur2L5Q5/2PycJ0N9Fusrx9b12fc= -github.com/antchfx/xpath v1.3.3 h1:tmuPQa1Uye0Ym1Zn65vxPgfltWb/Lxu2jeqIGteJSRs= github.com/antchfx/xpath v1.3.3/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= +github.com/antchfx/xpath v1.3.5 h1:PqbXLC3TkfeZyakF5eeh3NTWEbYl4VHNVeufANzDbKQ= +github.com/antchfx/xpath v1.3.5/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= @@ -233,6 +234,8 @@ github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyY github.com/cenkalti/backoff/v5 v5.0.3 
h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM= github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/censys/censys-sdk-go v0.19.1 h1:CG8rQKgwrKuoICd3oU0uddALMfJnboeMkDg/e74HYyc= +github.com/censys/censys-sdk-go v0.19.1/go.mod h1:DgPz5NgL+EfoueXLPG9UG1e7hS0OhtlywgpkIuu3ZRE= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -336,6 +339,8 @@ github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymF github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/ericlagergren/decimal v0.0.0-20221120152707-495c53812d05 h1:S92OBrGuLLZsyM5ybUzgc/mPjIYk2AZqufieooe98uw= +github.com/ericlagergren/decimal v0.0.0-20221120152707-495c53812d05/go.mod h1:M9R1FoZ3y//hwwnJtO51ypFGwm8ZfpxPT/ZLtO1mcgQ= github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= @@ -678,8 +683,9 @@ github.com/logrusorgru/aurora v2.0.3+incompatible h1:tOpm7WcpBTn4fjmVfgpQq0EfczG github.com/logrusorgru/aurora v2.0.3+incompatible/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= github.com/logrusorgru/aurora/v4 v4.0.0 h1:sRjfPpun/63iADiSvGGjgA1cAYegEWMPCJdUpJYn9JA= github.com/logrusorgru/aurora/v4 v4.0.0/go.mod 
h1:lP0iIa2nrnT/qoFXcOZSrZQpJ1o6n2CUf/hyHi2Q4ZQ= -github.com/lor00x/goldap v0.0.0-20180618054307-a546dffdd1a3 h1:wIONC+HMNRqmWBjuMxhatuSzHaljStc4gjDeKycxy0A= github.com/lor00x/goldap v0.0.0-20180618054307-a546dffdd1a3/go.mod h1:37YR9jabpiIxsb8X9VCIx8qFOjTDIIrIHHODa8C4gz0= +github.com/lor00x/goldap v0.0.0-20240304151906-8d785c64d1c8 h1:z9RDOBcFcf3f2hSfKuoM3/FmJpt8M+w0fOy4wKneBmc= +github.com/lor00x/goldap v0.0.0-20240304151906-8d785c64d1c8/go.mod h1:37YR9jabpiIxsb8X9VCIx8qFOjTDIIrIHHODa8C4gz0= github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= github.com/lufia/plan9stats v0.0.0-20250821153705-5981dea3221d h1:vFzYZc8yji+9DmNRhpEbs8VBK4CgV/DPfGzeVJSSp/8= @@ -701,6 +707,8 @@ github.com/mattn/go-sqlite3 v1.14.28 h1:ThEiQrnbtumT+QMknw63Befp/ce/nUPgBPMlRFEu github.com/mattn/go-sqlite3 v1.14.28/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/maypok86/otter/v2 v2.2.1 h1:hnGssisMFkdisYcvQ8L019zpYQcdtPse+g0ps2i7cfI= +github.com/maypok86/otter/v2 v2.2.1/go.mod h1:1NKY9bY+kB5jwCXBJfE59u+zAwOt6C7ni1FTlFFMqVs= github.com/mholt/acmez v1.2.0 h1:1hhLxSgY5FvH5HCnGUuwbKY2VQVo8IU7rxXKSnZ7F30= github.com/mholt/acmez v1.2.0/go.mod h1:VT9YwH1xgNX1kmYY89gY8xPJC84BFAisjo8Egigt4kE= github.com/mholt/archives v0.1.5 h1:Fh2hl1j7VEhc6DZs2DLMgiBNChUux154a1G+2esNvzQ= @@ -817,14 +825,14 @@ github.com/projectdiscovery/asnmap v1.1.1 h1:ImJiKIaACOT7HPx4Pabb5dksolzaFYsD1kI github.com/projectdiscovery/asnmap v1.1.1/go.mod h1:QT7jt9nQanj+Ucjr9BqGr1Q2veCCKSAVyUzLXfEcQ60= github.com/projectdiscovery/blackrock v0.0.1 h1:lHQqhaaEFjgf5WkuItbpeCZv2DUIE45k0VbGJyft6LQ= github.com/projectdiscovery/blackrock v0.0.1/go.mod 
h1:ANUtjDfaVrqB453bzToU+YB4cUbvBRpLvEwoWIwlTss= -github.com/projectdiscovery/cdncheck v1.2.9 h1:DsT+uZdGduJSsSrTbFRl1JDcsDHrPKi0v+/KziQnuTw= -github.com/projectdiscovery/cdncheck v1.2.9/go.mod h1:ibL9HoZs2JYTEUBOZo4f+W+XEzQifFLOf4bpgFStgj4= +github.com/projectdiscovery/cdncheck v1.2.12 h1:hS+CW0S2xOBIq7inKLp8u0/714S/t+X52S5iamVkp+U= +github.com/projectdiscovery/cdncheck v1.2.12/go.mod h1:Rc1G0QQdv3F+n003fksivUmTGitvb5+Gf36FpWkPQZw= github.com/projectdiscovery/clistats v0.1.1 h1:8mwbdbwTU4aT88TJvwIzTpiNeow3XnAB72JIg66c8wE= github.com/projectdiscovery/clistats v0.1.1/go.mod h1:4LtTC9Oy//RiuT1+76MfTg8Hqs7FQp1JIGBM3nHK6a0= -github.com/projectdiscovery/dsl v0.8.4 h1:p3rvzJae9BecOMufdYex3DX9zZeQNaXwVQe4kCEAOtE= -github.com/projectdiscovery/dsl v0.8.4/go.mod h1:msE7dGAuHYRrKddEwB1yoQ5dHrzzyimQUjsGDsMDis8= -github.com/projectdiscovery/fastdialer v0.4.15 h1:AHDgyydTdE5uUHGwzpvIDslY2AQn1kVq79gKEgFGAbE= -github.com/projectdiscovery/fastdialer v0.4.15/go.mod h1:X0l4+KqOE/aIL00pyTnBj4pWQDPYnCGL7cwZsJu6SCQ= +github.com/projectdiscovery/dsl v0.8.6 h1:kwtJn53UtDVX5vzmSmD/vDS8f1sR5yFyQchFd8Y2Oh8= +github.com/projectdiscovery/dsl v0.8.6/go.mod h1:bKSpMqLfeSllWPHlRuw/L0afAUYL2omA7sT6fGj8Nhc= +github.com/projectdiscovery/fastdialer v0.4.18 h1:jM3DlFjpy+NdmEpMpVdPF6J6hT0tDQcaKX0K2MX8xNU= +github.com/projectdiscovery/fastdialer v0.4.18/go.mod h1:zfpqgPL0LmQMxHC9t0np8B7gHHgYwcQ55f8/NJyUPKM= github.com/projectdiscovery/fasttemplate v0.0.2 h1:h2cISk5xDhlJEinlBQS6RRx0vOlOirB2y3Yu4PJzpiA= github.com/projectdiscovery/fasttemplate v0.0.2/go.mod h1:XYWWVMxnItd+r0GbjA1GCsUopMw1/XusuQxdyAIHMCw= github.com/projectdiscovery/freeport v0.0.7 h1:Q6uXo/j8SaV/GlAHkEYQi8WQoPXyJWxyspx+aFmz9Qk= @@ -835,16 +843,16 @@ github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb h1:rutG90 github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb/go.mod h1:FLjF1DmZ+POoGEiIQdWuYVwS++C/GwpX8YaCsTSm1RY= github.com/projectdiscovery/goflags v0.1.74 
h1:n85uTRj5qMosm0PFBfsvOL24I7TdWRcWq/1GynhXS7c= github.com/projectdiscovery/goflags v0.1.74/go.mod h1:UMc9/7dFz2oln+10tv6cy+7WZKTHf9UGhaNkF95emh4= -github.com/projectdiscovery/gologger v1.1.59 h1:3XFidZHrUqtvL1CUbw7L1jtwiUmTZxT2CoQ0I/yiNh4= -github.com/projectdiscovery/gologger v1.1.59/go.mod h1:8FJFKmo0N4ITIH3n1Jy4ze6ijr+mA3t78g+VpN8uBRU= +github.com/projectdiscovery/gologger v1.1.62 h1:wzKqvL6HQRzf0/PpBEhInZqqL1q4mKe2gFGJeDG3FqE= +github.com/projectdiscovery/gologger v1.1.62/go.mod h1:YWvMSxlHybU3SkFCcWn+driSJ8yY+3CR3g/textnp+Y= github.com/projectdiscovery/gostruct v0.0.2 h1:s8gP8ApugGM4go1pA+sVlPDXaWqNP5BBDDSv7VEdG1M= github.com/projectdiscovery/gostruct v0.0.2/go.mod h1:H86peL4HKwMXcQQtEa6lmC8FuD9XFt6gkNR0B/Mu5PE= github.com/projectdiscovery/gozero v0.1.1-0.20251027191944-a4ea43320b81 h1:yHh46pJovYbyiaHCV7oIDinFmy+Fyq36H1BowJgb0M0= github.com/projectdiscovery/gozero v0.1.1-0.20251027191944-a4ea43320b81/go.mod h1:9lmGPBDGZVANzCGjQg+V32n8Y3Cgjo/4kT0E88lsVTI= -github.com/projectdiscovery/hmap v0.0.95 h1:OO6MCySlK2xMzvJmsYUwdaI7YWv/U437OtsN0Ovw72k= -github.com/projectdiscovery/hmap v0.0.95/go.mod h1:KiTRdGd/GzX7uaoFWPrPBxPf4X/uZ9HTQ9dQ8x7x1bo= -github.com/projectdiscovery/httpx v1.7.2-0.20250911192144-fc425deb041a h1:5NBp4BegAQuT3QSnbBKt05LH1nOyEeFAXYh1+aE3Nlo= -github.com/projectdiscovery/httpx v1.7.2-0.20250911192144-fc425deb041a/go.mod h1:SQl92RiEuBnv1QQ8aQLC3b1lfgGHttoqUV0cTTvlzxQ= +github.com/projectdiscovery/hmap v0.0.97 h1:rfJt44HOaK5/zkeQXXoDl5tCuiUpc0chuYN43QPnm+E= +github.com/projectdiscovery/hmap v0.0.97/go.mod h1:x7K+2xTDMYLilpZjnnaXufesVVUSfxttXgqsuYvQJVk= +github.com/projectdiscovery/httpx v1.7.2 h1:AfJ5wjhKOlywX+x+gPO4iPqgFEyoIJwvXsLpQQgs4+c= +github.com/projectdiscovery/httpx v1.7.2/go.mod h1:hm0uTQGUTU1K0AQ1NQVfFrKfiS4u9Ynh8wArdXUXBS4= github.com/projectdiscovery/interactsh v1.2.4 h1:WUSj+fxbcV53J64oIAhbYzCKD1w/IyenyRBhkI5jiqI= github.com/projectdiscovery/interactsh v1.2.4/go.mod h1:E/IVNZ80/WKz8zTwGJWQygxIbhlRmuzZFsZwcGSZTdc= 
github.com/projectdiscovery/ldapserver v1.0.2-0.20240219154113-dcc758ebc0cb h1:MGtI4oE12ruWv11ZlPXXd7hl/uAaQZrFvrIDYDeVMd8= @@ -855,32 +863,32 @@ github.com/projectdiscovery/mapcidr v1.1.97 h1:7FkxNNVXp+m1rIu5Nv/2SrF9k4+LwP8Qu github.com/projectdiscovery/mapcidr v1.1.97/go.mod h1:9dgTJh1SP02gYZdpzMjm6vtYFkEHQHoTyaVNvaeJ7lA= github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5 h1:L/e8z8yw1pfT6bg35NiN7yd1XKtJap5Nk6lMwQ0RNi8= github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5/go.mod h1:pGW2ncnTxTxHtP9wzcIJAB+3/NMp6IiuQWd2NK7K+oc= -github.com/projectdiscovery/networkpolicy v0.1.27 h1:GsbvDIW3nPstAx8Beke6rtn95PhXnOcoXrnjcohn5Xk= -github.com/projectdiscovery/networkpolicy v0.1.27/go.mod h1:/3XfgnxKNuxaTZc6wZ/Pq6fiKvK8N4OQyLmfcUeDk2E= +github.com/projectdiscovery/networkpolicy v0.1.30 h1:XvzvRxldndEk7eRwSvuJ4xtGSzSgwZsrZB9VuUEfR1A= +github.com/projectdiscovery/networkpolicy v0.1.30/go.mod h1:/aJZmi2/d41O67wBcTkee/LWhyJnlKxCuWe6cUN2SNU= github.com/projectdiscovery/ratelimit v0.0.82 h1:rtO5SQf5uQFu5zTahTaTcO06OxmG8EIF1qhdFPIyTak= github.com/projectdiscovery/ratelimit v0.0.82/go.mod h1:z076BrLkBb5yS7uhHNoCTf8X/BvFSGRxwQ8EzEL9afM= github.com/projectdiscovery/rawhttp v0.1.90 h1:LOSZ6PUH08tnKmWsIwvwv1Z/4zkiYKYOSZ6n+8RFKtw= github.com/projectdiscovery/rawhttp v0.1.90/go.mod h1:VZYAM25UI/wVB3URZ95ZaftgOnsbphxyAw/XnQRRz4Y= github.com/projectdiscovery/rdap v0.9.0 h1:wPhHx5pQ2QI+WGhyNb2PjhTl0NtB39Nk7YFZ9cp8ZGA= github.com/projectdiscovery/rdap v0.9.0/go.mod h1:zk4yrJFQ2Hy36Aqk+DvotYQxYAeALaCJ5ORySkff36Q= -github.com/projectdiscovery/retryabledns v1.0.108 h1:47LYRW2LY/0cDnZQfUhoOHNxe9rNc9NQ9ZfNrV/GbyM= -github.com/projectdiscovery/retryabledns v1.0.108/go.mod h1:j7H7K6JZePh9PeNleeRUtDSrkUKMpwDhZw3Ogewzio8= -github.com/projectdiscovery/retryablehttp-go v1.0.131 h1:OU2x9fVDIWnDoKvT8tKbaCONTL1gHnTOIFQFXmnEOE0= -github.com/projectdiscovery/retryablehttp-go v1.0.131/go.mod h1:ttW+Zka1L8IwEUhJ4zArbC+pKZum7b47fzV+4VGN6cA= 
+github.com/projectdiscovery/retryabledns v1.0.110 h1:24p1PzWBdfsRnGsBf6ZxXPzvK0sYaL4q/ju4+2OhJzU= +github.com/projectdiscovery/retryabledns v1.0.110/go.mod h1:GFj5HjxfaGrZeoYf79zI/R99XljBNjmOqNvwOqPepRU= +github.com/projectdiscovery/retryablehttp-go v1.0.133 h1:uAIGwsRelrS1Ulelyp9qLtZRDTFHixw4O0cUQWLhTJQ= +github.com/projectdiscovery/retryablehttp-go v1.0.133/go.mod h1:9DU57ezv5cfZSWw/m5XFDTMjy1yKeMyn1kj35lPlcfM= github.com/projectdiscovery/sarif v0.0.1 h1:C2Tyj0SGOKbCLgHrx83vaE6YkzXEVrMXYRGLkKCr/us= github.com/projectdiscovery/sarif v0.0.1/go.mod h1:cEYlDu8amcPf6b9dSakcz2nNnJsoz4aR6peERwV+wuQ= github.com/projectdiscovery/stringsutil v0.0.2 h1:uzmw3IVLJSMW1kEg8eCStG/cGbYYZAja8BH3LqqJXMA= github.com/projectdiscovery/stringsutil v0.0.2/go.mod h1:EJ3w6bC5fBYjVou6ryzodQq37D5c6qbAYQpGmAy+DC0= -github.com/projectdiscovery/tlsx v1.2.1 h1:R8QgKb/vxd6Y0cfGFBYs4nn0zodHABeeLPqJjs2mNrA= -github.com/projectdiscovery/tlsx v1.2.1/go.mod h1:p19UHGQ6bvcbvhO4NvYBKOxlE4QvrUaectx9g/Mm3JA= -github.com/projectdiscovery/uncover v1.1.0 h1:UDp/qLZn78YZb6VPoOrfyP1vz+ojEx8VrTTyjjRt9UU= -github.com/projectdiscovery/uncover v1.1.0/go.mod h1:2rXINmMe/lmVAt2jn9CpAOs9An57/JEeLZobY3Z9kUs= -github.com/projectdiscovery/useragent v0.0.102 h1:Xfr8a7LQhIu0zeSz5gBxGCdyuqZbhkOMAEQUcEZXyBU= -github.com/projectdiscovery/useragent v0.0.102/go.mod h1:DIfLRBKZ6dLhHRnMYkxdg6Jpu0kpE3pJlMG94dsIchY= -github.com/projectdiscovery/utils v0.6.1-0.20251030144701-ce5c4b44e1e6 h1:nvszzYNHYnc8X+Dm68zMuYNNesZJp7QWfe8EEyL4azc= -github.com/projectdiscovery/utils v0.6.1-0.20251030144701-ce5c4b44e1e6/go.mod h1:GOjhpPLmpMHcYJKI0vhjvjdczMQf3jWdUgYiBeKkwVk= -github.com/projectdiscovery/wappalyzergo v0.2.54 h1:8w0qUb0dO9N5FN1y4M8pIzDNqLCj0MrITqV/1xp05Lw= -github.com/projectdiscovery/wappalyzergo v0.2.54/go.mod h1:lwuDLdAqWDZ1IL8OQnoNQ0t17UP9AQSvVuFcDAm4FpQ= +github.com/projectdiscovery/tlsx v1.2.2 h1:Y96QBqeD2anpzEtBl4kqNbwzXh2TrzJuXfgiBLvK+SE= +github.com/projectdiscovery/tlsx v1.2.2/go.mod 
h1:ZJl9F1sSl0sdwE+lR0yuNHVX4Zx6tCSTqnNxnHCFZB4= +github.com/projectdiscovery/uncover v1.2.0 h1:31tjYa0v8FB8Ch8hJTxb+2t63vsljdOo0OSFylJcX4M= +github.com/projectdiscovery/uncover v1.2.0/go.mod h1:ozqKb++p39Kmh1SmwIpbQ9p0aVGPXuwsb4/X2Kvx6ms= +github.com/projectdiscovery/useragent v0.0.104 h1:Gxy5UrZ494Ce8CWicFECqc8LGRVknK5duUfBU39VhG0= +github.com/projectdiscovery/useragent v0.0.104/go.mod h1:dq6N53FgzL5xEQkFBSpAtK70TbEOHZPLeG7LOorSS14= +github.com/projectdiscovery/utils v0.7.3 h1:kX+77AA58yK6EZgkTRJEnK9V/7AZYzlXdcu/o/kJhFs= +github.com/projectdiscovery/utils v0.7.3/go.mod h1:uDdQ3/VWomai98l+a3Ye/srDXdJ4xUIar/mSXlQ9gBM= +github.com/projectdiscovery/wappalyzergo v0.2.57 h1:g3M/lEX0epjY2pL5diqVtn3J0meLg8cG6qzX0pCAW+4= +github.com/projectdiscovery/wappalyzergo v0.2.57/go.mod h1:lwuDLdAqWDZ1IL8OQnoNQ0t17UP9AQSvVuFcDAm4FpQ= github.com/projectdiscovery/yamldoc-go v1.0.6 h1:GCEdIRlQjDux28xTXKszM7n3jlMf152d5nqVpVoetas= github.com/projectdiscovery/yamldoc-go v1.0.6/go.mod h1:R5lWrNzP+7Oyn77NDVPnBsxx2/FyQZBBkIAaSaCQFxw= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= @@ -1051,6 +1059,8 @@ github.com/vmihailenco/tagparser v0.1.2 h1:gnjoVuB/kljJ5wICEEOpx98oXMWPLj22G67Vb github.com/vmihailenco/tagparser v0.1.2/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= +github.com/vulncheck-oss/go-exploit v1.51.0 h1:HTmJ4Q94tbEDPb35mQZn6qMg4rT+Sw9n+L7g3Pjr+3o= +github.com/vulncheck-oss/go-exploit v1.51.0/go.mod h1:J28w0dLnA6DnCrnBm9Sbt6smX8lvztnnN2wCXy7No6c= github.com/weppos/publicsuffix-go v0.12.0/go.mod h1:z3LCPQ38eedDQSwmsSRW4Y7t2L8Ln16JPQ02lHAdn5k= github.com/weppos/publicsuffix-go v0.13.0/go.mod h1:z3LCPQ38eedDQSwmsSRW4Y7t2L8Ln16JPQ02lHAdn5k= github.com/weppos/publicsuffix-go v0.30.0/go.mod h1:kBi8zwYnR0zrbm8RcuN1o9Fzgpnnn+btVN8uWPMyXAY= @@ 
-1204,8 +1214,8 @@ golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= -golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= -golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1243,8 +1253,8 @@ golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.28.0 h1:gQBtGhjxykdjY9YhZpSlZIsbnaE2+PgjfLWUQTnoZ1U= -golang.org/x/mod v0.28.0/go.mod h1:yfB/L0NOf/kmEbXjzCPOx1iK1fRutOydrCMsqRhEBxI= +golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= +golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1295,8 +1305,8 @@ 
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= -golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= -golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -1325,8 +1335,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.17.0 h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= -golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1391,8 +1401,8 @@ golang.org/x/sys v0.13.0/go.mod 
h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= -golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= @@ -1406,8 +1416,8 @@ golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= -golang.org/x/term v0.36.0 h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= -golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1424,8 +1434,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= 
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= -golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -1479,8 +1489,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= -golang.org/x/tools v0.37.0 h1:DVSRzp7FwePZW356yEAChSdNcQo6Nsp+fex1SUW09lE= -golang.org/x/tools v0.37.0/go.mod h1:MBN5QPQtLMHVdvsbtarmTNukZDdgwdwlO5qGacAzF0w= +golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= +golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= diff --git a/integration_tests/protocols/javascript/multi-ports.yaml 
b/integration_tests/protocols/javascript/multi-ports.yaml new file mode 100644 index 0000000000..aa1c5fe2d9 --- /dev/null +++ b/integration_tests/protocols/javascript/multi-ports.yaml @@ -0,0 +1,28 @@ +id: multi-ports + +info: + name: Multi Ports - Detection + author: pdteam + severity: info + description: | + Multi Ports template for testing + metadata: + max-request: 1 + tags: js,detect,multi-ports,enum,network + +javascript: + - pre-condition: | + isPortOpen(Host,Port); + code: | + var m = require("nuclei/ssh"); + var c = m.SSHClient(); + var response = c.ConnectSSHInfoMode(Host, Port); + Export(response); + args: + Host: "{{Host}}" + Port: "2222,22" # Port 22 should match + + extractors: + - type: json + json: + - '.UserAuth' \ No newline at end of file diff --git a/internal/pdcp/writer.go b/internal/pdcp/writer.go index 778d2ccc98..602942642e 100644 --- a/internal/pdcp/writer.go +++ b/internal/pdcp/writer.go @@ -104,7 +104,7 @@ func NewUploadWriter(ctx context.Context, logger *gologger.Logger, creds *pdcpau // SetScanID sets the scan id for the upload writer func (u *UploadWriter) SetScanID(id string) error { if !xidRegex.MatchString(id) { - return fmt.Errorf("invalid scan id provided") + gologger.Warning().Msgf("invalid asset id provided (unknown xid format): %s", id) } u.scanID = id return nil diff --git a/internal/runner/runner.go b/internal/runner/runner.go index 236ca3d6d0..0c5573519e 100644 --- a/internal/runner/runner.go +++ b/internal/runner/runner.go @@ -254,8 +254,23 @@ func New(options *types.Options) (*Runner, error) { os.Exit(0) } + tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*") + if err != nil { + return nil, errors.Wrap(err, "could not create temporary directory") + } + runner.tmpDir = tmpDir + + // Cleanup tmpDir only if initialization fails + // On successful initialization, Close() method will handle cleanup + cleanupOnError := true + defer func() { + if cleanupOnError && runner.tmpDir != "" { + _ = os.RemoveAll(runner.tmpDir) + } + }() + // 
create the input provider and load the inputs - inputProvider, err := provider.NewInputProvider(provider.InputOptions{Options: options}) + inputProvider, err := provider.NewInputProvider(provider.InputOptions{Options: options, TempDir: runner.tmpDir}) if err != nil { return nil, errors.Wrap(err, "could not create input provider") } @@ -386,10 +401,8 @@ func New(options *types.Options) (*Runner, error) { } runner.rateLimiter = utils.GetRateLimiter(context.Background(), options.RateLimit, options.RateLimitDuration) - if tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*"); err == nil { - runner.tmpDir = tmpDir - } - + // Initialization successful, disable cleanup on error + cleanupOnError = false return runner, nil } diff --git a/lib/config.go b/lib/config.go index cdc56ce063..f624ef42e7 100644 --- a/lib/config.go +++ b/lib/config.go @@ -3,6 +3,7 @@ package nuclei import ( "context" "errors" + "os" "time" "github.com/projectdiscovery/goflags" @@ -559,3 +560,18 @@ func WithOptions(opts *pkgtypes.Options) NucleiSDKOptions { return nil } } + +// WithTemporaryDirectory allows setting a parent directory for SDK-managed temporary files. +// A temporary directory will be created inside the provided directory and cleaned up on engine close. +// If not set, a temporary directory will be automatically created in the system temp location. +// The parent directory is assumed to exist. 
+func WithTemporaryDirectory(parentDir string) NucleiSDKOptions { + return func(e *NucleiEngine) error { + tmpDir, err := os.MkdirTemp(parentDir, "nuclei-tmp-*") + if err != nil { + return err + } + e.tmpDir = tmpDir + return nil + } +} diff --git a/lib/sdk.go b/lib/sdk.go index 3ed252178f..99523c79a0 100644 --- a/lib/sdk.go +++ b/lib/sdk.go @@ -5,6 +5,7 @@ import ( "bytes" "context" "io" + "os" "sync" "github.com/projectdiscovery/gologger" @@ -92,6 +93,9 @@ type NucleiEngine struct { // Logger instance for the engine Logger *gologger.Logger + + // Temporary directory for SDK-managed template files + tmpDir string } // LoadAllTemplates loads all nuclei template based on given options @@ -231,6 +235,9 @@ func (e *NucleiEngine) closeInternal() { if e.httpxClient != nil { _ = e.httpxClient.Close() } + if e.tmpDir != "" { + _ = os.RemoveAll(e.tmpDir) + } } // Close all resources used by nuclei engine diff --git a/lib/sdk_private.go b/lib/sdk_private.go index d80a0fd068..ba394f024c 100644 --- a/lib/sdk_private.go +++ b/lib/sdk_private.go @@ -3,6 +3,7 @@ package nuclei import ( "context" "fmt" + "os" "strings" "sync" "time" @@ -170,20 +171,29 @@ func (e *NucleiEngine) init(ctx context.Context) error { e.catalog = disk.NewCatalog(config.DefaultConfig.TemplatesDirectory) } + if e.tmpDir == "" { + tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*") + if err != nil { + return err + } + e.tmpDir = tmpDir + } + e.executerOpts = &protocols.ExecutorOptions{ - Output: e.customWriter, - Options: e.opts, - Progress: e.customProgress, - Catalog: e.catalog, - IssuesClient: e.rc, - RateLimiter: e.rateLimiter, - Interactsh: e.interactshClient, - Colorizer: aurora.NewAurora(true), - ResumeCfg: types.NewResumeCfg(), - Browser: e.browserInstance, - Parser: e.parser, - InputHelper: input.NewHelper(), - Logger: e.opts.Logger, + Output: e.customWriter, + Options: e.opts, + Progress: e.customProgress, + Catalog: e.catalog, + IssuesClient: e.rc, + RateLimiter: e.rateLimiter, + Interactsh: 
e.interactshClient, + Colorizer: aurora.NewAurora(true), + ResumeCfg: types.NewResumeCfg(), + Browser: e.browserInstance, + Parser: e.parser, + InputHelper: input.NewHelper(), + TemporaryDirectory: e.tmpDir, + Logger: e.opts.Logger, } if e.opts.ShouldUseHostError() && e.hostErrCache != nil { e.executerOpts.HostErrorsCache = e.hostErrCache diff --git a/pkg/catalog/config/constants.go b/pkg/catalog/config/constants.go index ddc05d8638..38b636304a 100644 --- a/pkg/catalog/config/constants.go +++ b/pkg/catalog/config/constants.go @@ -31,7 +31,7 @@ const ( CLIConfigFileName = "config.yaml" ReportingConfigFilename = "reporting-config.yaml" // Version is the current version of nuclei - Version = `v3.5.1` + Version = `v3.6.0` // Directory Names of custom templates CustomS3TemplatesDirName = "s3" CustomGitHubTemplatesDirName = "github" diff --git a/pkg/catalog/index/filter.go b/pkg/catalog/index/filter.go new file mode 100644 index 0000000000..ac4959a531 --- /dev/null +++ b/pkg/catalog/index/filter.go @@ -0,0 +1,344 @@ +package index + +import ( + "path/filepath" + "slices" + "strings" + + "github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity" + "github.com/projectdiscovery/nuclei/v3/pkg/templates/types" +) + +// Filter represents filtering criteria for template metadata. +// +// Inclusion fields (e.g., Authors, Tags, IDs, Severities, ProtocolTypes) use +// AND logic across different filter types and OR logic within each type. +// Exclusion fields (e.g., ExcludeTags, ExcludeIDs, ExcludeSeverities, +// ExcludeProtocolTypes) take precedence over inclusion fields. Additionally, +// IncludeTemplates and IncludeTags can force inclusion of templates even if +// they match exclusion criteria. +type Filter struct { + // Authors to include. + Authors []string + + // Tags to include. + Tags []string + + // ExcludeTags to exclude (takes precedence over Tags). + ExcludeTags []string + + // IncludeTags to force include even if excluded. 
+ IncludeTags []string + + // IDs to include (supports wildcards, OR logic). + IDs []string + + // ExcludeIDs to exclude (supports wildcards). + ExcludeIDs []string + + // IncludeTemplates paths to force include even if excluded. + IncludeTemplates []string + + // ExcludeTemplates paths to exclude. + ExcludeTemplates []string + + // Severities to include. + Severities []severity.Severity + + // ExcludeSeverities to exclude. + ExcludeSeverities []severity.Severity + + // ProtocolTypes to include. + ProtocolTypes []types.ProtocolType + + // ExcludeProtocolTypes to exclude. + ExcludeProtocolTypes []types.ProtocolType +} + +// Matches checks if metadata matches the filter criteria. +func (f *Filter) Matches(m *Metadata) bool { + if f.isForcedInclude(m) { + return true + } + + if f.isExcluded(m) { + return false + } + + if !f.matchesIncludes(m) { + return false + } + + return true +} + +// isForcedInclude checks if template is forced to be included. +func (f *Filter) isForcedInclude(m *Metadata) bool { + if len(f.IncludeTemplates) > 0 { + for _, includePath := range f.IncludeTemplates { + if matchesPath(m.FilePath, includePath) { + return true + } + } + } + + if len(f.IncludeTags) > 0 { + if slices.ContainsFunc(f.IncludeTags, m.HasTag) { + return true + } + } + + return false +} + +// isExcluded checks if template should be excluded. 
+func (f *Filter) isExcluded(m *Metadata) bool { + if len(f.ExcludeTemplates) > 0 { + for _, excludePath := range f.ExcludeTemplates { + if matchesPath(m.FilePath, excludePath) { + return true + } + } + } + + if len(f.ExcludeTags) > 0 { + if slices.ContainsFunc(f.ExcludeTags, m.HasTag) { + return true + } + } + + if len(f.ExcludeIDs) > 0 { + for _, excludeID := range f.ExcludeIDs { + if matchesID(m.ID, excludeID) { + return true + } + } + } + + if len(f.ExcludeSeverities) > 0 { + if slices.ContainsFunc(f.ExcludeSeverities, m.MatchesSeverity) { + return true + } + } + + if len(f.ExcludeProtocolTypes) > 0 { + if slices.ContainsFunc(f.ExcludeProtocolTypes, m.MatchesProtocol) { + return true + } + } + + return false +} + +// matchesIncludes checks if metadata matches include filters. +// +// Returns true if no include filters are specified, or if all specified filter +// types match. +func (f *Filter) matchesIncludes(m *Metadata) bool { + if len(f.Authors) > 0 { + if !slices.ContainsFunc(f.Authors, m.HasAuthor) { + return false + } + } + + if len(f.Tags) > 0 { + if !slices.ContainsFunc(f.Tags, m.HasTag) { + return false + } + } + + if len(f.IDs) > 0 { + matched := false + for _, id := range f.IDs { + if matchesID(m.ID, id) { + matched = true + break + } + } + if !matched { + return false + } + } + + if len(f.Severities) > 0 { + if !slices.ContainsFunc(f.Severities, m.MatchesSeverity) { + return false + } + } + + if len(f.ProtocolTypes) > 0 { + if !slices.ContainsFunc(f.ProtocolTypes, m.MatchesProtocol) { + return false + } + } + + return true +} + +// matchesID checks if template ID matches pattern (supports wildcards). 
+func matchesID(templateID, pattern string) bool { + // Convert to lowercase for case-insensitive matching + templateID = strings.ToLower(templateID) + pattern = strings.ToLower(pattern) + + if templateID == pattern { + return true + } + + matched, _ := filepath.Match(pattern, templateID) + + return matched +} + +// matchesPath checks if template path matches pattern. +func matchesPath(templatePath, pattern string) bool { + templatePath = filepath.Clean(templatePath) + pattern = filepath.Clean(pattern) + + if templatePath == pattern { + return true + } + + if strings.HasPrefix(templatePath, pattern+string(filepath.Separator)) { + return true + } + + matched, _ := filepath.Match(pattern, templatePath) + + return matched +} + +// FilterFunc is a function that filters metadata. +type FilterFunc func(*Metadata) bool + +// UnmarshalFilter creates a Filter from nuclei options. +func UnmarshalFilter( + authors, tags, excludeTags, includeTags []string, + ids, excludeIDs []string, + includeTemplates, excludeTemplates []string, + severities, excludeSeverities []string, + protocolTypes, excludeProtocolTypes []string, +) (*Filter, error) { + filter := &Filter{ + Authors: authors, + Tags: tags, + ExcludeTags: excludeTags, + IncludeTags: includeTags, + IDs: ids, + ExcludeIDs: excludeIDs, + IncludeTemplates: includeTemplates, + ExcludeTemplates: excludeTemplates, + } + + for _, sev := range severities { + holder := &severity.Holder{} + if err := holder.UnmarshalYAML(func(v interface{}) error { + *v.(*string) = sev + return nil + }); err == nil { + filter.Severities = append(filter.Severities, holder.Severity) + } + } + + for _, sev := range excludeSeverities { + holder := &severity.Holder{} + if err := holder.UnmarshalYAML(func(v interface{}) error { + *v.(*string) = sev + return nil + }); err == nil { + filter.ExcludeSeverities = append(filter.ExcludeSeverities, holder.Severity) + } + } + + for _, pt := range protocolTypes { + holder := &types.TypeHolder{} + if err := 
holder.UnmarshalYAML(func(v interface{}) error { + *v.(*string) = pt + return nil + }); err == nil && holder.ProtocolType != types.InvalidProtocol { + filter.ProtocolTypes = append(filter.ProtocolTypes, holder.ProtocolType) + } + } + + for _, pt := range excludeProtocolTypes { + holder := &types.TypeHolder{} + if err := holder.UnmarshalYAML(func(v interface{}) error { + *v.(*string) = pt + return nil + }); err == nil && holder.ProtocolType != types.InvalidProtocol { + filter.ExcludeProtocolTypes = append(filter.ExcludeProtocolTypes, holder.ProtocolType) + } + } + + return filter, nil +} + +// UnmarshalFilterFunc creates a FilterFunc from filter criteria. +func UnmarshalFilterFunc(filter *Filter) FilterFunc { + if filter == nil { + return func(*Metadata) bool { return true } + } + + return filter.Matches +} + +// IsEmpty returns true if filter has no criteria set. +func (f *Filter) IsEmpty() bool { + return len(f.Authors) == 0 && + len(f.Tags) == 0 && + len(f.ExcludeTags) == 0 && + len(f.IncludeTags) == 0 && + len(f.IDs) == 0 && + len(f.ExcludeIDs) == 0 && + len(f.IncludeTemplates) == 0 && + len(f.ExcludeTemplates) == 0 && + len(f.Severities) == 0 && + len(f.ExcludeSeverities) == 0 && + len(f.ProtocolTypes) == 0 && + len(f.ExcludeProtocolTypes) == 0 +} + +// String returns a human-readable representation of the filter. 
+func (f *Filter) String() string { + var parts []string + + if len(f.Authors) > 0 { + parts = append(parts, "authors="+strings.Join(f.Authors, ",")) + } + + if len(f.Tags) > 0 { + parts = append(parts, "tags="+strings.Join(f.Tags, ",")) + } + + if len(f.ExcludeTags) > 0 { + parts = append(parts, "exclude-tags="+strings.Join(f.ExcludeTags, ",")) + } + + if len(f.IDs) > 0 { + parts = append(parts, "ids="+strings.Join(f.IDs, ",")) + } + + if len(f.Severities) > 0 { + sevs := make([]string, len(f.Severities)) + for i, s := range f.Severities { + sevs[i] = s.String() + } + + parts = append(parts, "severities="+strings.Join(sevs, ",")) + } + + if len(f.ProtocolTypes) > 0 { + pts := make([]string, len(f.ProtocolTypes)) + for i, p := range f.ProtocolTypes { + pts[i] = p.String() + } + + parts = append(parts, "types="+strings.Join(pts, ",")) + } + + if len(parts) == 0 { + return "filter=" + } + + return "filter(" + strings.Join(parts, ", ") + ")" +} diff --git a/pkg/catalog/index/filter_test.go b/pkg/catalog/index/filter_test.go new file mode 100644 index 0000000000..2bc4735e6c --- /dev/null +++ b/pkg/catalog/index/filter_test.go @@ -0,0 +1,407 @@ +package index + +import ( + "os" + "path/filepath" + "testing" + + "github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity" + "github.com/projectdiscovery/nuclei/v3/pkg/templates/types" + "github.com/stretchr/testify/require" +) + +func TestFilterMatches(t *testing.T) { + metadata := &Metadata{ + ID: "test-template-1", + FilePath: "/templates/cves/2021/CVE-2021-1234.yaml", + Name: "Test CVE Template", + Authors: []string{"pdteam", "geeknik"}, + Tags: []string{"cve", "rce", "apache"}, + Severity: "critical", + ProtocolType: "http", + } + + t.Run("Empty filter matches all", func(t *testing.T) { + filter := &Filter{} + require.True(t, filter.Matches(metadata)) + require.True(t, filter.IsEmpty()) + }) + + t.Run("Author filter - match", func(t *testing.T) { + filter := &Filter{Authors: []string{"pdteam"}} + require.True(t, 
filter.Matches(metadata)) + }) + + t.Run("Author filter - no match", func(t *testing.T) { + filter := &Filter{Authors: []string{"unknown"}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Multiple authors - OR logic", func(t *testing.T) { + filter := &Filter{Authors: []string{"unknown", "geeknik"}} + require.True(t, filter.Matches(metadata)) + }) + + t.Run("Tag filter - match", func(t *testing.T) { + filter := &Filter{Tags: []string{"cve"}} + require.True(t, filter.Matches(metadata)) + }) + + t.Run("Tag filter - no match", func(t *testing.T) { + filter := &Filter{Tags: []string{"xss"}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Exclude tags - match", func(t *testing.T) { + filter := &Filter{ExcludeTags: []string{"rce"}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Include tags overrides exclude", func(t *testing.T) { + filter := &Filter{ + ExcludeTags: []string{"rce"}, + IncludeTags: []string{"cve"}, + } + require.True(t, filter.Matches(metadata)) + }) + + t.Run("ID filter - exact match", func(t *testing.T) { + filter := &Filter{IDs: []string{"test-template-1"}} + require.True(t, filter.Matches(metadata)) + }) + + t.Run("ID filter - wildcard match", func(t *testing.T) { + filter := &Filter{IDs: []string{"test-*"}} + require.True(t, filter.Matches(metadata)) + }) + + t.Run("ID filter - no match", func(t *testing.T) { + filter := &Filter{IDs: []string{"other-*"}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Exclude ID - exact match", func(t *testing.T) { + filter := &Filter{ExcludeIDs: []string{"test-template-1"}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Exclude ID - wildcard match", func(t *testing.T) { + filter := &Filter{ExcludeIDs: []string{"test-*"}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Severity filter - match", func(t *testing.T) { + filter := &Filter{Severities: []severity.Severity{severity.Critical}} + require.True(t, filter.Matches(metadata)) + }) + + 
t.Run("Severity filter - no match", func(t *testing.T) { + filter := &Filter{Severities: []severity.Severity{severity.High, severity.Medium}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Exclude severity - match", func(t *testing.T) { + filter := &Filter{ExcludeSeverities: []severity.Severity{severity.Critical}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Protocol type filter - match", func(t *testing.T) { + filter := &Filter{ProtocolTypes: []types.ProtocolType{types.HTTPProtocol}} + require.True(t, filter.Matches(metadata)) + }) + + t.Run("Protocol type filter - no match", func(t *testing.T) { + filter := &Filter{ProtocolTypes: []types.ProtocolType{types.DNSProtocol}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Exclude protocol type - match", func(t *testing.T) { + filter := &Filter{ExcludeProtocolTypes: []types.ProtocolType{types.HTTPProtocol}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Include templates - path match", func(t *testing.T) { + filter := &Filter{ + ExcludeTags: []string{"cve"}, + IncludeTemplates: []string{"/templates/cves/"}, + } + require.True(t, filter.Matches(metadata)) + }) + + t.Run("Exclude templates - path match", func(t *testing.T) { + filter := &Filter{ + ExcludeTemplates: []string{"/templates/cves/"}, + } + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Complex filter - all match", func(t *testing.T) { + filter := &Filter{ + Authors: []string{"pdteam"}, + Tags: []string{"cve"}, + Severities: []severity.Severity{severity.Critical}, + ProtocolTypes: []types.ProtocolType{types.HTTPProtocol}, + } + require.True(t, filter.Matches(metadata)) + }) + + t.Run("Complex filter - AND logic across types", func(t *testing.T) { + filter := &Filter{ + Authors: []string{"pdteam"}, // matches + Tags: []string{"xss"}, // doesn't match + Severities: []severity.Severity{severity.Critical}, // matches + } + // With AND logic across filter types, doesn't match because tags don't 
match + // even though author and severity match + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Complex filter - no match at all", func(t *testing.T) { + filter := &Filter{ + Authors: []string{"unknown"}, // doesn't match + Tags: []string{"xss"}, // doesn't match + Severities: []severity.Severity{severity.Low}, // doesn't match + } + require.False(t, filter.Matches(metadata)) + }) +} + +func TestMatchesPath(t *testing.T) { + tests := []struct { + name string + path string + pattern string + expected bool + }{ + {"exact match", "/templates/cves/2021/test.yaml", "/templates/cves/2021/test.yaml", true}, + {"directory prefix", "/templates/cves/2021/test.yaml", "/templates/cves", true}, + {"directory with slash", "/templates/cves/2021/test.yaml", "/templates/cves/", true}, + {"no match", "/templates/cves/2021/test.yaml", "/templates/exploits", false}, + {"wildcard match", "/templates/cves/2021/test.yaml", "/templates/*/2021/*.yaml", true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := matchesPath(tt.path, tt.pattern) + require.Equal(t, tt.expected, result) + }) + } +} + +func TestMatchesID(t *testing.T) { + tests := []struct { + name string + id string + pattern string + expected bool + }{ + {"exact match", "CVE-2021-1234", "CVE-2021-1234", true}, + {"wildcard prefix", "CVE-2021-1234", "CVE-*", true}, + {"wildcard suffix", "CVE-2021-1234", "*-1234", true}, + {"wildcard middle", "CVE-2021-1234", "CVE-*-1234", true}, + {"no match", "CVE-2021-1234", "CVE-2022-*", false}, + {"partial no match", "CVE-2021-1234", "CVE-2021-12", false}, + {"case insensitive exact", "cve-2021-1234", "CVE-2021-1234", true}, + {"case insensitive wildcard", "CVE-2021-1234", "cve-*", true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := matchesID(tt.id, tt.pattern) + require.Equal(t, tt.expected, result) + }) + } +} + +func TestUnmarshalFilter(t *testing.T) { + filter, err := UnmarshalFilter( + 
[]string{"author1", "author2"}, + []string{"tag1", "tag2"}, + []string{"exclude-tag"}, + []string{"include-tag"}, + []string{"id1", "id2*"}, + []string{"exclude-id*"}, + []string{"/include/path"}, + []string{"/exclude/path"}, + []string{"critical", "high"}, + []string{"info"}, + []string{"http", "dns"}, + []string{"file"}, + ) + + require.NoError(t, err) + require.NotNil(t, filter) + + require.Equal(t, []string{"author1", "author2"}, filter.Authors) + require.Equal(t, []string{"tag1", "tag2"}, filter.Tags) + require.Equal(t, []string{"exclude-tag"}, filter.ExcludeTags) + require.Equal(t, []string{"include-tag"}, filter.IncludeTags) + require.Equal(t, []string{"id1", "id2*"}, filter.IDs) + require.Equal(t, []string{"exclude-id*"}, filter.ExcludeIDs) + require.Equal(t, []string{"/include/path"}, filter.IncludeTemplates) + require.Equal(t, []string{"/exclude/path"}, filter.ExcludeTemplates) + + require.Len(t, filter.Severities, 2) + require.Contains(t, filter.Severities, severity.Critical) + require.Contains(t, filter.Severities, severity.High) + + require.Len(t, filter.ExcludeSeverities, 1) + require.Contains(t, filter.ExcludeSeverities, severity.Info) + + require.Len(t, filter.ProtocolTypes, 2) + require.Contains(t, filter.ProtocolTypes, types.HTTPProtocol) + require.Contains(t, filter.ProtocolTypes, types.DNSProtocol) + + require.Len(t, filter.ExcludeProtocolTypes, 1) + require.Contains(t, filter.ExcludeProtocolTypes, types.FileProtocol) +} + +func TestIndexFilter(t *testing.T) { + tmpDir := t.TempDir() + idx, err := NewIndex(tmpDir) + require.NoError(t, err) + + // Create test templates and metadata + templates := []struct { + id string + path string + authors []string + tags []string + severity string + protocol string + }{ + {"cve-2021-1", "/templates/cves/CVE-2021-1.yaml", []string{"pdteam"}, []string{"cve", "rce"}, "critical", "http"}, + {"cve-2021-2", "/templates/cves/CVE-2021-2.yaml", []string{"pdteam"}, []string{"cve", "xss"}, "high", "http"}, + 
{"exploit-1", "/templates/exploits/exploit-1.yaml", []string{"geeknik"}, []string{"exploit"}, "medium", "dns"}, + {"info-1", "/templates/info/info-1.yaml", []string{"author1"}, []string{"info"}, "info", "http"}, + } + + for _, tmpl := range templates { + tmpFile := filepath.Join(tmpDir, filepath.Base(tmpl.path)) + err := os.WriteFile(tmpFile, []byte("id: "+tmpl.id), 0644) + require.NoError(t, err) + + metadata := &Metadata{ + ID: tmpl.id, + FilePath: tmpFile, + Authors: tmpl.authors, + Tags: tmpl.tags, + Severity: tmpl.severity, + ProtocolType: tmpl.protocol, + } + idx.Set(tmpl.path, metadata) + } + + t.Run("No filter returns all", func(t *testing.T) { + results := idx.Filter(nil) + require.Len(t, results, 4) + }) + + t.Run("Filter by author", func(t *testing.T) { + filter := &Filter{Authors: []string{"pdteam"}} + results := idx.Filter(filter) + require.Len(t, results, 2) + }) + + t.Run("Filter by tag", func(t *testing.T) { + filter := &Filter{Tags: []string{"cve"}} + results := idx.Filter(filter) + require.Len(t, results, 2) + }) + + t.Run("Filter by severity", func(t *testing.T) { + filter := &Filter{Severities: []severity.Severity{severity.Critical}} + results := idx.Filter(filter) + require.Len(t, results, 1) + }) + + t.Run("Filter by protocol type", func(t *testing.T) { + filter := &Filter{ProtocolTypes: []types.ProtocolType{types.HTTPProtocol}} + results := idx.Filter(filter) + require.Len(t, results, 3) + }) + + t.Run("Exclude by severity", func(t *testing.T) { + filter := &Filter{ExcludeSeverities: []severity.Severity{severity.Info}} + results := idx.Filter(filter) + require.Len(t, results, 3) + }) + + t.Run("Exclude by tag", func(t *testing.T) { + filter := &Filter{ExcludeTags: []string{"info"}} + results := idx.Filter(filter) + require.Len(t, results, 3) + }) + + t.Run("Complex filter", func(t *testing.T) { + filter := &Filter{ + Tags: []string{"cve"}, + Severities: []severity.Severity{severity.Critical, severity.High}, + ExcludeSeverities: 
[]severity.Severity{severity.Info}, + } + results := idx.Filter(filter) + require.Len(t, results, 2) + }) + + t.Run("Count with filter", func(t *testing.T) { + filter := &Filter{Tags: []string{"cve"}} + count := idx.Count(filter) + require.Equal(t, 2, count) + }) + + t.Run("Count without filter", func(t *testing.T) { + count := idx.Count(nil) + require.Equal(t, 4, count) + }) +} + +func TestIndexFilterFunc(t *testing.T) { + tmpDir := t.TempDir() + idx, err := NewIndex(tmpDir) + require.NoError(t, err) + + // Add test metadata + for i := 0; i < 5; i++ { + metadata := &Metadata{ + ID: "test-" + string(rune('a'+i)), + FilePath: "/tmp/test.yaml", + Severity: "high", + } + if i%2 == 0 { + metadata.Tags = []string{"even"} + } else { + metadata.Tags = []string{"odd"} + } + idx.Set("/tmp/test-"+string(rune('a'+i))+".yaml", metadata) + } + + t.Run("Custom filter function", func(t *testing.T) { + results := idx.FilterFunc(func(m *Metadata) bool { + return m.HasTag("even") + }) + require.Len(t, results, 3) // 0, 2, 4 + }) + + t.Run("Nil filter function returns all", func(t *testing.T) { + results := idx.FilterFunc(nil) + require.Len(t, results, 5) + }) +} + +func TestFilterString(t *testing.T) { + filter := &Filter{ + Authors: []string{"author1", "author2"}, + Tags: []string{"tag1"}, + Severities: []severity.Severity{severity.Critical, severity.High}, + ProtocolTypes: []types.ProtocolType{types.HTTPProtocol}, + } + + str := filter.String() + require.Contains(t, str, "authors=") + require.Contains(t, str, "tags=") + require.Contains(t, str, "severities=") + require.Contains(t, str, "types=") + + emptyFilter := &Filter{} + require.Equal(t, "filter=", emptyFilter.String()) +} diff --git a/pkg/catalog/index/index.go b/pkg/catalog/index/index.go new file mode 100644 index 0000000000..3024851b72 --- /dev/null +++ b/pkg/catalog/index/index.go @@ -0,0 +1,352 @@ +package index + +import ( + "encoding/gob" + "maps" + "os" + "path/filepath" + "sync" + + "github.com/maypok86/otter/v2" + 
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config" + "github.com/projectdiscovery/nuclei/v3/pkg/templates" + folderutil "github.com/projectdiscovery/utils/folder" +) + +const ( + // IndexFileName is the name of the persistent cache file. + IndexFileName = "index.gob" + + // IndexVersion is the schema version for cache invalidation on breaking + // changes. + IndexVersion = 1 + + // DefaultMaxSize is the default maximum number of templates to cache. + DefaultMaxSize = 50000 + + // DefaultMaxWeight is the default maximum weight of the cache. + DefaultMaxWeight = DefaultMaxSize * 800 // ~40MB assuming ~800B/entry +) + +// Index represents a cache for template metadata. +type Index struct { + cache *otter.Cache[string, *Metadata] + cacheFile string + mu sync.RWMutex + version int +} + +// cacheSnapshot represents the serialized cache structure. +type cacheSnapshot struct { + Version int `gob:"version"` + Data map[string]*Metadata `gob:"data"` +} + +// NewIndex creates a new template metadata cache with the given options. +func NewIndex(cacheDir string) (*Index, error) { + if cacheDir == "" { + cacheDir = folderutil.AppCacheDirOrDefault(".nuclei-cache", config.BinaryName) + } + + if err := os.MkdirAll(cacheDir, 0755); err != nil { + return nil, err + } + + cacheFile := filepath.Join(cacheDir, IndexFileName) + + // NOTE(dwisiswant0): Build cache with adaptive sizing based on memory cost. 
+ opts := &otter.Options[string, *Metadata]{ + MaximumWeight: uint64(DefaultMaxWeight), + Weigher: func(key string, value *Metadata) uint32 { + if value == nil { + return uint32(len(key)) + } + + weight := len(key) + weight += len(value.ID) + weight += len(value.FilePath) + weight += 24 // ModTime is time.Time (24B) + weight += len(value.Name) + weight += len(value.Severity) + weight += len(value.ProtocolType) + weight += len(value.TemplateVerifier) + + for _, author := range value.Authors { + weight += len(author) + } + for _, tag := range value.Tags { + weight += len(tag) + } + + return uint32(weight) + }, + } + + cache, err := otter.New(opts) + if err != nil { + return nil, err + } + + c := &Index{ + cache: cache, + cacheFile: cacheFile, + version: IndexVersion, + } + + return c, nil +} + +// NewDefaultIndex creates a index with default settings in the default cache +// directory. +func NewDefaultIndex() (*Index, error) { + return NewIndex("") +} + +// Get retrieves metadata for a template path, validating freshness via mtime. +func (i *Index) Get(path string) (*Metadata, bool) { + i.mu.RLock() + defer i.mu.RUnlock() + + metadata, found := i.cache.GetIfPresent(path) + if !found { + return nil, false + } + + if !metadata.IsValid() { + go i.Delete(path) + + return nil, false + } + + return metadata, true +} + +// Set stores metadata for a template path. +// +// The caller is responsible for ensuring the metadata is valid and contains +// the correct checksum before calling this method. +// Use [SetFromTemplate] for automatic extraction and checksum computation. +// +// Returns the metadata and whether it was successfully cached (false if evicted). +func (i *Index) Set(path string, metadata *Metadata) (*Metadata, bool) { + i.mu.Lock() + defer i.mu.Unlock() + + return i.cache.Set(path, metadata) +} + +// SetFromTemplate extracts metadata from a parsed template and stores it. +// +// Returns the metadata and whether it was successfully cached. 
The metadata is +// always returned (even on checksum failure) for immediate filtering use. +// Returns false if the metadata was not cached (e.g., set, evicted). +func (i *Index) SetFromTemplate(path string, tpl *templates.Template) (*Metadata, bool) { + metadata := NewMetadataFromTemplate(path, tpl) + + info, err := os.Stat(path) + if err != nil { + return metadata, false + } + metadata.ModTime = info.ModTime() + + if i.cache == nil { + return metadata, false + } + + return i.Set(path, metadata) +} + +// Has checks if metadata exists for a path without validation. +func (i *Index) Has(path string) bool { + i.mu.RLock() + defer i.mu.RUnlock() + + _, found := i.cache.GetIfPresent(path) + + return found +} + +// Delete removes metadata for a path. +func (i *Index) Delete(path string) { + i.mu.Lock() + defer i.mu.Unlock() + + i.cache.Invalidate(path) +} + +// Size returns the number of cached entries. +func (i *Index) Size() int { + i.mu.RLock() + defer i.mu.RUnlock() + + return i.cache.EstimatedSize() +} + +// Clear removes all cached entries. +func (i *Index) Clear() { + i.mu.Lock() + defer i.mu.Unlock() + + i.cache.InvalidateAll() +} + +// Save persists the cache to disk using gob encoding. +func (i *Index) Save() error { + i.mu.RLock() + defer i.mu.RUnlock() + + snapshot := &cacheSnapshot{ + Version: i.version, + Data: make(map[string]*Metadata), + } + + maps.Insert(snapshot.Data, i.cache.All()) + + // NOTE(dwisiswant0): write to temp for atomic op. + tmpFile := i.cacheFile + ".tmp" + file, err := os.Create(tmpFile) + if err != nil { + return err + } + + encoder := gob.NewEncoder(file) + if err := encoder.Encode(snapshot); err != nil { + _ = file.Close() + _ = os.Remove(tmpFile) + + return err + } + + if err := file.Close(); err != nil { + _ = os.Remove(tmpFile) + + return err + } + + if err := os.Rename(tmpFile, i.cacheFile); err != nil { + _ = os.Remove(tmpFile) + + return err + } + + return nil +} + +// Load loads the cache from disk using gob decoding. 
+func (i *Index) Load() error { + file, err := os.Open(i.cacheFile) + if err != nil { + if os.IsNotExist(err) { + return nil + } + + return err + } + defer func() { _ = file.Close() }() + + var snapshot cacheSnapshot + + decoder := gob.NewDecoder(file) + if err := decoder.Decode(&snapshot); err != nil { + _ = file.Close() + _ = os.Remove(i.cacheFile) + + return nil + } + + if snapshot.Version != i.version { + _ = file.Close() + _ = os.Remove(i.cacheFile) + + return nil + } + + i.mu.Lock() + defer i.mu.Unlock() + + for key, value := range snapshot.Data { + i.cache.Set(key, value) + } + + return nil +} + +// Filter returns all template paths that match the given filter criteria. +func (i *Index) Filter(filter *Filter) []string { + if filter == nil || filter.IsEmpty() { + return i.All() + } + + i.mu.RLock() + defer i.mu.RUnlock() + + var matched []string + for path, metadata := range i.cache.All() { + if filter.Matches(metadata) { + matched = append(matched, path) + } + } + + return matched +} + +// FilterFunc returns all template paths that match the given filter function. +func (i *Index) FilterFunc(fn FilterFunc) []string { + if fn == nil { + return i.All() + } + + i.mu.RLock() + defer i.mu.RUnlock() + + var matched []string + for path, metadata := range i.cache.All() { + if fn(metadata) { + matched = append(matched, path) + } + } + + return matched +} + +// All returns all template paths in the index. +func (i *Index) All() []string { + i.mu.RLock() + defer i.mu.RUnlock() + + paths := make([]string, 0, i.cache.EstimatedSize()) + for path := range i.cache.All() { + paths = append(paths, path) + } + + return paths +} + +// GetAll returns all metadata entries in the index. +func (i *Index) GetAll() map[string]*Metadata { + i.mu.RLock() + defer i.mu.RUnlock() + + result := maps.Collect(i.cache.All()) + + return result +} + +// Count returns the number of templates matching the filter. 
+func (i *Index) Count(filter *Filter) int { + if filter == nil || filter.IsEmpty() { + return i.Size() + } + + i.mu.RLock() + defer i.mu.RUnlock() + + count := 0 + for _, metadata := range i.cache.All() { + if filter.Matches(metadata) { + count++ + } + } + + return count +} diff --git a/pkg/catalog/index/index_test.go b/pkg/catalog/index/index_test.go new file mode 100644 index 0000000000..de76dd7592 --- /dev/null +++ b/pkg/catalog/index/index_test.go @@ -0,0 +1,737 @@ +package index + +import ( + "fmt" + "os" + "path/filepath" + "testing" + "time" + + "github.com/projectdiscovery/nuclei/v3/pkg/model" + "github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity" + "github.com/projectdiscovery/nuclei/v3/pkg/model/types/stringslice" + "github.com/projectdiscovery/nuclei/v3/pkg/protocols/code" + "github.com/projectdiscovery/nuclei/v3/pkg/protocols/headless" + "github.com/projectdiscovery/nuclei/v3/pkg/protocols/http" + "github.com/projectdiscovery/nuclei/v3/pkg/templates" + "github.com/projectdiscovery/nuclei/v3/pkg/templates/types" + "github.com/stretchr/testify/require" +) + +func TestNewIndex(t *testing.T) { + t.Run("with custom directory", func(t *testing.T) { + tmpDir := t.TempDir() + cache, err := NewIndex(tmpDir) + require.NoError(t, err, "Failed to create cache with custom directory") + require.NotNil(t, cache, "Cache should not be nil") + require.Equal(t, filepath.Join(tmpDir, IndexFileName), cache.cacheFile) + require.Equal(t, IndexVersion, cache.version) + }) + + t.Run("with default directory", func(t *testing.T) { + cache, err := NewDefaultIndex() + require.NoError(t, err, "Failed to create cache with default directory") + require.NotNil(t, cache, "Cache should not be nil") + }) +} + +func TestCacheBasicOperations(t *testing.T) { + tmpDir := t.TempDir() + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + metadata := &Metadata{ + ID: "concurrent-test", + FilePath: "/tmp/concurrent.yaml", + } + + t.Run("Set and Has", func(t *testing.T) { + 
cache.Set(metadata.FilePath, metadata) + require.Equal(t, 1, cache.Size(), "Cache size should be 1 after Set") + require.True(t, cache.Has(metadata.FilePath), "Cache should contain the path after Set") + require.False(t, cache.Has("/nonexistent"), "Cache should not contain nonexistent path") + }) + + t.Run("Get with validation", func(t *testing.T) { + // Get should fail validation for nonexistent file + retrieved, found := cache.Get(metadata.FilePath) + require.False(t, found, "Get should fail validation for nonexistent file") + require.Nil(t, retrieved, "Retrieved metadata should be nil for invalid entry") + }) + + t.Run("Delete", func(t *testing.T) { + cache.Set(metadata.FilePath, metadata) + require.True(t, cache.Has(metadata.FilePath), "Cache should contain path before Delete") + + cache.Delete(metadata.FilePath) + require.False(t, cache.Has(metadata.FilePath), "Cache should not contain path after Delete") + }) + + t.Run("Clear", func(t *testing.T) { + cache.Set(metadata.FilePath, metadata) + cache.Set("/tmp/test2.yaml", &Metadata{ID: "test2", FilePath: "/tmp/test2.yaml"}) + require.True(t, cache.Size() > 0, "Cache should have entries before Clear") + + cache.Clear() + require.Equal(t, 0, cache.Size(), "Cache should be empty after Clear") + }) +} + +func TestCachePersistence(t *testing.T) { + tmpDir := t.TempDir() + + metadata1 := &Metadata{ + ID: "persist-test-1", + FilePath: "/tmp/persist1.yaml", + Name: "Persistence Test 1", + Authors: []string{"tester"}, + Tags: []string{"test"}, + Severity: "medium", + ProtocolType: "dns", + } + + metadata2 := &Metadata{ + ID: "persist-test-2", + FilePath: "/tmp/persist2.yaml", + Name: "Persistence Test 2", + Authors: []string{"tester2"}, + Tags: []string{"cve"}, + Severity: "critical", + ProtocolType: "http", + } + + t.Run("Save and Load", func(t *testing.T) { + // Create cache and add entries + cache1, err := NewIndex(tmpDir) + require.NoError(t, err) + + cache1.Set(metadata1.FilePath, metadata1) + 
cache1.Set(metadata2.FilePath, metadata2) + require.Equal(t, 2, cache1.Size()) + + // Save to disk + err = cache1.Save() + require.NoError(t, err, "Failed to save cache") + + // Verify cache file exists + cacheFile := filepath.Join(tmpDir, IndexFileName) + stat, err := os.Stat(cacheFile) + require.NoError(t, err, "Cache file should exist") + require.Greater(t, stat.Size(), int64(0), "Cache file should not be empty") + + // Create new cache and load + cache2, err := NewIndex(tmpDir) + require.NoError(t, err) + require.Equal(t, 0, cache2.Size(), "New cache should be empty before Load") + + err = cache2.Load() + require.NoError(t, err, "Failed to load cache") + + // Verify data was loaded + require.Equal(t, 2, cache2.Size(), "Loaded cache should have 2 entries") + require.True(t, cache2.Has(metadata1.FilePath), "Loaded cache should contain first entry") + require.True(t, cache2.Has(metadata2.FilePath), "Loaded cache should contain second entry") + }) + + t.Run("Load non-existent cache", func(t *testing.T) { + emptyDir := t.TempDir() + cache, err := NewIndex(emptyDir) + require.NoError(t, err) + + // Loading non-existent cache should not error + err = cache.Load() + require.NoError(t, err, "Loading non-existent cache should not error") + require.Equal(t, 0, cache.Size(), "Cache should be empty after loading non-existent file") + }) + + t.Run("Atomic save", func(t *testing.T) { + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + cache.Set(metadata1.FilePath, metadata1) + err = cache.Save() + require.NoError(t, err) + + // Verify no .tmp file left behind + tmpFile := filepath.Join(tmpDir, IndexFileName+".tmp") + _, err = os.Stat(tmpFile) + require.True(t, os.IsNotExist(err), "Temporary file should not exist after save") + + // Verify actual cache file exists + cacheFile := filepath.Join(tmpDir, IndexFileName) + _, err = os.Stat(cacheFile) + require.NoError(t, err, "Cache file should exist") + }) +} + +func TestIndexVersionMismatch(t *testing.T) { + tmpDir := 
t.TempDir() + + // Create cache with current version + cache1, err := NewIndex(tmpDir) + require.NoError(t, err) + + metadata := &Metadata{ + ID: "version-test", + FilePath: "/tmp/version.yaml", + } + cache1.Set(metadata.FilePath, metadata) + + // Save with current version + err = cache1.Save() + require.NoError(t, err) + + // Manually modify version and save again + cache1.version = 999 + err = cache1.Save() + require.NoError(t, err) + + // Try to load with different version + cache2, err := NewIndex(tmpDir) + require.NoError(t, err) + + // Load should succeed but cache should be empty (version mismatch) + err = cache2.Load() + require.NoError(t, err, "Load should not error on version mismatch") + require.Equal(t, 0, cache2.Size(), "Cache should be empty after version mismatch") +} + +func TestCacheCorruptedFile(t *testing.T) { + tmpDir := t.TempDir() + cacheFile := filepath.Join(tmpDir, IndexFileName) + + // Create corrupted cache file + err := os.WriteFile(cacheFile, []byte("corrupted data that is not valid gob"), 0644) + require.NoError(t, err) + + // Try to load corrupted cache + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + err = cache.Load() + require.NoError(t, err, "Load should not error on corrupted cache") + require.Equal(t, 0, cache.Size(), "Cache should be empty after loading corrupted file") + + // Corrupted file should be removed + _, err = os.Stat(cacheFile) + require.True(t, os.IsNotExist(err), "Corrupted cache file should be removed") +} + +func TestMetadataValidation(t *testing.T) { + tmpDir := t.TempDir() + tmpFile := filepath.Join(tmpDir, "test.yaml") + + t.Run("Valid metadata", func(t *testing.T) { + // Create a test file + err := os.WriteFile(tmpFile, []byte("id: test\ninfo:\n name: Test"), 0644) + require.NoError(t, err) + + info, err := os.Stat(tmpFile) + require.NoError(t, err) + + // Create metadata with correct checksum + metadata := &Metadata{ + ID: "test", + FilePath: tmpFile, + ModTime: info.ModTime(), + } + + // Should 
be valid + require.True(t, metadata.IsValid(), "Metadata should be valid for unchanged file") + }) + + t.Run("Invalid metadata after file modification", func(t *testing.T) { + // Create the test file first to ensure it exists in this subtest + err := os.WriteFile(tmpFile, []byte("id: test\ninfo:\n name: Test"), 0644) + require.NoError(t, err) + + // Set file ModTime to past to ensure modification is detectable + oldTime := time.Now().Add(-2 * time.Second) + err = os.Chtimes(tmpFile, oldTime, oldTime) + require.NoError(t, err) + + info, err := os.Stat(tmpFile) + require.NoError(t, err) + + metadata := &Metadata{ + ID: "test", + FilePath: tmpFile, + ModTime: info.ModTime(), + } + + // Modify file + err = os.WriteFile(tmpFile, []byte("id: test\ninfo:\n name: Modified"), 0644) + require.NoError(t, err) + + // Should now be invalid + require.False(t, metadata.IsValid(), "Metadata should be invalid after file modification") + }) + + t.Run("Invalid metadata for deleted file", func(t *testing.T) { + // Create the test file first to ensure it exists in this subtest + err := os.WriteFile(tmpFile, []byte("id: test\ninfo:\n name: Test"), 0644) + require.NoError(t, err) + + info, err := os.Stat(tmpFile) + require.NoError(t, err) + + metadata := &Metadata{ + ID: "test", + FilePath: tmpFile, + ModTime: info.ModTime(), + } + + // Delete file + err = os.Remove(tmpFile) + require.NoError(t, err) + + // Should be invalid + require.False(t, metadata.IsValid(), "Metadata should be invalid for deleted file") + }) +} + +func TestSetFromTemplate(t *testing.T) { + tmpDir := t.TempDir() + tmpFile := filepath.Join(tmpDir, "extract.yaml") + + // Create a test file + err := os.WriteFile(tmpFile, []byte("id: extract-test"), 0644) + require.NoError(t, err) + + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + t.Run("Basic metadata extraction", func(t *testing.T) { + template := &templates.Template{ + ID: "extract-test", + Info: model.Info{ + Name: "Extract Test Template", + Authors: 
stringslice.StringSlice{Value: "author1,author2"}, + Tags: stringslice.StringSlice{Value: "tag1,tag2"}, + Description: "Test description", + SeverityHolder: severity.Holder{ + Severity: severity.High, + }, + }, + SelfContained: true, + Verified: true, + TemplateVerifier: "test-verifier", + } + + metadata, ok := cache.SetFromTemplate(tmpFile, template) + require.True(t, ok, "Failed to set metadata from template") + require.NotNil(t, metadata, "Metadata should not be nil") + + // Verify core fields + require.Equal(t, "extract-test", metadata.ID) + require.Equal(t, tmpFile, metadata.FilePath) + + // Verify Info fields + require.Equal(t, "Extract Test Template", metadata.Name) + require.Equal(t, []string{"author1,author2"}, metadata.Authors) + require.Equal(t, []string{"tag1,tag2"}, metadata.Tags) + require.Equal(t, "high", metadata.Severity) + + // Verify flags + require.True(t, metadata.Verified) + require.Equal(t, "test-verifier", metadata.TemplateVerifier) + }) + + t.Run("HTTP protocol detection", func(t *testing.T) { + // Create a separate test file for this test + httpFile := filepath.Join(tmpDir, "http-test.yaml") + err := os.WriteFile(httpFile, []byte("id: http-test"), 0644) + require.NoError(t, err) + + template := &templates.Template{ + ID: "http-test", + Info: model.Info{ + Name: "HTTP Test", + Authors: stringslice.StringSlice{Value: "tester"}, + SeverityHolder: severity.Holder{ + Severity: severity.Medium, + }, + }, + RequestsHTTP: []*http.Request{{Method: http.HTTPMethodTypeHolder{MethodType: http.HTTPGet}}}, + } + + metadata, ok := cache.SetFromTemplate(httpFile, template) + require.True(t, ok) + require.NotNil(t, metadata) + require.Equal(t, "http", metadata.ProtocolType) + }) + + t.Run("Extract with missing file", func(t *testing.T) { + template := &templates.Template{ + ID: "missing-test", + Info: model.Info{ + Name: "Missing File Test", + Authors: stringslice.StringSlice{Value: "tester"}, + SeverityHolder: severity.Holder{ + Severity: severity.Low, + 
}, + }, + } + + metadata, ok := cache.SetFromTemplate("/nonexistent/file.yaml", template) + require.False(t, ok, "Should return false for nonexistent file") + require.NotNil(t, metadata, "Metadata should still be returned") + }) +} + +func TestMetadataMatchingHelpers(t *testing.T) { + metadata := &Metadata{ + Tags: []string{"cve", "rce", "apache"}, + Authors: []string{"pdteam", "geeknik"}, + Severity: "critical", + ProtocolType: "http", + } + + t.Run("HasTag", func(t *testing.T) { + require.True(t, metadata.HasTag("cve")) + require.True(t, metadata.HasTag("rce")) + require.True(t, metadata.HasTag("apache")) + require.False(t, metadata.HasTag("xxe")) + require.False(t, metadata.HasTag("")) + }) + + t.Run("HasAuthor", func(t *testing.T) { + require.True(t, metadata.HasAuthor("pdteam")) + require.True(t, metadata.HasAuthor("geeknik")) + require.False(t, metadata.HasAuthor("unknown")) + require.False(t, metadata.HasAuthor("")) + }) + + t.Run("MatchesSeverity", func(t *testing.T) { + require.True(t, metadata.MatchesSeverity(severity.Critical)) + require.False(t, metadata.MatchesSeverity(severity.High)) + require.False(t, metadata.MatchesSeverity(severity.Medium)) + require.False(t, metadata.MatchesSeverity(severity.Low)) + require.False(t, metadata.MatchesSeverity(severity.Info)) + }) + + t.Run("MatchesProtocol", func(t *testing.T) { + require.True(t, metadata.MatchesProtocol(types.HTTPProtocol)) + require.False(t, metadata.MatchesProtocol(types.DNSProtocol)) + require.False(t, metadata.MatchesProtocol(types.FileProtocol)) + require.False(t, metadata.MatchesProtocol(types.NetworkProtocol)) + }) + + t.Run("Empty metadata", func(t *testing.T) { + emptyMetadata := &Metadata{} + require.False(t, emptyMetadata.HasTag("any")) + require.False(t, emptyMetadata.HasAuthor("any")) + }) +} + +func TestCacheConcurrency(t *testing.T) { + tmpDir := t.TempDir() + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + // Test concurrent writes + t.Run("Concurrent Set", func(t 
*testing.T) { + done := make(chan bool) + for i := 0; i < 10; i++ { + go func(id int) { + metadata := &Metadata{ + ID: string(rune('a' + id)), + FilePath: filepath.Join("/tmp", string(rune('a'+id))+".yaml"), + } + cache.Set(metadata.FilePath, metadata) + done <- true + }(i) + } + + // Wait for all goroutines + for i := 0; i < 10; i++ { + <-done + } + + require.Equal(t, 10, cache.Size(), "All concurrent writes should succeed") + }) + + // Test concurrent reads + t.Run("Concurrent Has", func(t *testing.T) { + metadata := &Metadata{ + ID: "concurrent-test", + FilePath: "/tmp/concurrent.yaml", + } + cache.Set(metadata.FilePath, metadata) + + done := make(chan bool) + for i := 0; i < 20; i++ { + go func() { + _ = cache.Has(metadata.FilePath) + done <- true + }() + } + + // Wait for all goroutines + for i := 0; i < 20; i++ { + <-done + } + }) +} + +func TestCacheSize(t *testing.T) { + tmpDir := t.TempDir() + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + require.Equal(t, 0, cache.Size(), "New cache should have size 0") + + // Add entries + for i := 0; i < 5; i++ { + metadata := &Metadata{ + ID: string(rune('a' + i)), + FilePath: filepath.Join("/tmp", string(rune('a'+i))+".yaml"), + } + cache.Set(metadata.FilePath, metadata) + } + + require.Equal(t, 5, cache.Size(), "Cache should have size 5 after adding 5 entries") + + // Delete entries + cache.Delete(filepath.Join("/tmp", "a.yaml")) + cache.Delete(filepath.Join("/tmp", "b.yaml")) + + require.Equal(t, 3, cache.Size(), "Cache should have size 3 after deleting 2 entries") + + // Clear cache + cache.Clear() + require.Equal(t, 0, cache.Size(), "Cache should have size 0 after Clear") +} + +func TestCacheGetWithValidFile(t *testing.T) { + tmpDir := t.TempDir() + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + // Create a real file for testing validation + tmpFile := filepath.Join(tmpDir, "test.yaml") + err = os.WriteFile(tmpFile, []byte("id: test"), 0644) + require.NoError(t, err) + + info, err := 
os.Stat(tmpFile) + require.NoError(t, err) + + metadata := &Metadata{ + ID: "test", + FilePath: tmpFile, + ModTime: info.ModTime(), + Name: "Test Template", + } + + // Set and get should work with valid file + cache.Set(metadata.FilePath, metadata) + retrieved, found := cache.Get(metadata.FilePath) + require.True(t, found, "Should find entry with valid file") + require.NotNil(t, retrieved, "Retrieved metadata should not be nil") + require.Equal(t, metadata.ID, retrieved.ID) +} + +func TestCacheSaveErrorHandling(t *testing.T) { + tmpDir := t.TempDir() + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + metadata := &Metadata{ + ID: "test", + FilePath: filepath.Join("/tmp", "test.yaml"), + } + cache.Set(metadata.FilePath, metadata) + + // Create a directory where the temp file would be created to force an error + // The Save method creates a file at cacheFile + ".tmp" + conflictPath := filepath.Join(tmpDir, IndexFileName+".tmp") + err = os.Mkdir(conflictPath, 0755) + require.NoError(t, err) + + err = cache.Save() + require.Error(t, err, "Save should fail when temp file cannot be created") +} + +func TestNewCacheWithInvalidDirectory(t *testing.T) { + // Try to create cache in a file path (should fail) + tmpFile := filepath.Join(t.TempDir(), "file.txt") + err := os.WriteFile(tmpFile, []byte("test"), 0644) + require.NoError(t, err) + + cache, err := NewIndex(tmpFile) + require.Error(t, err, "NewCache should fail when path is a file") + require.Nil(t, cache, "Cache should be nil on error") +} + +func TestCacheLoadCorruptedRemoval(t *testing.T) { + tmpDir := t.TempDir() + cacheFile := filepath.Join(tmpDir, IndexFileName) + + // Create corrupted cache file with invalid gob data + err := os.WriteFile(cacheFile, []byte("this is not valid gob encoding at all!"), 0644) + require.NoError(t, err) + + // Verify file exists before Load + _, err = os.Stat(cacheFile) + require.NoError(t, err, "Corrupted file should exist") + + // Load should not error but should remove 
corrupted file + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + err = cache.Load() + require.NoError(t, err, "Load should not return error for corrupted file") + + // Verify corrupted file was removed + _, err = os.Stat(cacheFile) + require.True(t, os.IsNotExist(err), "Corrupted file should be removed") + require.Equal(t, 0, cache.Size(), "Cache should be empty after loading corrupted file") +} + +func TestMetadataExtractionWithNilClassification(t *testing.T) { + tmpDir := t.TempDir() + tmpFile := filepath.Join(tmpDir, "test.yaml") + err := os.WriteFile(tmpFile, []byte("id: test"), 0644) + require.NoError(t, err) + + template := &templates.Template{ + ID: "nil-classification", + Info: model.Info{ + Name: "Template without classification", + Authors: stringslice.StringSlice{Value: "tester"}, + SeverityHolder: severity.Holder{ + Severity: severity.Medium, + }, + Classification: nil, // Explicitly nil + }, + } + + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + metadata, ok := cache.SetFromTemplate(tmpFile, template) + require.True(t, ok) + require.NotNil(t, metadata) +} + +func TestCachePersistenceWithLargeDataset(t *testing.T) { + tmpDir := t.TempDir() + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + // Add 100 entries to test bulk operations + for i := 0; i < 100; i++ { + metadata := &Metadata{ + ID: fmt.Sprintf("template-%d", i), + FilePath: filepath.Join("/tmp", fmt.Sprintf("template-%d.yaml", i)), + Name: fmt.Sprintf("Template %d", i), + Authors: []string{fmt.Sprintf("author%d", i)}, + Tags: []string{"tag1", "tag2", "tag3"}, + Severity: "high", + } + cache.Set(metadata.FilePath, metadata) + } + + require.Equal(t, 100, cache.Size(), "Cache should contain 100 entries") + + // Save to disk + err = cache.Save() + require.NoError(t, err) + + // Load into new cache + cache2, err := NewIndex(tmpDir) + require.NoError(t, err) + err = cache2.Load() + require.NoError(t, err) + + require.Equal(t, 100, cache2.Size(), "Loaded cache 
should contain 100 entries") + + // Verify a sample entry + found := cache2.Has(filepath.Join("/tmp", "template-50.yaml")) + require.True(t, found, "Should find sample entry") +} + +func TestMetadataHelperMethods(t *testing.T) { + metadata := &Metadata{ + ID: "helper-test", + Tags: []string{}, + Authors: []string{}, + Severity: "", + ProtocolType: "", + } + + t.Run("Empty tags", func(t *testing.T) { + require.False(t, metadata.HasTag("anytag")) + }) + + t.Run("Empty authors", func(t *testing.T) { + require.False(t, metadata.HasAuthor("anyauthor")) + }) + + t.Run("Empty severity", func(t *testing.T) { + require.False(t, metadata.MatchesSeverity(severity.Critical)) + }) + + t.Run("Empty protocol", func(t *testing.T) { + require.False(t, metadata.MatchesProtocol(types.HTTPProtocol)) + }) +} + +func TestMultipleProtocolsDetection(t *testing.T) { + tmpDir := t.TempDir() + tmpFile := filepath.Join(tmpDir, "multi.yaml") + err := os.WriteFile(tmpFile, []byte("id: multi"), 0644) + require.NoError(t, err) + + // Template with multiple protocol types + template := &templates.Template{ + ID: "multi-protocol", + Info: model.Info{ + Name: "Multi Protocol Template", + Authors: stringslice.StringSlice{Value: "tester"}, + SeverityHolder: severity.Holder{ + Severity: severity.High, + }, + }, + RequestsHTTP: []*http.Request{{Method: http.HTTPMethodTypeHolder{MethodType: http.HTTPGet}}}, + RequestsHeadless: []*headless.Request{{}}, + RequestsCode: []*code.Request{{}}, + } + + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + metadata, ok := cache.SetFromTemplate(tmpFile, template) + require.True(t, ok) + require.NotNil(t, metadata) + require.Equal(t, "http", metadata.ProtocolType, "Primary protocol should be http") +} + +func TestNewMetadataFromTemplate(t *testing.T) { + tmpl := &templates.Template{ + ID: "test-template", + Info: model.Info{ + Name: "Test Template", + Authors: stringslice.StringSlice{Value: []string{"author"}}, + Tags: stringslice.StringSlice{Value: 
[]string{"tag"}}, + SeverityHolder: severity.Holder{ + Severity: severity.Low, + }, + }, + Verified: true, + TemplateVerifier: "verifier", + } + + path := "/tmp/test.yaml" + metadata := NewMetadataFromTemplate(path, tmpl) + + require.Equal(t, tmpl.ID, metadata.ID) + require.Equal(t, path, metadata.FilePath) + require.Equal(t, tmpl.Info.Name, metadata.Name) + require.Equal(t, tmpl.Info.Authors.ToSlice(), metadata.Authors) + require.Equal(t, tmpl.Info.Tags.ToSlice(), metadata.Tags) + require.Equal(t, tmpl.Info.SeverityHolder.Severity.String(), metadata.Severity) + require.Equal(t, tmpl.Type().String(), metadata.ProtocolType) + require.Equal(t, tmpl.Verified, metadata.Verified) + require.Equal(t, tmpl.TemplateVerifier, metadata.TemplateVerifier) +} diff --git a/pkg/catalog/index/metadata.go b/pkg/catalog/index/metadata.go new file mode 100644 index 0000000000..013ab439c1 --- /dev/null +++ b/pkg/catalog/index/metadata.go @@ -0,0 +1,104 @@ +package index + +import ( + "os" + "slices" + "time" + + "github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity" + "github.com/projectdiscovery/nuclei/v3/pkg/templates" + "github.com/projectdiscovery/nuclei/v3/pkg/templates/types" +) + +// Metadata contains lightweight metadata extracted from a template. +type Metadata struct { + // ID is the unique identifier of the template. + ID string `gob:"id"` + + // FilePath is the path to the template file. + FilePath string `gob:"file_path"` + + // ModTime is the modification time of the template file. + ModTime time.Time `gob:"mod_time"` + + // Name is the name of the template. + Name string `gob:"name"` + + // Authors are the authors of the template. + Authors []string `gob:"authors"` + + // Tags are the tags associated with the template. + Tags []string `gob:"tags"` + + // Severity is the severity level of the template. + Severity string `gob:"severity"` + + // ProtocolType is the primary protocol type of the template. 
+ ProtocolType string `gob:"protocol_type"` + + // Verified indicates whether the template is verified. + Verified bool `gob:"verified"` + + // TemplateVerifier is the verifier used for the template. + TemplateVerifier string `gob:"verifier,omitempty"` + + // NOTE(dwisiswant0): Consider adding more fields here in the future to + // enhance filtering caps w/o loading full templates, such as: + // `has_{code,headless,file}` to indicate presence of protocol-based + // requests, and/or classification fields (CVE, CWE, CVSS, EPSS), if needed. + // + // For maintainers: when adding new fields, don't forget to update the + // Weigher logic in [NewIndex] to account for the new fields in cache weight + // calculation, because it affects cache eviction behavior. Also, consider + // the impact on existing cached data and whether a [IndexVersion] bump is + // needed. +} + +// NewMetadataFromTemplate creates a new metadata object from a template. +func NewMetadataFromTemplate(path string, tpl *templates.Template) *Metadata { + return &Metadata{ + ID: tpl.ID, + FilePath: path, + + Name: tpl.Info.Name, + Authors: tpl.Info.Authors.ToSlice(), + Tags: tpl.Info.Tags.ToSlice(), + Severity: tpl.Info.SeverityHolder.Severity.String(), + + ProtocolType: tpl.Type().String(), + + Verified: tpl.Verified, + TemplateVerifier: tpl.TemplateVerifier, + } +} + +// IsValid checks if the cached metadata is still valid by comparing the file +// modification time. +func (m *Metadata) IsValid() bool { + info, err := os.Stat(m.FilePath) + if err != nil { + return false + } + + return m.ModTime.Equal(info.ModTime()) +} + +// MatchesSeverity checks if the metadata matches the given severity. +func (m *Metadata) MatchesSeverity(sev severity.Severity) bool { + return m.Severity == sev.String() +} + +// MatchesProtocol checks if the metadata matches the given protocol type. 
+func (m *Metadata) MatchesProtocol(protocolType types.ProtocolType) bool { + return m.ProtocolType == protocolType.String() +} + +// HasTag checks if the metadata contains the given tag. +func (m *Metadata) HasTag(tag string) bool { + return slices.Contains(m.Tags, tag) +} + +// HasAuthor checks if the metadata contains the given author. +func (m *Metadata) HasAuthor(author string) bool { + return slices.Contains(m.Authors, author) +} diff --git a/pkg/catalog/loader/loader.go b/pkg/catalog/loader/loader.go index a68153d875..00bc970b9b 100644 --- a/pkg/catalog/loader/loader.go +++ b/pkg/catalog/loader/loader.go @@ -14,7 +14,7 @@ import ( "github.com/projectdiscovery/gologger" "github.com/projectdiscovery/nuclei/v3/pkg/catalog" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/config" - "github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader/filter" + "github.com/projectdiscovery/nuclei/v3/pkg/catalog/index" "github.com/projectdiscovery/nuclei/v3/pkg/keys" "github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity" "github.com/projectdiscovery/nuclei/v3/pkg/protocols" @@ -77,7 +77,6 @@ type Config struct { type Store struct { id string // id of the store (optional) tagFilter *templates.TagFilter - pathFilter *filter.PathFilter config *Config finalTemplates []string finalWorkflows []string @@ -92,6 +91,16 @@ type Store struct { // parserCacheOnce is used to cache the parser cache result parserCacheOnce func() *templates.Cache + // metadataIndex is the template metadata cache + metadataIndex *index.Index + + // indexFilter is the cached filter for metadata matching + indexFilter *index.Filter + + // saveTemplatesIndexOnce is used to ensure we only save the metadata index + // once + saveMetadataIndexOnce func() + // NotFoundCallback is called for each not found template // This overrides error handling for not found templates NotFoundCallback func(template string) bool @@ -129,17 +138,10 @@ func NewConfig(options *types.Options, catalog catalog.Catalog, 
executerOpts *pr // New creates a new template store based on provided configuration func New(cfg *Config) (*Store, error) { + // tagFilter only for IncludeConditions (advanced filtering). + // All other filtering (tags, authors, severities, IDs, protocols, paths) is + // handled by [index.Filter]. tagFilter, err := templates.NewTagFilter(&templates.TagFilterConfig{ - Tags: cfg.Tags, - ExcludeTags: cfg.ExcludeTags, - Authors: cfg.Authors, - Severities: cfg.Severities, - ExcludeSeverities: cfg.ExcludeSeverities, - IncludeTags: cfg.IncludeTags, - IncludeIds: cfg.IncludeIds, - ExcludeIds: cfg.ExcludeIds, - Protocols: cfg.Protocols, - ExcludeProtocols: cfg.ExcludeProtocols, IncludeConditions: cfg.IncludeConditions, }) if err != nil { @@ -147,13 +149,9 @@ func New(cfg *Config) (*Store, error) { } store := &Store{ - id: cfg.StoreId, - config: cfg, - tagFilter: tagFilter, - pathFilter: filter.NewPathFilter(&filter.PathFilterConfig{ - IncludedTemplates: cfg.IncludeTemplates, - ExcludedTemplates: cfg.ExcludeTemplates, - }, cfg.Catalog), + id: cfg.StoreId, + config: cfg, + tagFilter: tagFilter, finalTemplates: cfg.Templates, finalWorkflows: cfg.Workflows, logger: cfg.Logger, @@ -171,6 +169,21 @@ func New(cfg *Config) (*Store, error) { return nil }) + // Initialize metadata index and filter (load from disk & cache for reuse) + store.metadataIndex = store.loadTemplatesIndex() + store.indexFilter = store.buildIndexFilter() + store.saveMetadataIndexOnce = sync.OnceFunc(func() { + if store.metadataIndex == nil { + return + } + + if err := store.metadataIndex.Save(); err != nil { + store.logger.Warning().Msgf("Could not save metadata cache: %v", err) + } else { + store.logger.Verbose().Msgf("Saved %d templates to metadata cache", store.metadataIndex.Size()) + } + }) + // Do a check to see if we have URLs in templates flag, if so // we need to processs them separately and remove them from the initial list var templatesFinal []string @@ -302,17 +315,102 @@ func init() { 
templateIDPathMap = make(map[string]string) } +// buildIndexFilter creates an [index.Filter] from the store configuration. +// This filter handles all basic filtering (paths, tags, authors, severities, +// IDs, protocols). Advanced IncludeConditions filtering is handled separately +// by tagFilter. +func (store *Store) buildIndexFilter() *index.Filter { + includeTemplates, _ := store.config.Catalog.GetTemplatesPath(store.config.IncludeTemplates) + excludeTemplates, _ := store.config.Catalog.GetTemplatesPath(store.config.ExcludeTemplates) + + return &index.Filter{ + Authors: store.config.Authors, + Tags: store.config.Tags, + ExcludeTags: store.config.ExcludeTags, + IncludeTags: store.config.IncludeTags, + IDs: store.config.IncludeIds, + ExcludeIDs: store.config.ExcludeIds, + IncludeTemplates: includeTemplates, + ExcludeTemplates: excludeTemplates, + Severities: []severity.Severity(store.config.Severities), + ExcludeSeverities: []severity.Severity(store.config.ExcludeSeverities), + ProtocolTypes: []templateTypes.ProtocolType(store.config.Protocols), + ExcludeProtocolTypes: []templateTypes.ProtocolType(store.config.ExcludeProtocols), + } +} + +func (store *Store) loadTemplatesIndex() *index.Index { + var metadataIdx *index.Index + + idx, err := index.NewDefaultIndex() + if err != nil { + store.logger.Warning().Msgf("Could not create metadata cache: %v", err) + } else { + metadataIdx = idx + if err := metadataIdx.Load(); err != nil { + store.logger.Warning().Msgf("Could not load metadata cache: %v", err) + } + } + + return metadataIdx +} + // LoadTemplatesOnlyMetadata loads only the metadata of the templates func (store *Store) LoadTemplatesOnlyMetadata() error { + defer store.saveMetadataIndexOnce() + templatePaths, errs := store.config.Catalog.GetTemplatesPath(store.finalTemplates) store.logErroredTemplates(errs) - filteredTemplatePaths := store.pathFilter.Match(templatePaths) - + indexFilter := store.indexFilter validPaths := make(map[string]struct{}) - for 
templatePath := range filteredTemplatePaths { + + for _, templatePath := range templatePaths { + if store.metadataIndex != nil { + if metadata, found := store.metadataIndex.Get(templatePath); found { + if !indexFilter.Matches(metadata) { + continue + } + + if store.tagFilter != nil { + loaded, err := store.config.ExecutorOptions.Parser.LoadTemplate(templatePath, store.tagFilter, nil, store.config.Catalog) + if !loaded { + if err != nil && strings.Contains(err.Error(), templates.ErrExcluded.Error()) { + stats.Increment(templates.TemplatesExcludedStats) + if config.DefaultConfig.LogAllEvents { + store.logger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error()) + } + } + continue + } + } + + validPaths[templatePath] = struct{}{} + continue + } + } + loaded, err := store.config.ExecutorOptions.Parser.LoadTemplate(templatePath, store.tagFilter, nil, store.config.Catalog) - if loaded || store.pathFilter.MatchIncluded(templatePath) { + if loaded { + templatesCache := store.parserCacheOnce() + if templatesCache != nil { + if template, _, _ := templatesCache.Has(templatePath); template != nil { + var metadata *index.Metadata + if store.metadataIndex != nil { + metadata, _ = store.metadataIndex.SetFromTemplate(templatePath, template) + } else { + metadata = index.NewMetadataFromTemplate(templatePath, template) + } + + if !indexFilter.Matches(metadata) { + continue + } + + validPaths[templatePath] = struct{}{} + continue + } + } + validPaths[templatePath] = struct{}{} } if err != nil { @@ -376,15 +474,24 @@ func (store *Store) LoadTemplatesOnlyMetadata() error { func (store *Store) ValidateTemplates() error { templatePaths, errs := store.config.Catalog.GetTemplatesPath(store.finalTemplates) store.logErroredTemplates(errs) + workflowPaths, errs := store.config.Catalog.GetTemplatesPath(store.finalWorkflows) store.logErroredTemplates(errs) - filteredTemplatePaths := store.pathFilter.Match(templatePaths) - filteredWorkflowPaths := 
store.pathFilter.Match(workflowPaths) + templatePathsMap := make(map[string]struct{}, len(templatePaths)) + for _, path := range templatePaths { + templatePathsMap[path] = struct{}{} + } + + workflowPathsMap := make(map[string]struct{}, len(workflowPaths)) + for _, path := range workflowPaths { + workflowPathsMap[path] = struct{}{} + } - if store.areTemplatesValid(filteredTemplatePaths) && store.areWorkflowsValid(filteredWorkflowPaths) { + if store.areTemplatesValid(templatePathsMap) && store.areWorkflowsValid(workflowPathsMap) { return nil } + return errors.New("errors occurred during template validation") } @@ -503,10 +610,9 @@ func (store *Store) LoadTemplates(templatesList []string) []*templates.Template func (store *Store) LoadWorkflows(workflowsList []string) []*templates.Template { includedWorkflows, errs := store.config.Catalog.GetTemplatesPath(workflowsList) store.logErroredTemplates(errs) - workflowPathMap := store.pathFilter.Match(includedWorkflows) - loadedWorkflows := make([]*templates.Template, 0, len(workflowPathMap)) - for workflowPath := range workflowPathMap { + loadedWorkflows := make([]*templates.Template, 0, len(includedWorkflows)) + for _, workflowPath := range includedWorkflows { loaded, err := store.config.ExecutorOptions.Parser.LoadWorkflow(workflowPath, store.config.Catalog) if err != nil { store.logger.Warning().Msgf("Could not load workflow %s: %s\n", workflowPath, err) @@ -526,9 +632,12 @@ func (store *Store) LoadWorkflows(workflowsList []string) []*templates.Template // LoadTemplatesWithTags takes a list of templates and extra tags // returning templates that match. 
func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templates.Template { + defer store.saveMetadataIndexOnce() + + indexFilter := store.indexFilter + includedTemplates, errs := store.config.Catalog.GetTemplatesPath(templatesList) store.logErroredTemplates(errs) - templatePathMap := store.pathFilter.Match(includedTemplates) loadedTemplates := sliceutil.NewSyncSlice[*templates.Template]() loadedTemplateIDs := mapsutil.NewSyncLockMap[string, struct{}]() @@ -572,14 +681,46 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ panic("dialers with executionId " + store.config.ExecutorOptions.Options.ExecutionId + " not found") } - for templatePath := range templatePathMap { + for _, templatePath := range includedTemplates { wgLoadTemplates.Add() go func(templatePath string) { defer wgLoadTemplates.Done() + var ( + metadata *index.Metadata + metadataCached bool + ) + + if store.metadataIndex != nil { + if cachedMetadata, found := store.metadataIndex.Get(templatePath); found { + metadata = cachedMetadata + if !indexFilter.Matches(metadata) { + return + } + // NOTE(dwisiswant0): else, tagFilter probably exists (for + // IncludeConditions), which still need to check via + // LoadTemplate. 
+ + metadataCached = true + } + } + loaded, err := store.config.ExecutorOptions.Parser.LoadTemplate(templatePath, store.tagFilter, tags, store.config.Catalog) - if loaded || store.pathFilter.MatchIncluded(templatePath) { + if loaded { parsed, err := templates.Parse(templatePath, store.preprocessor, store.config.ExecutorOptions) + + if parsed != nil && !metadataCached { + if store.metadataIndex != nil { + metadata, _ = store.metadataIndex.SetFromTemplate(templatePath, parsed) + } else { + metadata = index.NewMetadataFromTemplate(templatePath, parsed) + } + + if metadata != nil && !indexFilter.Matches(metadata) { + return + } + } + if err != nil { // exclude templates not compatible with offline matching from total runtime warning stats if !errors.Is(err, templates.ErrIncompatibleWithOfflineMatching) { diff --git a/pkg/catalog/loader/loader_bench_test.go b/pkg/catalog/loader/loader_bench_test.go index 079e928ad5..32ed506e8b 100644 --- a/pkg/catalog/loader/loader_bench_test.go +++ b/pkg/catalog/loader/loader_bench_test.go @@ -8,7 +8,9 @@ import ( "github.com/projectdiscovery/nuclei/v3/pkg/catalog/disk" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader" "github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow" + "github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity" "github.com/projectdiscovery/nuclei/v3/pkg/templates" + templateTypes "github.com/projectdiscovery/nuclei/v3/pkg/templates/types" "github.com/projectdiscovery/nuclei/v3/pkg/testutils" ) @@ -41,3 +43,201 @@ func BenchmarkStoreValidateTemplates(b *testing.B) { _ = store.ValidateTemplates() } } + +func BenchmarkLoadTemplates(b *testing.B) { + options := testutils.DefaultOptions.Copy() + options.Logger = &gologger.Logger{} + options.ExecutionId = "bench-load-templates" + testutils.Init(options) + + catalog := disk.NewCatalog(config.DefaultConfig.TemplatesDirectory) + executerOpts := testutils.NewMockExecuterOptions(options, nil) + executerOpts.Parser = templates.NewParser() + + 
workflowLoader, err := workflow.NewLoader(executerOpts) + if err != nil { + b.Fatalf("could not create workflow loader: %s", err) + } + executerOpts.WorkflowLoader = workflowLoader + + b.Run("NoFilter", func(b *testing.B) { + loaderCfg := loader.NewConfig(options, catalog, executerOpts) + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplates([]string{config.DefaultConfig.TemplatesDirectory}) + } + }) + + b.Run("FilterBySeverityCritical", func(b *testing.B) { + opts := options.Copy() + opts.Severities = severity.Severities{severity.Critical} + loaderCfg := loader.NewConfig(opts, catalog, executerOpts) + + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplates([]string{config.DefaultConfig.TemplatesDirectory}) + } + }) + + b.Run("FilterBySeverityHighCritical", func(b *testing.B) { + opts := options.Copy() + opts.Severities = severity.Severities{severity.High, severity.Critical} + loaderCfg := loader.NewConfig(opts, catalog, executerOpts) + + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplates([]string{config.DefaultConfig.TemplatesDirectory}) + } + }) + + b.Run("FilterByAuthor", func(b *testing.B) { + opts := options.Copy() + opts.Authors = []string{"pdteam"} + loaderCfg := loader.NewConfig(opts, catalog, executerOpts) + + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplates([]string{config.DefaultConfig.TemplatesDirectory}) + } + }) + + b.Run("FilterByTags", func(b *testing.B) { + opts := options.Copy() + opts.Tags = []string{"cve", "rce"} + 
loaderCfg := loader.NewConfig(opts, catalog, executerOpts) + + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplates([]string{config.DefaultConfig.TemplatesDirectory}) + } + }) + + b.Run("FilterByProtocol", func(b *testing.B) { + opts := options.Copy() + opts.Protocols = templateTypes.ProtocolTypes{templateTypes.HTTPProtocol} + loaderCfg := loader.NewConfig(opts, catalog, executerOpts) + + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplates([]string{config.DefaultConfig.TemplatesDirectory}) + } + }) + + b.Run("ComplexFilter", func(b *testing.B) { + opts := options.Copy() + opts.Severities = severity.Severities{severity.High, severity.Critical} + opts.Authors = []string{"pdteam"} + opts.Tags = []string{"cve"} + loaderCfg := loader.NewConfig(opts, catalog, executerOpts) + + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplates([]string{config.DefaultConfig.TemplatesDirectory}) + } + }) +} + +func BenchmarkLoadTemplatesOnlyMetadata(b *testing.B) { + options := testutils.DefaultOptions.Copy() + options.Logger = &gologger.Logger{} + options.ExecutionId = "bench-metadata" + testutils.Init(options) + + catalog := disk.NewCatalog(config.DefaultConfig.TemplatesDirectory) + executerOpts := testutils.NewMockExecuterOptions(options, nil) + executerOpts.Parser = templates.NewParser() + + workflowLoader, err := workflow.NewLoader(executerOpts) + if err != nil { + b.Fatalf("could not create workflow loader: %s", err) + } + executerOpts.WorkflowLoader = workflowLoader + + b.Run("WithoutFilter", func(b *testing.B) { + loaderCfg := loader.NewConfig(options, catalog, executerOpts) + store, 
err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + // Pre-warm the cache + _ = store.LoadTemplatesOnlyMetadata() + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplatesOnlyMetadata() + } + }) + + b.Run("WithSeverityFilter", func(b *testing.B) { + opts := options.Copy() + opts.Severities = severity.Severities{severity.Critical} + loaderCfg := loader.NewConfig(opts, catalog, executerOpts) + + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + // Pre-warm the cache + _ = store.LoadTemplatesOnlyMetadata() + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplatesOnlyMetadata() + } + }) +} diff --git a/pkg/core/execute_options.go b/pkg/core/execute_options.go index df1fe14358..b0a2be1c86 100644 --- a/pkg/core/execute_options.go +++ b/pkg/core/execute_options.go @@ -109,7 +109,6 @@ func (e *Engine) executeTemplateSpray(ctx context.Context, templatesList []*temp defer wp.Wait() for _, template := range templatesList { - template := template select { case <-ctx.Done(): diff --git a/pkg/core/workflow_execute.go b/pkg/core/workflow_execute.go index 55d19dd677..697312aa47 100644 --- a/pkg/core/workflow_execute.go +++ b/pkg/core/workflow_execute.go @@ -34,17 +34,14 @@ func (e *Engine) executeWorkflow(ctx *scan.ScanContext, w *workflows.Workflow) b swg, _ := syncutil.New(syncutil.WithSize(templateThreads)) for _, template := range w.Workflows { - swg.Add() - - func(template *workflows.WorkflowTemplate) { - defer swg.Done() - - if err := e.runWorkflowStep(template, ctx, results, swg, w); err != nil { - gologger.Warning().Msgf(workflowStepExecutionError, template.Template, err) - } - }(template) + newCtx := scan.NewScanContext(ctx.Context(), ctx.Input.Clone()) + if err := e.runWorkflowStep(template, newCtx, results, swg, w); err != nil { + gologger.Warning().Msgf(workflowStepExecutionError, template.Template, err) + } } + 
swg.Wait() + return results.Load() } diff --git a/pkg/input/formats/burp/burp.go b/pkg/input/formats/burp/burp.go index 9b2a362dfe..459c6d8a54 100644 --- a/pkg/input/formats/burp/burp.go +++ b/pkg/input/formats/burp/burp.go @@ -43,7 +43,6 @@ func (j *BurpFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallba // Print the parsed data for verification for _, item := range items.Items { - item := item binx, err := base64.StdEncoding.DecodeString(item.Request.Raw) if err != nil { return errors.Wrap(err, "could not decode base64") diff --git a/pkg/input/formats/formats.go b/pkg/input/formats/formats.go index c7798286a3..9de4d0d013 100644 --- a/pkg/input/formats/formats.go +++ b/pkg/input/formats/formats.go @@ -7,6 +7,7 @@ import ( "strings" "github.com/projectdiscovery/nuclei/v3/pkg/input/types" + "github.com/projectdiscovery/retryablehttp-go" fileutil "github.com/projectdiscovery/utils/file" "gopkg.in/yaml.v3" ) @@ -47,6 +48,16 @@ type Format interface { SetOptions(options InputFormatOptions) } +// SpecDownloader is an interface for downloading API specifications from URLs +type SpecDownloader interface { + // Download downloads the spec from the given URL and saves it to tmpDir + // Returns the path to the downloaded file + // httpClient is a retryablehttp.Client instance (can be nil for fallback) + Download(url, tmpDir string, httpClient *retryablehttp.Client) (string, error) + // SupportedExtensions returns the list of supported file extensions + SupportedExtensions() []string +} + var ( DefaultVarDumpFileName = "required_openapi_params.yaml" ErrNoVarsDumpFile = errors.New("no required params file found") diff --git a/pkg/input/formats/openapi/downloader.go b/pkg/input/formats/openapi/downloader.go new file mode 100644 index 0000000000..955fdc50c6 --- /dev/null +++ b/pkg/input/formats/openapi/downloader.go @@ -0,0 +1,136 @@ +package openapi + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "os" + "path/filepath" + "strings" + 
"time" + + "github.com/pkg/errors" + "github.com/projectdiscovery/nuclei/v3/pkg/input/formats" + "github.com/projectdiscovery/retryablehttp-go" +) + +// OpenAPIDownloader implements the SpecDownloader interface for OpenAPI 3.0 specs +type OpenAPIDownloader struct{} + +// NewDownloader creates a new OpenAPI downloader +func NewDownloader() formats.SpecDownloader { + return &OpenAPIDownloader{} +} + +// This function downloads an OpenAPI 3.0 spec from the given URL and saves it to tmpDir +func (d *OpenAPIDownloader) Download(urlStr, tmpDir string, httpClient *retryablehttp.Client) (string, error) { + // Validate URL format, OpenAPI 3.0 specs are typically JSON + if !strings.HasSuffix(urlStr, ".json") { + return "", fmt.Errorf("URL does not appear to be an OpenAPI JSON spec") + } + + const maxSpecSizeBytes = 10 * 1024 * 1024 // 10MB + + // Use provided httpClient or create a fallback + var client *http.Client + if httpClient != nil { + client = httpClient.HTTPClient + } else { + // Fallback to simple client if no httpClient provided + client = &http.Client{Timeout: 30 * time.Second} + } + + resp, err := client.Get(urlStr) + if err != nil { + return "", errors.Wrap(err, "failed to download OpenAPI spec") + } + + defer func() { + _ = resp.Body.Close() + }() + + if resp.StatusCode != http.StatusOK { + return "", fmt.Errorf("HTTP %d when downloading OpenAPI spec", resp.StatusCode) + } + + bodyBytes, err := io.ReadAll(io.LimitReader(resp.Body, maxSpecSizeBytes)) + if err != nil { + return "", errors.Wrap(err, "failed to read response body") + } + + // Validate it's a valid JSON and has OpenAPI structure + var spec map[string]interface{} + if err := json.Unmarshal(bodyBytes, &spec); err != nil { + return "", fmt.Errorf("downloaded content is not valid JSON: %w", err) + } + + // Check if it's an OpenAPI 3.0 spec + if openapi, exists := spec["openapi"]; exists { + if openapiStr, ok := openapi.(string); ok && strings.HasPrefix(openapiStr, "3.") { + // Valid OpenAPI 3.0 spec + 
} else { + return "", fmt.Errorf("not a valid OpenAPI 3.0 spec (found version: %v)", openapi) + } + } else { + return "", fmt.Errorf("not an OpenAPI spec (missing 'openapi' field)") + } + + // Extract host from URL for server configuration + parsedURL, err := url.Parse(urlStr) + if err != nil { + return "", errors.Wrap(err, "failed to parse URL") + } + host := parsedURL.Host + scheme := parsedURL.Scheme + if scheme == "" { + scheme = "https" + } + + // Add servers section if missing or empty + servers, exists := spec["servers"] + if !exists || servers == nil { + spec["servers"] = []map[string]interface{}{{"url": scheme + "://" + host}} + } else if serverList, ok := servers.([]interface{}); ok && len(serverList) == 0 { + spec["servers"] = []map[string]interface{}{{"url": scheme + "://" + host}} + } + + // Marshal back to JSON + modifiedJSON, err := json.Marshal(spec) + if err != nil { + return "", errors.Wrap(err, "failed to marshal modified spec") + } + + // Create output directory + openapiDir := filepath.Join(tmpDir, "openapi") + if err := os.MkdirAll(openapiDir, 0755); err != nil { + return "", errors.Wrap(err, "failed to create openapi directory") + } + + // Generate filename + filename := fmt.Sprintf("openapi-spec-%d.json", time.Now().Unix()) + filePath := filepath.Join(openapiDir, filename) + + // Write file + file, err := os.Create(filePath) + if err != nil { + return "", fmt.Errorf("failed to create file: %w", err) + } + + defer func() { + _ = file.Close() + }() + + if _, writeErr := file.Write(modifiedJSON); writeErr != nil { + _ = os.Remove(filePath) + return "", errors.Wrap(writeErr, "failed to write OpenAPI spec to file") + } + + return filePath, nil +} + +// SupportedExtensions returns the list of supported file extensions for OpenAPI +func (d *OpenAPIDownloader) SupportedExtensions() []string { + return []string{".json"} +} diff --git a/pkg/input/formats/openapi/downloader_test.go b/pkg/input/formats/openapi/downloader_test.go new file mode 100644 
index 0000000000..10ee93817a --- /dev/null +++ b/pkg/input/formats/openapi/downloader_test.go @@ -0,0 +1,278 @@ +package openapi + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "strings" + "testing" + "time" +) + +func TestOpenAPIDownloader_SupportedExtensions(t *testing.T) { + downloader := &OpenAPIDownloader{} + extensions := downloader.SupportedExtensions() + + expected := []string{".json"} + if len(extensions) != len(expected) { + t.Errorf("Expected %d extensions, got %d", len(expected), len(extensions)) + } + + for i, ext := range extensions { + if ext != expected[i] { + t.Errorf("Expected extension %s, got %s", expected[i], ext) + } + } +} + +func TestOpenAPIDownloader_Download_Success(t *testing.T) { + // Create a mock OpenAPI spec + mockSpec := map[string]interface{}{ + "openapi": "3.0.0", + "info": map[string]interface{}{ + "title": "Test API", + "version": "1.0.0", + }, + "paths": map[string]interface{}{ + "/test": map[string]interface{}{ + "get": map[string]interface{}{ + "summary": "Test endpoint", + }, + }, + }, + } + + // Create mock server + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(mockSpec); err != nil { + http.Error(w, "failed to encode response", http.StatusInternalServerError) + } + })) + defer server.Close() + + // Create temp directory + tmpDir, err := os.MkdirTemp("", "openapi_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() + + // Test download + downloader := &OpenAPIDownloader{} + filePath, err := downloader.Download(server.URL+"/openapi.json", tmpDir, nil) + if err != nil { + t.Fatalf("Download failed: %v", err) + } + + // Verify file exists + if !fileExists(filePath) { + t.Errorf("Downloaded file does not exist: %s", 
filePath) + } + + // Verify file content + content, err := os.ReadFile(filePath) + if err != nil { + t.Fatalf("Failed to read downloaded file: %v", err) + } + + var downloadedSpec map[string]interface{} + if err := json.Unmarshal(content, &downloadedSpec); err != nil { + t.Fatalf("Failed to parse downloaded JSON: %v", err) + } + + // Verify servers field was added + servers, exists := downloadedSpec["servers"] + if !exists { + t.Error("Servers field was not added to the spec") + } + + if serversList, ok := servers.([]interface{}); ok { + if len(serversList) == 0 { + t.Error("Servers list is empty") + } + } else { + t.Error("Servers field is not a list") + } +} + +func TestOpenAPIDownloader_Download_NonJSONURL(t *testing.T) { + tmpDir, err := os.MkdirTemp("", "openapi_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() + + downloader := &OpenAPIDownloader{} + _, err = downloader.Download("http://example.com/spec.yaml", tmpDir, nil) + if err == nil { + t.Error("Expected error for non-JSON URL, but got none") + } + + if !strings.Contains(err.Error(), "URL does not appear to be an OpenAPI JSON spec") { + t.Errorf("Unexpected error message: %v", err) + } +} + +func TestOpenAPIDownloader_Download_HTTPError(t *testing.T) { + // Create mock server that returns 404 + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNotFound) + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "openapi_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() + + downloader := &OpenAPIDownloader{} + _, err = downloader.Download(server.URL+"/openapi.json", tmpDir, nil) + if err == nil { + t.Error("Expected error for 
HTTP 404, but got none") + } +} + +func TestOpenAPIDownloader_Download_InvalidJSON(t *testing.T) { + // Create mock server that returns invalid JSON + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + if _, err := w.Write([]byte("invalid json")); err != nil { + http.Error(w, "failed to write response", http.StatusInternalServerError) + } + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "openapi_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() + + downloader := &OpenAPIDownloader{} + _, err = downloader.Download(server.URL+"/openapi.json", tmpDir, nil) + if err == nil { + t.Error("Expected error for invalid JSON, but got none") + } +} + +func TestOpenAPIDownloader_Download_Timeout(t *testing.T) { + // Create mock server with delay + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + time.Sleep(35 * time.Second) // Longer than 30 second timeout + if err := json.NewEncoder(w).Encode(map[string]interface{}{"test": "data"}); err != nil { + http.Error(w, "failed to encode response", http.StatusInternalServerError) + } + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "openapi_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() + + downloader := &OpenAPIDownloader{} + _, err = downloader.Download(server.URL+"/openapi.json", tmpDir, nil) + if err == nil { + t.Error("Expected timeout error, but got none") + } +} + +func TestOpenAPIDownloader_Download_WithExistingServers(t *testing.T) { + // Create a mock OpenAPI spec with existing servers + mockSpec := map[string]interface{}{ + "openapi": "3.0.0", + 
"info": map[string]interface{}{ + "title": "Test API", + "version": "1.0.0", + }, + "servers": []interface{}{ + map[string]interface{}{ + "url": "https://existing-server.com", + }, + }, + "paths": map[string]interface{}{}, + } + + // Create mock server + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(mockSpec); err != nil { + http.Error(w, "failed to encode response", http.StatusInternalServerError) + } + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "openapi_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() + + downloader := &OpenAPIDownloader{} + filePath, err := downloader.Download(server.URL+"/openapi.json", tmpDir, nil) + if err != nil { + t.Fatalf("Download failed: %v", err) + } + + // Verify existing servers are preserved + content, err := os.ReadFile(filePath) + if err != nil { + t.Fatalf("Failed to read downloaded file: %v", err) + } + + var downloadedSpec map[string]interface{} + if err := json.Unmarshal(content, &downloadedSpec); err != nil { + t.Fatalf("Failed to parse downloaded JSON: %v", err) + } + + servers, exists := downloadedSpec["servers"] + if !exists { + t.Error("Servers field was removed from the spec") + } + + if serversList, ok := servers.([]interface{}); ok { + if len(serversList) != 1 { + t.Errorf("Expected 1 server, got %d", len(serversList)) + } + } +} + +// Helper function to check if file exists +func fileExists(filename string) bool { + _, err := os.Stat(filename) + return !os.IsNotExist(err) +} diff --git a/pkg/input/formats/swagger/downloader.go b/pkg/input/formats/swagger/downloader.go new file mode 100644 index 0000000000..b6b5a333f8 --- /dev/null +++ b/pkg/input/formats/swagger/downloader.go @@ -0,0 +1,165 @@ +package swagger + 
+import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "os" + "path/filepath" + "strings" + "time" + + "github.com/pkg/errors" + "github.com/projectdiscovery/nuclei/v3/pkg/input/formats" + "github.com/projectdiscovery/retryablehttp-go" + "gopkg.in/yaml.v3" +) + +// SwaggerDownloader implements the SpecDownloader interface for Swagger 2.0 specs +type SwaggerDownloader struct{} + +// NewDownloader creates a new Swagger downloader +func NewDownloader() formats.SpecDownloader { + return &SwaggerDownloader{} +} + +// This function downloads a Swagger 2.0 spec from the given URL and saves it to tmpDir +func (d *SwaggerDownloader) Download(urlStr, tmpDir string, httpClient *retryablehttp.Client) (string, error) { + // Swagger can be JSON or YAML + supportedExts := d.SupportedExtensions() + isSupported := false + for _, ext := range supportedExts { + if strings.HasSuffix(urlStr, ext) { + isSupported = true + break + } + } + if !isSupported { + return "", fmt.Errorf("URL does not appear to be a Swagger spec (supported: %v)", supportedExts) + } + + const maxSpecSizeBytes = 10 * 1024 * 1024 // 10MB + + // Use provided httpClient or create a fallback + var client *http.Client + if httpClient != nil { + client = httpClient.HTTPClient + } else { + // Fallback to simple client if no httpClient provided + client = &http.Client{Timeout: 30 * time.Second} + } + + resp, err := client.Get(urlStr) + if err != nil { + return "", errors.Wrap(err, "failed to download Swagger spec") + } + + defer func() { + _ = resp.Body.Close() + }() + + if resp.StatusCode != http.StatusOK { + return "", fmt.Errorf("HTTP %d when downloading Swagger spec", resp.StatusCode) + } + + bodyBytes, err := io.ReadAll(io.LimitReader(resp.Body, maxSpecSizeBytes)) + if err != nil { + return "", errors.Wrap(err, "failed to read response body") + } + + // Determine format and parse + var spec map[string]interface{} + var isYAML bool + + // Try JSON first + if err := json.Unmarshal(bodyBytes, &spec); err 
!= nil { + // Then try YAML + if err := yaml.Unmarshal(bodyBytes, &spec); err != nil { + return "", fmt.Errorf("downloaded content is neither valid JSON nor YAML: %w", err) + } + isYAML = true + } + + // Validate it's a Swagger 2.0 spec + if swagger, exists := spec["swagger"]; exists { + if swaggerStr, ok := swagger.(string); ok && strings.HasPrefix(swaggerStr, "2.") { + // Valid Swagger 2.0 spec + } else { + return "", fmt.Errorf("not a valid Swagger 2.0 spec (found version: %v)", swagger) + } + } else { + return "", fmt.Errorf("not a Swagger spec (missing 'swagger' field)") + } + + // Extract host from URL for host configuration + parsedURL, err := url.Parse(urlStr) + if err != nil { + return "", errors.Wrap(err, "failed to parse URL") + } + + host := parsedURL.Host + scheme := parsedURL.Scheme + if scheme == "" { + scheme = "https" + } + + // Add host if missing + if _, exists := spec["host"]; !exists { + spec["host"] = host + } + + // Add schemes if missing + if _, exists := spec["schemes"]; !exists { + spec["schemes"] = []string{scheme} + } + + // Create output directory + swaggerDir := filepath.Join(tmpDir, "swagger") + if err := os.MkdirAll(swaggerDir, 0755); err != nil { + return "", errors.Wrap(err, "failed to create swagger directory") + } + + // Generate filename and content based on original format + var filename string + var content []byte + + if isYAML { + filename = fmt.Sprintf("swagger-spec-%d.yaml", time.Now().Unix()) + content, err = yaml.Marshal(spec) + if err != nil { + return "", errors.Wrap(err, "failed to marshal modified YAML spec") + } + } else { + filename = fmt.Sprintf("swagger-spec-%d.json", time.Now().Unix()) + content, err = json.Marshal(spec) + if err != nil { + return "", errors.Wrap(err, "failed to marshal modified JSON spec") + } + } + + filePath := filepath.Join(swaggerDir, filename) + + // Write file + file, err := os.Create(filePath) + if err != nil { + return "", errors.Wrap(err, "failed to create file") + } + + defer func() { 
+ _ = file.Close() + }() + + if _, writeErr := file.Write(content); writeErr != nil { + _ = os.Remove(filePath) + return "", errors.Wrap(writeErr, "failed to write file") + } + + return filePath, nil +} + +// SupportedExtensions returns the list of supported file extensions for Swagger +func (d *SwaggerDownloader) SupportedExtensions() []string { + return []string{".json", ".yaml", ".yml"} +} diff --git a/pkg/input/formats/swagger/downloader_test.go b/pkg/input/formats/swagger/downloader_test.go new file mode 100644 index 0000000000..d55b57395d --- /dev/null +++ b/pkg/input/formats/swagger/downloader_test.go @@ -0,0 +1,359 @@ +package swagger + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "strings" + "testing" + "time" + + "gopkg.in/yaml.v3" +) + +func TestSwaggerDownloader_SupportedExtensions(t *testing.T) { + downloader := &SwaggerDownloader{} + extensions := downloader.SupportedExtensions() + + expected := []string{".json", ".yaml", ".yml"} + if len(extensions) != len(expected) { + t.Errorf("Expected %d extensions, got %d", len(expected), len(extensions)) + } + + for i, ext := range extensions { + if ext != expected[i] { + t.Errorf("Expected extension %s, got %s", expected[i], ext) + } + } +} + +func TestSwaggerDownloader_Download_JSON_Success(t *testing.T) { + // Create a mock Swagger spec (JSON) + mockSpec := map[string]interface{}{ + "swagger": "2.0", + "info": map[string]interface{}{ + "title": "Test API", + "version": "1.0.0", + }, + "paths": map[string]interface{}{ + "/test": map[string]interface{}{ + "get": map[string]interface{}{ + "summary": "Test endpoint", + }, + }, + }, + } + + // Create mock server + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(mockSpec); err != nil { + http.Error(w, "failed to encode response", http.StatusInternalServerError) + } + })) + defer server.Close() + + // 
Create temp directory + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() + + // Test download + downloader := &SwaggerDownloader{} + filePath, err := downloader.Download(server.URL+"/swagger.json", tmpDir, nil) + if err != nil { + t.Fatalf("Download failed: %v", err) + } + + // Verify file exists + if !fileExists(filePath) { + t.Errorf("Downloaded file does not exist: %s", filePath) + } + + // Verify file content + content, err := os.ReadFile(filePath) + if err != nil { + t.Fatalf("Failed to read downloaded file: %v", err) + } + + var downloadedSpec map[string]interface{} + if err := json.Unmarshal(content, &downloadedSpec); err != nil { + t.Fatalf("Failed to parse downloaded JSON: %v", err) + } + + // Verify host field was added + _, exists := downloadedSpec["host"] + if !exists { + t.Error("Host field was not added to the spec") + } +} + +func TestSwaggerDownloader_Download_YAML_Success(t *testing.T) { + // Create a mock Swagger spec (YAML) + mockSpecYAML := ` +swagger: "2.0" +info: + title: "Test API" + version: "1.0.0" +paths: + /test: + get: + summary: "Test endpoint" +` + + // Create mock server + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/yaml") + if _, err := w.Write([]byte(mockSpecYAML)); err != nil { + http.Error(w, "failed to write response", http.StatusInternalServerError) + } + })) + + defer server.Close() + + // Create temp directory + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() + + // Test download + downloader := &SwaggerDownloader{} + filePath, err := 
downloader.Download(server.URL+"/swagger.yaml", tmpDir, nil) + if err != nil { + t.Fatalf("Download failed: %v", err) + } + + // Verify file exists + if !fileExists(filePath) { + t.Errorf("Downloaded file does not exist: %s", filePath) + } + + // Verify file content + content, err := os.ReadFile(filePath) + if err != nil { + t.Fatalf("Failed to read downloaded file: %v", err) + } + + var downloadedSpec map[string]interface{} + if err := yaml.Unmarshal(content, &downloadedSpec); err != nil { + t.Fatalf("Failed to parse downloaded YAML: %v", err) + } + + // Verify host field was added + _, exists := downloadedSpec["host"] + if !exists { + t.Error("Host field was not added to the spec") + } +} + +func TestSwaggerDownloader_Download_UnsupportedExtension(t *testing.T) { + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() + + downloader := &SwaggerDownloader{} + _, err = downloader.Download("http://example.com/spec.xml", tmpDir, nil) + if err == nil { + t.Error("Expected error for unsupported extension, but got none") + } + + if !strings.Contains(err.Error(), "URL does not appear to be a Swagger spec") { + t.Errorf("Unexpected error message: %v", err) + } +} + +func TestSwaggerDownloader_Download_HTTPError(t *testing.T) { + // Create mock server that returns 404 + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNotFound) + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() + + downloader := &SwaggerDownloader{} + _, err = downloader.Download(server.URL+"/swagger.json", tmpDir, nil) + if err 
== nil { + t.Error("Expected error for HTTP 404, but got none") + } +} + +func TestSwaggerDownloader_Download_InvalidJSON(t *testing.T) { + // Create mock server that returns invalid JSON + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + if _, err := w.Write([]byte("invalid json")); err != nil { + http.Error(w, "failed to write response", http.StatusInternalServerError) + } + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() + + downloader := &SwaggerDownloader{} + _, err = downloader.Download(server.URL+"/swagger.json", tmpDir, nil) + if err == nil { + t.Error("Expected error for invalid JSON, but got none") + } +} + +func TestSwaggerDownloader_Download_InvalidYAML(t *testing.T) { + // Create mock server that returns invalid YAML + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/yaml") + if _, err := w.Write([]byte("invalid: yaml: content: [")); err != nil { + http.Error(w, "failed to write response", http.StatusInternalServerError) + } + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() + + downloader := &SwaggerDownloader{} + _, err = downloader.Download(server.URL+"/swagger.yaml", tmpDir, nil) + if err == nil { + t.Error("Expected error for invalid YAML, but got none") + } +} + +func TestSwaggerDownloader_Download_Timeout(t *testing.T) { + // Create mock server with delay + server := httptest.NewServer(http.HandlerFunc(func(w 
http.ResponseWriter, r *http.Request) { + time.Sleep(35 * time.Second) // Longer than 30 second timeout + if err := json.NewEncoder(w).Encode(map[string]interface{}{"test": "data"}); err != nil { + http.Error(w, "failed to encode response", http.StatusInternalServerError) + } + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() + + downloader := &SwaggerDownloader{} + _, err = downloader.Download(server.URL+"/swagger.json", tmpDir, nil) + if err == nil { + t.Error("Expected timeout error, but got none") + } +} + +func TestSwaggerDownloader_Download_WithExistingHost(t *testing.T) { + // Create a mock Swagger spec with existing host + mockSpec := map[string]interface{}{ + "swagger": "2.0", + "info": map[string]interface{}{ + "title": "Test API", + "version": "1.0.0", + }, + "host": "existing-host.com", + "paths": map[string]interface{}{}, + } + + // Create mock server + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + if err := json.NewEncoder(w).Encode(mockSpec); err != nil { + http.Error(w, "failed to encode response", http.StatusInternalServerError) + } + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() + + downloader := &SwaggerDownloader{} + filePath, err := downloader.Download(server.URL+"/swagger.json", tmpDir, nil) + if err != nil { + t.Fatalf("Download failed: %v", err) + } + + // Verify existing host is preserved + content, err := os.ReadFile(filePath) + if err != nil { + t.Fatalf("Failed to read downloaded file: %v", 
err) + } + + var downloadedSpec map[string]interface{} + if err := json.Unmarshal(content, &downloadedSpec); err != nil { + t.Fatalf("Failed to parse downloaded JSON: %v", err) + } + + host, exists := downloadedSpec["host"] + if !exists { + t.Error("Host field was removed from the spec") + } + + if hostStr, ok := host.(string); !ok || hostStr != "existing-host.com" { + t.Errorf("Expected host 'existing-host.com', got '%v'", host) + } +} + +// Helper function to check if file exists +func fileExists(filename string) bool { + _, err := os.Stat(filename) + return !os.IsNotExist(err) +} diff --git a/pkg/input/provider/interface.go b/pkg/input/provider/interface.go index 9e1d09ab25..33cfbee7fd 100644 --- a/pkg/input/provider/interface.go +++ b/pkg/input/provider/interface.go @@ -7,12 +7,16 @@ import ( "github.com/projectdiscovery/gologger" "github.com/projectdiscovery/nuclei/v3/pkg/input/formats" + "github.com/projectdiscovery/nuclei/v3/pkg/input/formats/openapi" + "github.com/projectdiscovery/nuclei/v3/pkg/input/formats/swagger" "github.com/projectdiscovery/nuclei/v3/pkg/input/provider/http" "github.com/projectdiscovery/nuclei/v3/pkg/input/provider/list" "github.com/projectdiscovery/nuclei/v3/pkg/input/types" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/generators" + "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolstate" configTypes "github.com/projectdiscovery/nuclei/v3/pkg/types" + "github.com/projectdiscovery/retryablehttp-go" "github.com/projectdiscovery/utils/errkit" stringsutil "github.com/projectdiscovery/utils/strings" ) @@ -74,6 +78,8 @@ type InputProvider interface { type InputOptions struct { // Options for global config Options *configTypes.Options + // TempDir is the temporary directory for storing files + TempDir string // NotFoundCallback is the callback to call when input is not found // only supported in list input provider NotFoundCallback 
func(template string) bool @@ -107,20 +113,58 @@ func NewInputProvider(opts InputOptions) (InputProvider, error) { Options: opts.Options, NotFoundCallback: opts.NotFoundCallback, }) - } else { - // use HttpInputProvider - return http.NewHttpInputProvider(&http.HttpMultiFormatOptions{ - InputFile: opts.Options.TargetsFilePath, - InputMode: opts.Options.InputFileMode, - Options: formats.InputFormatOptions{ - Variables: generators.MergeMaps(extraVars, opts.Options.Vars.AsMap()), - SkipFormatValidation: opts.Options.SkipFormatValidation, - RequiredOnly: opts.Options.FormatUseRequiredOnly, - VarsTextTemplating: opts.Options.VarsTextTemplating, - VarsFilePaths: opts.Options.VarsFilePaths, - }, - }) + } else if len(opts.Options.Targets) > 0 && + (strings.EqualFold(opts.Options.InputFileMode, "openapi") || strings.EqualFold(opts.Options.InputFileMode, "swagger")) { + + if len(opts.Options.Targets) > 1 { + return nil, fmt.Errorf("only one target URL is supported in %s input mode", opts.Options.InputFileMode) + } + + target := opts.Options.Targets[0] + if strings.HasPrefix(target, "http://") || strings.HasPrefix(target, "https://") { + var downloader formats.SpecDownloader + var tempFile string + var err error + + // Get HttpClient from protocolstate if available + var httpClient *retryablehttp.Client + if opts.Options.ExecutionId != "" { + dialers := protocolstate.GetDialersWithId(opts.Options.ExecutionId) + if dialers != nil { + httpClient = dialers.DefaultHTTPClient + } + } + + switch strings.ToLower(opts.Options.InputFileMode) { + case "openapi": + downloader = openapi.NewDownloader() + tempFile, err = downloader.Download(target, opts.TempDir, httpClient) + case "swagger": + downloader = swagger.NewDownloader() + tempFile, err = downloader.Download(target, opts.TempDir, httpClient) + default: + return nil, fmt.Errorf("unsupported input mode: %s", opts.Options.InputFileMode) + } + + if err != nil { + return nil, fmt.Errorf("failed to download %s spec from url %s: %w", 
opts.Options.InputFileMode, target, err) + } + + opts.Options.TargetsFilePath = tempFile + } } + + return http.NewHttpInputProvider(&http.HttpMultiFormatOptions{ + InputFile: opts.Options.TargetsFilePath, + InputMode: opts.Options.InputFileMode, + Options: formats.InputFormatOptions{ + Variables: generators.MergeMaps(extraVars, opts.Options.Vars.AsMap()), + SkipFormatValidation: opts.Options.SkipFormatValidation, + RequiredOnly: opts.Options.FormatUseRequiredOnly, + VarsTextTemplating: opts.Options.VarsTextTemplating, + VarsFilePaths: opts.Options.VarsFilePaths, + }, + }) } // SupportedInputFormats returns all supported input formats of nuclei diff --git a/pkg/protocols/common/hosterrorscache/hosterrorscache_test.go b/pkg/protocols/common/hosterrorscache/hosterrorscache_test.go index e0046ff412..6478e2c5e8 100644 --- a/pkg/protocols/common/hosterrorscache/hosterrorscache_test.go +++ b/pkg/protocols/common/hosterrorscache/hosterrorscache_test.go @@ -177,7 +177,6 @@ func TestCacheCheckConcurrent(t *testing.T) { wg := sync.WaitGroup{} for i := 1; i <= 100; i++ { wg.Add(1) - i := i go func() { defer wg.Done() cache.MarkFailed(protoType, ctx, errors.New("no address found for host")) diff --git a/pkg/protocols/common/interactsh/interactsh.go b/pkg/protocols/common/interactsh/interactsh.go index 7cdf7c77bb..e92cfaecd9 100644 --- a/pkg/protocols/common/interactsh/interactsh.go +++ b/pkg/protocols/common/interactsh/interactsh.go @@ -455,3 +455,8 @@ func (c *Client) setHostname(hostname string) { c.hostname = hostname } + +// GetHostname returns the configured interactsh server hostname. 
+func (c *Client) GetHostname() string { + return c.getHostname() +} diff --git a/pkg/protocols/common/protocolstate/memguardian.go b/pkg/protocols/common/protocolstate/memguardian.go index 2f31f4ca7d..c9f2c51a2d 100644 --- a/pkg/protocols/common/protocolstate/memguardian.go +++ b/pkg/protocols/common/protocolstate/memguardian.go @@ -96,7 +96,7 @@ func GlobalGuardBytesBufferAlloc() error { defer muGlobalChange.Unlock() // if current capacity was not reduced decrease it - if MaxBytesBufferAllocOnLowMemory > 0 && httputil.DefaultBytesBufferAlloc == httputil.GetPoolSize() { + if MaxBytesBufferAllocOnLowMemory > 0 && httputil.DefaultBufferSize == httputil.GetPoolSize() { gologger.Debug().Msgf("reducing bytes.buffer pool size to: %d", MaxBytesBufferAllocOnLowMemory) delta := httputil.GetPoolSize() - int64(MaxBytesBufferAllocOnLowMemory) return httputil.ChangePoolSize(-delta) @@ -112,9 +112,9 @@ func GlobalRestoreBytesBufferAlloc() { } defer muGlobalChange.Unlock() - if httputil.DefaultBytesBufferAlloc != httputil.GetPoolSize() { - delta := httputil.DefaultBytesBufferAlloc - httputil.GetPoolSize() - gologger.Debug().Msgf("restoring bytes.buffer pool size to: %d", httputil.DefaultBytesBufferAlloc) + if httputil.DefaultBufferSize != httputil.GetPoolSize() { + delta := httputil.DefaultBufferSize - httputil.GetPoolSize() + gologger.Debug().Msgf("restoring bytes.buffer pool size to: %d", httputil.DefaultBufferSize) _ = httputil.ChangePoolSize(delta) } } diff --git a/pkg/protocols/common/variables/variables.go b/pkg/protocols/common/variables/variables.go index 0724c84c5c..fa5cc1dbc6 100644 --- a/pkg/protocols/common/variables/variables.go +++ b/pkg/protocols/common/variables/variables.go @@ -3,10 +3,13 @@ package variables import ( "strings" + "github.com/Knetic/govaluate" "github.com/invopop/jsonschema" + "github.com/projectdiscovery/nuclei/v3/pkg/operators/common/dsl" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/expressions" 
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/generators" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/interactsh" + "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/marker" protocolutils "github.com/projectdiscovery/nuclei/v3/pkg/protocols/utils" "github.com/projectdiscovery/nuclei/v3/pkg/types" "github.com/projectdiscovery/nuclei/v3/pkg/utils" @@ -17,7 +20,9 @@ import ( // Variable is a key-value pair of strings that can be used // throughout template. type Variable struct { - LazyEval bool `yaml:"-" json:"-"` // LazyEval is used to evaluate variables lazily if it using any expression or global variables + // LazyEval is used to evaluate variables lazily if it using any expression + // or global variables. + LazyEval bool `yaml:"-" json:"-"` utils.InsertionOrderedStringMap `yaml:"-" json:"-"` } @@ -128,19 +133,67 @@ func evaluateVariableValue(expression string, values, processing map[string]inte // checkForLazyEval checks if the variables have any lazy evaluation i.e any dsl function // and sets the flag accordingly. 
func (variables *Variable) checkForLazyEval() bool { + var needsLazy bool + variables.ForEach(func(key string, value interface{}) { + if needsLazy { + return + } + for _, v := range protocolutils.KnownVariables { if stringsutil.ContainsAny(types.ToString(value), v) { - variables.LazyEval = true + needsLazy = true return } } + // this is a hotfix and not the best way to do it // will be refactored once we move scan state to scanContext (see: https://github.com/projectdiscovery/nuclei/issues/4631) if strings.Contains(types.ToString(value), "interactsh-url") { - variables.LazyEval = true + needsLazy = true + return + } + + if hasUndefinedParams(types.ToString(value), variables) { + needsLazy = true return } }) + + variables.LazyEval = needsLazy + return variables.LazyEval } + +// hasUndefinedParams checks if a variable value contains expressions that ref +// parameters not defined in the current variable scope, indicating it needs +// runtime context. +func hasUndefinedParams(value string, variables *Variable) bool { + exprs := expressions.FindExpressions(value, marker.ParenthesisOpen, marker.ParenthesisClose, map[string]interface{}{}) + if len(exprs) == 0 { + return false + } + + definedVars := make(map[string]struct{}) + variables.ForEach(func(key string, _ interface{}) { + definedVars[key] = struct{}{} + }) + + for _, expr := range exprs { + compiled, err := govaluate.NewEvaluableExpressionWithFunctions(expr, dsl.HelperFunctions) + if err != nil { + // NOTE(dwisiswant0): here, it might need runtime context. + return true + } + + vars := compiled.Vars() + for _, paramName := range vars { + // NOTE(dwisiswant0): also here, if it's not in our defined vars. 
+ if _, exists := definedVars[paramName]; !exists { + return true + } + } + } + + return false +} diff --git a/pkg/protocols/common/variables/variables_test.go b/pkg/protocols/common/variables/variables_test.go index 67aee33fc5..cbf560b4eb 100644 --- a/pkg/protocols/common/variables/variables_test.go +++ b/pkg/protocols/common/variables/variables_test.go @@ -4,6 +4,7 @@ import ( "testing" "time" + "github.com/projectdiscovery/nuclei/v3/pkg/utils" "github.com/projectdiscovery/nuclei/v3/pkg/utils/json" "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" @@ -41,3 +42,108 @@ a6: "123456"` require.Equal(t, map[string]interface{}{"a2": "098f6bcd4621d373cade4e832627b4f6", "a3": "this_is_random_text", "a4": a4, "a5": "moc.elgoog", "a6": "123456"}, result, "could not get correct elements") } + +func TestCheckForLazyEval(t *testing.T) { + t.Run("undefined-parameters-in-expression", func(t *testing.T) { + // Variables with expressions that reference undefined parameters + // should be marked for lazy evaluation + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(2), + } + variables.Set("var1", "{{sha1(serial)}}") // 'serial' is undefined + variables.Set("var2", "{{replace(user, '.', '')}}") // 'user' is undefined + + result := variables.checkForLazyEval() + require.True(t, result, "should detect undefined parameters and set LazyEval=true") + require.True(t, variables.LazyEval, "LazyEval flag should be true") + }) + + t.Run("self-referencing-variables", func(t *testing.T) { + // Variables that reference other defined variables should NOT be lazy + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(2), + } + variables.Set("base", "example") + variables.Set("derived", "{{base}}_suffix") // 'base' is defined + + result := variables.checkForLazyEval() + require.False(t, result, "should not set LazyEval for self-referencing defined variables") + require.False(t, variables.LazyEval, "LazyEval 
flag should be false") + }) + + t.Run("constant-expressions", func(t *testing.T) { + // Constant expressions without variables should NOT be lazy + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(2), + } + variables.Set("const1", "{{2+2}}") + variables.Set("const2", "{{rand_int(1, 100)}}") + + result := variables.checkForLazyEval() + require.False(t, result, "should not set LazyEval for constant expressions") + require.False(t, variables.LazyEval, "LazyEval flag should be false") + }) + + t.Run("known-runtime-variables", func(t *testing.T) { + // Variables with known runtime variables (Host, BaseURL, etc.) should be lazy + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(1), + } + variables.Set("url", "{{BaseURL}}/api") + + result := variables.checkForLazyEval() + require.True(t, result, "should detect known runtime variables") + require.True(t, variables.LazyEval, "LazyEval flag should be true") + }) + + t.Run("interactsh-url", func(t *testing.T) { + // Variables with interactsh-url should be lazy + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(1), + } + variables.Set("callback", "{{interactsh-url}}") + + result := variables.checkForLazyEval() + require.True(t, result, "should detect interactsh-url") + require.True(t, variables.LazyEval, "LazyEval flag should be true") + }) + + t.Run("mixed-defined-and-undefined", func(t *testing.T) { + // Mix of defined and undefined parameters in actual expressions + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(3), + } + variables.Set("defined", "value") + variables.Set("uses_defined", "{{base64(defined)}}") // OK - 'defined' exists + variables.Set("uses_undefined", "{{base64(undefined_param)}}") // NOT OK - 'undefined_param' doesn't exist + + result := variables.checkForLazyEval() + require.True(t, result, "should detect undefined parameters 
even with some defined") + require.True(t, variables.LazyEval, "LazyEval flag should be true") + }) + + t.Run("plain-strings-no-expressions", func(t *testing.T) { + // Plain string values without expressions + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(2), + } + variables.Set("plain1", "simple value") + variables.Set("plain2", "another value") + + result := variables.checkForLazyEval() + require.False(t, result, "should not set LazyEval for plain strings") + require.False(t, variables.LazyEval, "LazyEval flag should be false") + }) + + t.Run("complex-expression-with-undefined", func(t *testing.T) { + // Complex expression with multiple undefined parameters + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(1), + } + variables.Set("complex", "{{sha1(cert_serial + issuer)}}") + + result := variables.checkForLazyEval() + require.True(t, result, "should detect undefined parameters in complex expressions") + require.True(t, variables.LazyEval, "LazyEval flag should be true") + }) +} diff --git a/pkg/protocols/file/request.go b/pkg/protocols/file/request.go index 853cbb602f..b715a4bfc9 100644 --- a/pkg/protocols/file/request.go +++ b/pkg/protocols/file/request.go @@ -59,7 +59,7 @@ func (request *Request) ExecuteWithResults(input *contextargs.Context, metadata, } err = request.getInputPaths(input.MetaInput.Input, func(filePath string) { wg.Add() - func(filePath string) { + go func(filePath string) { defer wg.Done() fi, err := os.Open(filePath) if err != nil { diff --git a/pkg/protocols/file/request_test.go b/pkg/protocols/file/request_test.go index 118d1885c4..3f20de2ed5 100644 --- a/pkg/protocols/file/request_test.go +++ b/pkg/protocols/file/request_test.go @@ -7,7 +7,10 @@ import ( "context" "os" "path/filepath" + "sync" + "sync/atomic" "testing" + "time" "github.com/stretchr/testify/require" @@ -132,3 +135,80 @@ func TestFileExecuteWithResults(t *testing.T) { finalEvent = 
nil } } + +func TestFileProtocolConcurrentExecution(t *testing.T) { + tempDir, err := os.MkdirTemp("", "nuclei-test-*") + require.NoError(t, err) + + defer func() { + _ = os.RemoveAll(tempDir) + }() + + numFiles := 5 + for i := range numFiles { + content := "TEST_CONTENT_MATCH_DATA" + filePath := filepath.Join(tempDir, "test_"+string(rune('0'+i))+".txt") + err := os.WriteFile(filePath, []byte(content), permissionutil.TempFilePermission) + require.NoError(t, err) + } + + options := testutils.DefaultOptions + testutils.Init(options) + templateID := "testing-file-concurrent" + executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{ + ID: templateID, + Info: model.Info{SeverityHolder: severity.Holder{Severity: severity.Low}, Name: "test"}, + }) + + var timesMutex sync.Mutex + var processedFiles int64 + + request := &Request{ + ID: templateID, + MaxSize: "1Gb", + NoRecursive: false, + Extensions: []string{"txt"}, + Archive: false, + Operators: operators.Operators{ + Matchers: []*matchers.Matcher{{ + Name: "test", + Part: "raw", + Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher}, + Words: []string{"TEST_CONTENT_MATCH_DATA"}, + }}, + }, + options: executerOpts, + } + + err = request.Compile(executerOpts) + require.NoError(t, err) + + input := contextargs.NewWithInput(context.Background(), tempDir) + var results []*output.InternalWrappedEvent + var resultMutex sync.Mutex + + startTime := time.Now() + err = request.ExecuteWithResults(input, make(output.InternalEvent), make(output.InternalEvent), func(event *output.InternalWrappedEvent) { + atomic.AddInt64(&processedFiles, 1) + resultMutex.Lock() + results = append(results, event) + resultMutex.Unlock() + + // small delay to make timing differences more observable + time.Sleep(10 * time.Millisecond) + }) + totalTime := time.Since(startTime) + require.NoError(t, err) + + finalProcessedFiles := atomic.LoadInt64(&processedFiles) + t.Logf("Total execution time: %v", totalTime) + 
t.Logf("Files processed: %d", finalProcessedFiles) + t.Logf("Results returned: %d", len(results)) + + // test 1: all files should be processed + require.Equal(t, int64(numFiles), finalProcessedFiles, "Not all files were processed") + + // test 2: verify callback invocation timing shows concurrency + timesMutex.Lock() + defer timesMutex.Unlock() +} diff --git a/pkg/protocols/http/request.go b/pkg/protocols/http/request.go index c538686e18..3137a552dd 100644 --- a/pkg/protocols/http/request.go +++ b/pkg/protocols/http/request.go @@ -948,7 +948,7 @@ func (request *Request) executeRequest(input *contextargs.Context, generatedRequ onceFunc := sync.OnceFunc(func() { // if nuclei-project is enabled store the response if not previously done if request.options.ProjectFile != nil && !fromCache { - if err := request.options.ProjectFile.Set(dumpedRequest, resp, respChain.Body().Bytes()); err != nil { + if err := request.options.ProjectFile.Set(dumpedRequest, resp, respChain.BodyBytes()); err != nil { errx = errors.Wrap(err, "could not store in project file") } } @@ -962,7 +962,7 @@ func (request *Request) executeRequest(input *contextargs.Context, generatedRequ } // log request stats - request.options.Output.RequestStatsLog(strconv.Itoa(respChain.Response().StatusCode), respChain.FullResponse().String()) + request.options.Output.RequestStatsLog(strconv.Itoa(respChain.Response().StatusCode), respChain.FullResponseString()) // save response to projectfile onceFunc() @@ -1003,7 +1003,7 @@ func (request *Request) executeRequest(input *contextargs.Context, generatedRequ } } - outputEvent := request.responseToDSLMap(respChain.Response(), input.MetaInput.Input, matchedURL, convUtil.String(dumpedRequest), respChain.FullResponse().String(), respChain.Body().String(), respChain.Headers().String(), duration, generatedRequest.meta) + outputEvent := request.responseToDSLMap(respChain.Response(), input.MetaInput.Input, matchedURL, convUtil.String(dumpedRequest), 
respChain.FullResponseString(), respChain.BodyString(), respChain.HeadersString(), duration, generatedRequest.meta) // add response fields to template context and merge templatectx variables to output event request.options.AddTemplateVars(input.MetaInput, request.Type(), request.ID, outputEvent) if request.options.HasTemplateCtx(input.MetaInput) { @@ -1066,7 +1066,7 @@ func (request *Request) executeRequest(input *contextargs.Context, generatedRequ responseContentType := respChain.Response().Header.Get("Content-Type") isResponseTruncated := request.MaxSize > 0 && respChain.Body().Len() >= request.MaxSize - dumpResponse(event, request, respChain.FullResponse().Bytes(), formedURL, responseContentType, isResponseTruncated, input.MetaInput.Input) + dumpResponse(event, request, respChain.FullResponseBytes(), formedURL, responseContentType, isResponseTruncated, input.MetaInput.Input) callback(event) @@ -1080,7 +1080,7 @@ func (request *Request) executeRequest(input *contextargs.Context, generatedRequ StatusCode: respChain.Response().StatusCode, Matched: event.HasResults(), RawRequest: string(dumpedRequest), - RawResponse: respChain.FullResponse().String(), + RawResponse: respChain.FullResponseString(), Severity: request.options.TemplateInfo.SeverityHolder.Severity.String(), }) } @@ -1137,6 +1137,15 @@ func (request *Request) addCNameIfAvailable(hostname string, outputEvent map[str return } + if request.options.Interactsh != nil { + interactshDomain := request.options.Interactsh.GetHostname() + if interactshDomain != "" { + if strings.EqualFold(hostname, interactshDomain) || strings.HasSuffix(hostname, "."+interactshDomain) { + return + } + } + } + data, err := request.dialer.GetDNSData(hostname) if err == nil { switch len(data.CNAME) { diff --git a/pkg/protocols/javascript/js.go b/pkg/protocols/javascript/js.go index 8b872d84a9..2d927555fc 100644 --- a/pkg/protocols/javascript/js.go +++ b/pkg/protocols/javascript/js.go @@ -36,6 +36,7 @@ import ( 
"github.com/projectdiscovery/utils/errkit" iputil "github.com/projectdiscovery/utils/ip" mapsutil "github.com/projectdiscovery/utils/maps" + sliceutil "github.com/projectdiscovery/utils/slice" syncutil "github.com/projectdiscovery/utils/sync" urlutil "github.com/projectdiscovery/utils/url" ) @@ -133,8 +134,11 @@ func (request *Request) Compile(options *protocols.ExecutorOptions) error { } // "Port" is a special variable and it should not contains any dsl expressions - if strings.Contains(request.getPort(), "{{") { - return errkit.New("'Port' variable cannot contain any dsl expressions") + ports := request.getPorts() + for _, port := range ports { + if strings.Contains(port, "{{") { + return errkit.New("'Port' variable cannot contain any dsl expressions") + } } if request.Init != "" { @@ -281,12 +285,28 @@ func (request *Request) GetID() string { // ExecuteWithResults executes the protocol requests and returns results instead of writing them. func (request *Request) ExecuteWithResults(target *contextargs.Context, dynamicValues, previous output.InternalEvent, callback protocols.OutputEventCallback) error { + // Get default port(s) if specified in template + ports := request.getPorts() + + var errs []error + + for _, port := range ports { + err := request.executeWithResults(port, target, dynamicValues, previous, callback) + if err != nil { + errs = append(errs, err) + } + } + return errkit.Join(errs...) 
+} + +// executeWithResults executes the request +func (request *Request) executeWithResults(port string, target *contextargs.Context, dynamicValues, previous output.InternalEvent, callback protocols.OutputEventCallback) error { input := target.Clone() // use network port updates input with new port requested in template file // and it is ignored if input port is not standard http(s) ports like 80,8080,8081 etc // idea is to reduce redundant dials to http ports - if err := input.UseNetworkPort(request.getPort(), request.getExcludePorts()); err != nil { + if err := input.UseNetworkPort(port, request.getExcludePorts()); err != nil { gologger.Debug().Msgf("Could not network port from constants: %s\n", err) } @@ -755,13 +775,21 @@ func (request *Request) Type() templateTypes.ProtocolType { return templateTypes.JavascriptProtocol } -func (request *Request) getPort() string { +func (request *Request) getPorts() []string { for k, v := range request.Args { if strings.EqualFold(k, "Port") { - return types.ToString(v) + portStr := types.ToString(v) + ports := []string{} + for _, p := range strings.Split(portStr, ",") { + trimmed := strings.TrimSpace(p) + if trimmed != "" { + ports = append(ports, trimmed) + } + } + return sliceutil.Dedupe(ports) } } - return "" + return []string{} } func (request *Request) getExcludePorts() string {