From 6a6de384fca77752a940c7d07c4c118a92e46b11 Mon Sep 17 00:00:00 2001 From: pussycat0x <65701233+pussycat0x@users.noreply.github.com> Date: Sat, 27 Sep 2025 21:00:13 +0530 Subject: [PATCH 01/55] Multi Port Support Added - JS --- cmd/nuclei/ssh.yaml | 31 +++++++++++++++ pkg/protocols/javascript/js.go | 70 +++++++++++++++++++++++++++++++++- 2 files changed, 100 insertions(+), 1 deletion(-) create mode 100644 cmd/nuclei/ssh.yaml diff --git a/cmd/nuclei/ssh.yaml b/cmd/nuclei/ssh.yaml new file mode 100644 index 0000000000..0c5719cb13 --- /dev/null +++ b/cmd/nuclei/ssh.yaml @@ -0,0 +1,31 @@ +id: ssh-auth-methods + +info: + name: SSH Auth Methods - Detection + author: Ice3man543 + severity: info + description: | + SSH (Secure Shell) authentication modes are methods used to verify the identity of users and ensure secure access to remote systems. Common SSH authentication modes include password-based authentication, which relies on a secret passphrase, and public key authentication, which uses cryptographic keys for a more secure and convenient login process. Additionally, multi-factor authentication (MFA) can be employed to enhance security by requiring users to provide multiple forms of authentication, such as a password and a one-time code. 
+ reference: + - https://nmap.org/nsedoc/scripts/ssh-auth-methods.html + metadata: + max-request: 1 + shodan-query: product:"OpenSSH" + tags: js,detect,ssh,enum,network + +javascript: + - pre-condition: | + isPortOpen(Host,Port); + code: | + var m = require("nuclei/ssh"); + var c = m.SSHClient(); + var response = c.ConnectSSHInfoMode(Host, Port); + Export(response); + args: + Host: "{{Host}}" + Port: "222,2222,22" + + extractors: + - type: json + json: + - '.UserAuth' \ No newline at end of file diff --git a/pkg/protocols/javascript/js.go b/pkg/protocols/javascript/js.go index 8b872d84a9..ce97583d3f 100644 --- a/pkg/protocols/javascript/js.go +++ b/pkg/protocols/javascript/js.go @@ -282,11 +282,60 @@ func (request *Request) GetID() string { // ExecuteWithResults executes the protocol requests and returns results instead of writing them. func (request *Request) ExecuteWithResults(target *contextargs.Context, dynamicValues, previous output.InternalEvent, callback protocols.OutputEventCallback) error { + // Get all ports to test + ports := request.getPorts() + + // Check if target already has a specific port (from URL like host:port) + targetURL, err := urlutil.Parse(target.MetaInput.Input) + var urlPort string + if err == nil { + urlPort = targetURL.Port() + } + + // Create a map to track unique ports and avoid duplicates + uniquePorts := make(map[string]bool) + + // Add URL port if it exists + if urlPort != "" { + uniquePorts[urlPort] = true + } + + // Add template ports + for _, port := range ports { + uniquePorts[port] = true + } + + // If no ports found, fallback to single port behavior + if len(uniquePorts) == 0 { + return request.executeWithSinglePort(target, dynamicValues, previous, callback) + } + + // Execute for each unique port + for port := range uniquePorts { + input := target.Clone() + // use network port updates input with new port requested in template file + // and it is ignored if input port is not standard http(s) ports like 80,8080,8081 etc + // 
idea is to reduce redundant dials to http ports + if err := input.UseNetworkPort(port, request.getExcludePorts()); err != nil { + gologger.Debug().Msgf("Could not network port from constants: %s\n", err) + } + + if err := request.executeWithSinglePort(input, dynamicValues, previous, callback); err != nil { + gologger.Debug().Msgf("Error executing request for port %s: %s\n", port, err) + } + } + + return nil +} + +// executeWithSinglePort executes the request for a single port +func (request *Request) executeWithSinglePort(target *contextargs.Context, dynamicValues, previous output.InternalEvent, callback protocols.OutputEventCallback) error { input := target.Clone() // use network port updates input with new port requested in template file // and it is ignored if input port is not standard http(s) ports like 80,8080,8081 etc // idea is to reduce redundant dials to http ports - if err := input.UseNetworkPort(request.getPort(), request.getExcludePorts()); err != nil { + // Note: target already has the correct port set from the multi-port loop or from URL + if err := input.UseNetworkPort("", request.getExcludePorts()); err != nil { gologger.Debug().Msgf("Could not network port from constants: %s\n", err) } @@ -764,6 +813,25 @@ func (request *Request) getPort() string { return "" } +// getPorts returns a slice of ports from the Port argument +func (request *Request) getPorts() []string { + portStr := request.getPort() + if portStr == "" { + return []string{} + } + + // Split by comma and clean up whitespace + ports := strings.Split(portStr, ",") + var cleanedPorts []string + for _, port := range ports { + cleaned := strings.TrimSpace(port) + if cleaned != "" { + cleanedPorts = append(cleanedPorts, cleaned) + } + } + return cleanedPorts +} + func (request *Request) getExcludePorts() string { for k, v := range request.Args { if strings.EqualFold(k, "exclude-ports") { From 7e041813910f900d703a085074a6d8e14fdd4c85 Mon Sep 17 00:00:00 2001 From: pussycat0x 
<65701233+pussycat0x@users.noreply.github.com> Date: Sat, 27 Sep 2025 21:14:20 +0530 Subject: [PATCH 02/55] minor -changes --- cmd/nuclei/ssh.yaml | 2 +- pkg/protocols/javascript/js.go | 59 +++++++++++++++++++++++----------- 2 files changed, 42 insertions(+), 19 deletions(-) diff --git a/cmd/nuclei/ssh.yaml b/cmd/nuclei/ssh.yaml index 0c5719cb13..afc5115d8b 100644 --- a/cmd/nuclei/ssh.yaml +++ b/cmd/nuclei/ssh.yaml @@ -23,7 +23,7 @@ javascript: Export(response); args: Host: "{{Host}}" - Port: "222,2222,22" + Port: "222,22" extractors: - type: json diff --git a/pkg/protocols/javascript/js.go b/pkg/protocols/javascript/js.go index ce97583d3f..35c6c2f335 100644 --- a/pkg/protocols/javascript/js.go +++ b/pkg/protocols/javascript/js.go @@ -7,6 +7,7 @@ import ( "maps" "net" "strings" + "sync" "sync/atomic" "time" @@ -310,34 +311,56 @@ func (request *Request) ExecuteWithResults(target *contextargs.Context, dynamicV return request.executeWithSinglePort(target, dynamicValues, previous, callback) } - // Execute for each unique port + // Execute for each unique port in parallel + var wg sync.WaitGroup + portList := make([]string, 0, len(uniquePorts)) for port := range uniquePorts { - input := target.Clone() - // use network port updates input with new port requested in template file - // and it is ignored if input port is not standard http(s) ports like 80,8080,8081 etc - // idea is to reduce redundant dials to http ports - if err := input.UseNetworkPort(port, request.getExcludePorts()); err != nil { - gologger.Debug().Msgf("Could not network port from constants: %s\n", err) - } + portList = append(portList, port) + } - if err := request.executeWithSinglePort(input, dynamicValues, previous, callback); err != nil { - gologger.Debug().Msgf("Error executing request for port %s: %s\n", port, err) - } + // Use a semaphore to limit concurrent goroutines (max 10 concurrent port tests) + semaphore := make(chan struct{}, 10) + + for _, port := range portList { + wg.Add(1) + go 
func(port string) { + defer wg.Done() + + // Acquire semaphore + semaphore <- struct{}{} + defer func() { <-semaphore }() + + gologger.Debug().Msgf("Testing port: %s\n", port) + input := target.Clone() + + // Parse the original input to get hostname + originalURL, err := urlutil.Parse(target.MetaInput.Input) + if err != nil { + gologger.Debug().Msgf("Could not parse original input: %s\n", err) + return + } + + // Create new input with the specific port + newInput := fmt.Sprintf("%s:%s", originalURL.Hostname(), port) + input.MetaInput.Input = newInput + + if err := request.executeWithSinglePort(input, dynamicValues, previous, callback); err != nil { + gologger.Debug().Msgf("Error executing request for port %s: %s\n", port, err) + } + }(port) } + // Wait for all goroutines to complete + wg.Wait() + return nil } // executeWithSinglePort executes the request for a single port func (request *Request) executeWithSinglePort(target *contextargs.Context, dynamicValues, previous output.InternalEvent, callback protocols.OutputEventCallback) error { input := target.Clone() - // use network port updates input with new port requested in template file - // and it is ignored if input port is not standard http(s) ports like 80,8080,8081 etc - // idea is to reduce redundant dials to http ports - // Note: target already has the correct port set from the multi-port loop or from URL - if err := input.UseNetworkPort("", request.getExcludePorts()); err != nil { - gologger.Debug().Msgf("Could not network port from constants: %s\n", err) - } + // Note: target already has the correct port set from the multi-port loop + // No need to call UseNetworkPort again as the port is already set in the target hostPort, err := getAddress(input.MetaInput.Input) if err != nil { From 07590268c1207f96e78efbc50fae6dc2d0bf02dc Mon Sep 17 00:00:00 2001 From: Mzack9999 Date: Mon, 6 Oct 2025 16:10:58 +0200 Subject: [PATCH 03/55] restoring basic sequential multiport support --- pkg/protocols/javascript/js.go | 
118 +++++++-------------------------- 1 file changed, 23 insertions(+), 95 deletions(-) diff --git a/pkg/protocols/javascript/js.go b/pkg/protocols/javascript/js.go index 35c6c2f335..b75e26eada 100644 --- a/pkg/protocols/javascript/js.go +++ b/pkg/protocols/javascript/js.go @@ -7,7 +7,6 @@ import ( "maps" "net" "strings" - "sync" "sync/atomic" "time" @@ -37,6 +36,7 @@ import ( "github.com/projectdiscovery/utils/errkit" iputil "github.com/projectdiscovery/utils/ip" mapsutil "github.com/projectdiscovery/utils/maps" + sliceutil "github.com/projectdiscovery/utils/slice" syncutil "github.com/projectdiscovery/utils/sync" urlutil "github.com/projectdiscovery/utils/url" ) @@ -134,8 +134,11 @@ func (request *Request) Compile(options *protocols.ExecutorOptions) error { } // "Port" is a special variable and it should not contains any dsl expressions - if strings.Contains(request.getPort(), "{{") { - return errkit.New("'Port' variable cannot contain any dsl expressions") + ports := request.getPorts() + for _, port := range ports { + if strings.Contains(port, "{{") { + return errkit.New("'Port' variable cannot contain any dsl expressions") + } } if request.Init != "" { @@ -282,85 +285,28 @@ func (request *Request) GetID() string { // ExecuteWithResults executes the protocol requests and returns results instead of writing them. 
func (request *Request) ExecuteWithResults(target *contextargs.Context, dynamicValues, previous output.InternalEvent, callback protocols.OutputEventCallback) error { - - // Get all ports to test + // Get default port(s) if specified in template ports := request.getPorts() - // Check if target already has a specific port (from URL like host:port) - targetURL, err := urlutil.Parse(target.MetaInput.Input) - var urlPort string - if err == nil { - urlPort = targetURL.Port() - } - - // Create a map to track unique ports and avoid duplicates - uniquePorts := make(map[string]bool) - - // Add URL port if it exists - if urlPort != "" { - uniquePorts[urlPort] = true - } - - // Add template ports for _, port := range ports { - uniquePorts[port] = true - } - - // If no ports found, fallback to single port behavior - if len(uniquePorts) == 0 { - return request.executeWithSinglePort(target, dynamicValues, previous, callback) - } - - // Execute for each unique port in parallel - var wg sync.WaitGroup - portList := make([]string, 0, len(uniquePorts)) - for port := range uniquePorts { - portList = append(portList, port) - } - - // Use a semaphore to limit concurrent goroutines (max 10 concurrent port tests) - semaphore := make(chan struct{}, 10) - - for _, port := range portList { - wg.Add(1) - go func(port string) { - defer wg.Done() - - // Acquire semaphore - semaphore <- struct{}{} - defer func() { <-semaphore }() - - gologger.Debug().Msgf("Testing port: %s\n", port) - input := target.Clone() - - // Parse the original input to get hostname - originalURL, err := urlutil.Parse(target.MetaInput.Input) - if err != nil { - gologger.Debug().Msgf("Could not parse original input: %s\n", err) - return - } - - // Create new input with the specific port - newInput := fmt.Sprintf("%s:%s", originalURL.Hostname(), port) - input.MetaInput.Input = newInput - - if err := request.executeWithSinglePort(input, dynamicValues, previous, callback); err != nil { - gologger.Debug().Msgf("Error executing 
request for port %s: %s\n", port, err) - } - }(port) + err := request.executeWithResults(port, target, dynamicValues, previous, callback) + if err != nil { + return err + } } - // Wait for all goroutines to complete - wg.Wait() - return nil } -// executeWithSinglePort executes the request for a single port -func (request *Request) executeWithSinglePort(target *contextargs.Context, dynamicValues, previous output.InternalEvent, callback protocols.OutputEventCallback) error { +// executeWithResults executes the request +func (request *Request) executeWithResults(port string, target *contextargs.Context, dynamicValues, previous output.InternalEvent, callback protocols.OutputEventCallback) error { input := target.Clone() - // Note: target already has the correct port set from the multi-port loop - // No need to call UseNetworkPort again as the port is already set in the target + // use network port updates input with new port requested in template file + // and it is ignored if input port is not standard http(s) ports like 80,8080,8081 etc + // idea is to reduce redundant dials to http ports + if err := input.UseNetworkPort(port, request.getExcludePorts()); err != nil { + gologger.Debug().Msgf("Could not network port from constants: %s\n", err) + } hostPort, err := getAddress(input.MetaInput.Input) if err != nil { @@ -827,32 +773,14 @@ func (request *Request) Type() templateTypes.ProtocolType { return templateTypes.JavascriptProtocol } -func (request *Request) getPort() string { +func (request *Request) getPorts() []string { for k, v := range request.Args { if strings.EqualFold(k, "Port") { - return types.ToString(v) - } - } - return "" -} - -// getPorts returns a slice of ports from the Port argument -func (request *Request) getPorts() []string { - portStr := request.getPort() - if portStr == "" { - return []string{} - } - - // Split by comma and clean up whitespace - ports := strings.Split(portStr, ",") - var cleanedPorts []string - for _, port := range ports { - 
cleaned := strings.TrimSpace(port) - if cleaned != "" { - cleanedPorts = append(cleanedPorts, cleaned) + ports := types.ToStringSlice(strings.Split(types.ToString(v), ",")) + return sliceutil.Dedupe(ports) } } - return cleanedPorts + return []string{} } func (request *Request) getExcludePorts() string { From 8c560523e3fefb9bd70e47f59a691f40aca63a38 Mon Sep 17 00:00:00 2001 From: Mzack9999 Date: Mon, 6 Oct 2025 21:29:56 +0200 Subject: [PATCH 04/55] better error handling --- pkg/protocols/javascript/js.go | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pkg/protocols/javascript/js.go b/pkg/protocols/javascript/js.go index b75e26eada..5bfd660c91 100644 --- a/pkg/protocols/javascript/js.go +++ b/pkg/protocols/javascript/js.go @@ -288,14 +288,16 @@ func (request *Request) ExecuteWithResults(target *contextargs.Context, dynamicV // Get default port(s) if specified in template ports := request.getPorts() + var errs []error + for _, port := range ports { err := request.executeWithResults(port, target, dynamicValues, previous, callback) if err != nil { - return err + errs = append(errs, err) } } - return nil + return errkit.Join(errs...) 
} // executeWithResults executes the request From c3750be38025156a30cb97efbf9be2e9d13b0b80 Mon Sep 17 00:00:00 2001 From: tvroi Date: Sun, 19 Oct 2025 15:33:31 +0700 Subject: [PATCH 05/55] feat(openapi/swagger): direct fuzzing using target url --- internal/runner/runner.go | 10 +- pkg/input/formats/formats.go | 9 + pkg/input/formats/openapi/downloader.go | 120 ++++++++ pkg/input/formats/openapi/downloader_test.go | 240 +++++++++++++++ pkg/input/formats/swagger/downloader.go | 148 +++++++++ pkg/input/formats/swagger/downloader_test.go | 306 +++++++++++++++++++ pkg/input/provider/interface.go | 59 +++- 7 files changed, 874 insertions(+), 18 deletions(-) create mode 100644 pkg/input/formats/openapi/downloader.go create mode 100644 pkg/input/formats/openapi/downloader_test.go create mode 100644 pkg/input/formats/swagger/downloader.go create mode 100644 pkg/input/formats/swagger/downloader_test.go diff --git a/internal/runner/runner.go b/internal/runner/runner.go index 59910f8246..1594f1e260 100644 --- a/internal/runner/runner.go +++ b/internal/runner/runner.go @@ -254,8 +254,12 @@ func New(options *types.Options) (*Runner, error) { os.Exit(0) } + if tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*"); err == nil { + runner.tmpDir = tmpDir + } + // create the input provider and load the inputs - inputProvider, err := provider.NewInputProvider(provider.InputOptions{Options: options}) + inputProvider, err := provider.NewInputProvider(provider.InputOptions{Options: options, TempDir: runner.tmpDir}) if err != nil { return nil, errors.Wrap(err, "could not create input provider") } @@ -386,10 +390,6 @@ func New(options *types.Options) (*Runner, error) { } runner.rateLimiter = utils.GetRateLimiter(context.Background(), options.RateLimit, options.RateLimitDuration) - if tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*"); err == nil { - runner.tmpDir = tmpDir - } - return runner, nil } diff --git a/pkg/input/formats/formats.go b/pkg/input/formats/formats.go index c7798286a3..4cbd96a59f 
100644 --- a/pkg/input/formats/formats.go +++ b/pkg/input/formats/formats.go @@ -47,6 +47,15 @@ type Format interface { SetOptions(options InputFormatOptions) } +// SpecDownloader is an interface for downloading API specifications from URLs +type SpecDownloader interface { + // Download downloads the spec from the given URL and saves it to tmpDir + // Returns the path to the downloaded file + Download(url, tmpDir string) (string, error) + // SupportedExtensions returns the list of supported file extensions + SupportedExtensions() []string +} + var ( DefaultVarDumpFileName = "required_openapi_params.yaml" ErrNoVarsDumpFile = errors.New("no required params file found") diff --git a/pkg/input/formats/openapi/downloader.go b/pkg/input/formats/openapi/downloader.go new file mode 100644 index 0000000000..22ca513877 --- /dev/null +++ b/pkg/input/formats/openapi/downloader.go @@ -0,0 +1,120 @@ +package openapi + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "os" + "path/filepath" + "strings" + "time" + + "github.com/pkg/errors" + "github.com/projectdiscovery/nuclei/v3/pkg/input/formats" +) + +// OpenAPIDownloader implements the SpecDownloader interface for OpenAPI 3.0 specs +type OpenAPIDownloader struct{} + +// NewDownloader creates a new OpenAPI downloader +func NewDownloader() formats.SpecDownloader { + return &OpenAPIDownloader{} +} + +// This function downloads an OpenAPI 3.0 spec from the given URL and saves it to tmpDir +func (d *OpenAPIDownloader) Download(urlStr, tmpDir string) (string, error) { + // Validate URL format, OpenAPI 3.0 specs are typically JSON + if !strings.HasSuffix(urlStr, ".json") && !strings.Contains(urlStr, "openapi") { + return "", fmt.Errorf("URL does not appear to be an OpenAPI JSON spec") + } + + client := &http.Client{Timeout: 30 * time.Second} + + resp, err := client.Get(urlStr) + if err != nil { + return "", errors.Wrap(err, "failed to download OpenAPI spec") + } + defer resp.Body.Close() + + if resp.StatusCode != 
http.StatusOK { + return "", fmt.Errorf("HTTP %d when downloading OpenAPI spec", resp.StatusCode) + } + + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + return "", errors.Wrap(err, "failed to read response body") + } + + // Validate it's a valid JSON and has OpenAPI structure + var spec map[string]interface{} + if err := json.Unmarshal(bodyBytes, &spec); err != nil { + return "", fmt.Errorf("downloaded content is not valid JSON: %w", err) + } + + // Check if it's an OpenAPI 3.0 spec + if openapi, exists := spec["openapi"]; exists { + if openapiStr, ok := openapi.(string); ok && strings.HasPrefix(openapiStr, "3.") { + // Valid OpenAPI 3.0 spec + } else { + return "", fmt.Errorf("not a valid OpenAPI 3.0 spec (found version: %v)", openapi) + } + } else { + return "", fmt.Errorf("not an OpenAPI spec (missing 'openapi' field)") + } + + // Extract host from URL for server configuration + parsedURL, err := url.Parse(urlStr) + if err != nil { + return "", errors.Wrap(err, "failed to parse URL") + } + host := parsedURL.Host + + // Add servers section if missing or empty + servers, exists := spec["servers"] + if !exists || servers == nil { + spec["servers"] = []map[string]interface{}{ + {"url": "https://" + host}, + } + } else if serversList, ok := servers.([]interface{}); ok && len(serversList) == 0 { + spec["servers"] = []map[string]interface{}{ + {"url": "https://" + host}, + } + } + + // Marshal back to JSON + modifiedJSON, err := json.Marshal(spec) + if err != nil { + return "", errors.Wrap(err, "failed to marshal modified spec") + } + + // Create output directory + openapiDir := filepath.Join(tmpDir, "openapi") + if err := os.MkdirAll(openapiDir, 0755); err != nil { + return "", errors.Wrap(err, "failed to create openapi directory") + } + + // Generate filename + filename := fmt.Sprintf("openapi-spec-%d.json", time.Now().Unix()) + filePath := filepath.Join(openapiDir, filename) + + // Write file + file, err := os.Create(filePath) + if err != nil { + return 
"", fmt.Errorf("failed to create file: %w", err) + } + defer file.Close() + + if _, err := file.Write(modifiedJSON); err != nil { + os.Remove(filePath) + return "", errors.Wrap(err, "failed to write OpenAPI spec to file") + } + + return filePath, nil +} + +// SupportedExtensions returns the list of supported file extensions for OpenAPI +func (d *OpenAPIDownloader) SupportedExtensions() []string { + return []string{".json"} +} diff --git a/pkg/input/formats/openapi/downloader_test.go b/pkg/input/formats/openapi/downloader_test.go new file mode 100644 index 0000000000..e5fc7784a4 --- /dev/null +++ b/pkg/input/formats/openapi/downloader_test.go @@ -0,0 +1,240 @@ +package openapi + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "strings" + "testing" + "time" +) + +func TestOpenAPIDownloader_SupportedExtensions(t *testing.T) { + downloader := &OpenAPIDownloader{} + extensions := downloader.SupportedExtensions() + + expected := []string{".json"} + if len(extensions) != len(expected) { + t.Errorf("Expected %d extensions, got %d", len(expected), len(extensions)) + } + + for i, ext := range extensions { + if ext != expected[i] { + t.Errorf("Expected extension %s, got %s", expected[i], ext) + } + } +} + +func TestOpenAPIDownloader_Download_Success(t *testing.T) { + // Create a mock OpenAPI spec + mockSpec := map[string]interface{}{ + "openapi": "3.0.0", + "info": map[string]interface{}{ + "title": "Test API", + "version": "1.0.0", + }, + "paths": map[string]interface{}{ + "/test": map[string]interface{}{ + "get": map[string]interface{}{ + "summary": "Test endpoint", + }, + }, + }, + } + + // Create mock server + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(mockSpec) + })) + defer server.Close() + + // Create temp directory + tmpDir, err := os.MkdirTemp("", "openapi_test") + if err != nil { + t.Fatalf("Failed to create temp 
dir: %v", err) + } + defer os.RemoveAll(tmpDir) + + // Test download + downloader := &OpenAPIDownloader{} + filePath, err := downloader.Download(server.URL+"/openapi.json", tmpDir) + if err != nil { + t.Fatalf("Download failed: %v", err) + } + + // Verify file exists + if !fileExists(filePath) { + t.Errorf("Downloaded file does not exist: %s", filePath) + } + + // Verify file content + content, err := os.ReadFile(filePath) + if err != nil { + t.Fatalf("Failed to read downloaded file: %v", err) + } + + var downloadedSpec map[string]interface{} + if err := json.Unmarshal(content, &downloadedSpec); err != nil { + t.Fatalf("Failed to parse downloaded JSON: %v", err) + } + + // Verify servers field was added + servers, exists := downloadedSpec["servers"] + if !exists { + t.Error("Servers field was not added to the spec") + } + + if serversList, ok := servers.([]interface{}); ok { + if len(serversList) == 0 { + t.Error("Servers list is empty") + } + } else { + t.Error("Servers field is not a list") + } +} + +func TestOpenAPIDownloader_Download_NonJSONURL(t *testing.T) { + tmpDir, err := os.MkdirTemp("", "openapi_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tmpDir) + + downloader := &OpenAPIDownloader{} + _, err = downloader.Download("http://example.com/spec.yaml", tmpDir) + if err == nil { + t.Error("Expected error for non-JSON URL, but got none") + } + + if !strings.Contains(err.Error(), "URL does not appear to be an OpenAPI JSON spec") { + t.Errorf("Unexpected error message: %v", err) + } +} + +func TestOpenAPIDownloader_Download_HTTPError(t *testing.T) { + // Create mock server that returns 404 + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNotFound) + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "openapi_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tmpDir) + + 
downloader := &OpenAPIDownloader{} + _, err = downloader.Download(server.URL+"/openapi.json", tmpDir) + if err == nil { + t.Error("Expected error for HTTP 404, but got none") + } +} + +func TestOpenAPIDownloader_Download_InvalidJSON(t *testing.T) { + // Create mock server that returns invalid JSON + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.Write([]byte("invalid json")) + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "openapi_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tmpDir) + + downloader := &OpenAPIDownloader{} + _, err = downloader.Download(server.URL+"/openapi.json", tmpDir) + if err == nil { + t.Error("Expected error for invalid JSON, but got none") + } +} + +func TestOpenAPIDownloader_Download_Timeout(t *testing.T) { + // Create mock server with delay + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + time.Sleep(35 * time.Second) // Longer than 30 second timeout + json.NewEncoder(w).Encode(map[string]interface{}{"test": "data"}) + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "openapi_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tmpDir) + + downloader := &OpenAPIDownloader{} + _, err = downloader.Download(server.URL+"/openapi.json", tmpDir) + if err == nil { + t.Error("Expected timeout error, but got none") + } +} + +func TestOpenAPIDownloader_Download_WithExistingServers(t *testing.T) { + // Create a mock OpenAPI spec with existing servers + mockSpec := map[string]interface{}{ + "openapi": "3.0.0", + "info": map[string]interface{}{ + "title": "Test API", + "version": "1.0.0", + }, + "servers": []interface{}{ + map[string]interface{}{ + "url": "https://existing-server.com", + }, + }, + "paths": map[string]interface{}{}, + } + + // Create mock server + server := 
httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(mockSpec) + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "openapi_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tmpDir) + + downloader := &OpenAPIDownloader{} + filePath, err := downloader.Download(server.URL+"/openapi.json", tmpDir) + if err != nil { + t.Fatalf("Download failed: %v", err) + } + + // Verify existing servers are preserved + content, err := os.ReadFile(filePath) + if err != nil { + t.Fatalf("Failed to read downloaded file: %v", err) + } + + var downloadedSpec map[string]interface{} + if err := json.Unmarshal(content, &downloadedSpec); err != nil { + t.Fatalf("Failed to parse downloaded JSON: %v", err) + } + + servers, exists := downloadedSpec["servers"] + if !exists { + t.Error("Servers field was removed from the spec") + } + + if serversList, ok := servers.([]interface{}); ok { + if len(serversList) != 1 { + t.Errorf("Expected 1 server, got %d", len(serversList)) + } + } +} + +// Helper function to check if file exists +func fileExists(filename string) bool { + _, err := os.Stat(filename) + return !os.IsNotExist(err) +} diff --git a/pkg/input/formats/swagger/downloader.go b/pkg/input/formats/swagger/downloader.go new file mode 100644 index 0000000000..010287f6fd --- /dev/null +++ b/pkg/input/formats/swagger/downloader.go @@ -0,0 +1,148 @@ +package swagger + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "os" + "path/filepath" + "strings" + "time" + + "github.com/pkg/errors" + "github.com/projectdiscovery/nuclei/v3/pkg/input/formats" + "gopkg.in/yaml.v3" +) + +// SwaggerDownloader implements the SpecDownloader interface for Swagger 2.0 specs +type SwaggerDownloader struct{} + +// NewDownloader creates a new Swagger downloader +func NewDownloader() formats.SpecDownloader { + return 
&SwaggerDownloader{} +} + +// This function downloads a Swagger 2.0 spec from the given URL and saves it to tmpDir +func (d *SwaggerDownloader) Download(urlStr, tmpDir string) (string, error) { + // Swagger can be JSON or YAML + supportedExts := []string{".json", ".yaml", ".yml"} + isSupported := false + for _, ext := range supportedExts { + if strings.HasSuffix(urlStr, ext) { + isSupported = true + break + } + } + if !isSupported && !strings.Contains(urlStr, "swagger") { + return "", fmt.Errorf("URL does not appear to be a Swagger spec (supported: %v)", supportedExts) + } + + client := &http.Client{Timeout: 30 * time.Second} + + resp, err := client.Get(urlStr) + if err != nil { + return "", errors.Wrap(err, "failed to download Swagger spec") + } + defer resp.Body.Close() + + if resp.StatusCode != http.StatusOK { + return "", fmt.Errorf("HTTP %d when downloading Swagger spec", resp.StatusCode) + } + + bodyBytes, err := io.ReadAll(resp.Body) + if err != nil { + return "", errors.Wrap(err, "failed to read response body") + } + + // Determine format and parse + var spec map[string]interface{} + var isYAML bool + + // Try JSON first + if err := json.Unmarshal(bodyBytes, &spec); err != nil { + // Then try YAML + if err := yaml.Unmarshal(bodyBytes, &spec); err != nil { + return "", fmt.Errorf("downloaded content is neither valid JSON nor YAML: %w", err) + } + isYAML = true + } + + // Validate it's a Swagger 2.0 spec + if swagger, exists := spec["swagger"]; exists { + if swaggerStr, ok := swagger.(string); ok && strings.HasPrefix(swaggerStr, "2.") { + // Valid Swagger 2.0 spec + } else { + return "", fmt.Errorf("not a valid Swagger 2.0 spec (found version: %v)", swagger) + } + } else { + return "", fmt.Errorf("not a Swagger spec (missing 'swagger' field)") + } + + // Extract host from URL for host configuration + parsedURL, err := url.Parse(urlStr) + if err != nil { + return "", errors.Wrap(err, "failed to parse URL") + } + host := parsedURL.Host + + // Add host if 
missing + if _, exists := spec["host"]; !exists { + spec["host"] = host + } + + // Add schemes if missing + if _, exists := spec["schemes"]; !exists { + scheme := parsedURL.Scheme + if scheme == "" { + scheme = "https" + } + spec["schemes"] = []string{scheme} + } + + // Create output directory + swaggerDir := filepath.Join(tmpDir, "swagger") + if err := os.MkdirAll(swaggerDir, 0755); err != nil { + return "", errors.Wrap(err, "failed to create swagger directory") + } + + // Generate filename and content based on original format + var filename string + var content []byte + + if isYAML { + filename = fmt.Sprintf("swagger-spec-%d.yaml", time.Now().Unix()) + content, err = yaml.Marshal(spec) + if err != nil { + return "", errors.Wrap(err, "failed to marshal modified YAML spec") + } + } else { + filename = fmt.Sprintf("swagger-spec-%d.json", time.Now().Unix()) + content, err = json.Marshal(spec) + if err != nil { + return "", errors.Wrap(err, "failed to marshal modified JSON spec") + } + } + + filePath := filepath.Join(swaggerDir, filename) + + // Write file + file, err := os.Create(filePath) + if err != nil { + return "", errors.Wrap(err, "failed to create file") + } + defer file.Close() + + if _, err := file.Write(content); err != nil { + os.Remove(filePath) + return "", errors.Wrap(err, "failed to write file") + } + + return filePath, nil +} + +// SupportedExtensions returns the list of supported file extensions for Swagger +func (d *SwaggerDownloader) SupportedExtensions() []string { + return []string{".json", ".yaml", ".yml"} +} diff --git a/pkg/input/formats/swagger/downloader_test.go b/pkg/input/formats/swagger/downloader_test.go new file mode 100644 index 0000000000..abc45dfbb1 --- /dev/null +++ b/pkg/input/formats/swagger/downloader_test.go @@ -0,0 +1,306 @@ +package swagger + +import ( + "encoding/json" + "net/http" + "net/http/httptest" + "os" + "strings" + "testing" + "time" + + "gopkg.in/yaml.v3" +) + +func TestSwaggerDownloader_SupportedExtensions(t 
*testing.T) { + downloader := &SwaggerDownloader{} + extensions := downloader.SupportedExtensions() + + expected := []string{".json", ".yaml", ".yml"} + if len(extensions) != len(expected) { + t.Errorf("Expected %d extensions, got %d", len(expected), len(extensions)) + } + + for i, ext := range extensions { + if ext != expected[i] { + t.Errorf("Expected extension %s, got %s", expected[i], ext) + } + } +} + +func TestSwaggerDownloader_Download_JSON_Success(t *testing.T) { + // Create a mock Swagger spec (JSON) + mockSpec := map[string]interface{}{ + "swagger": "2.0", + "info": map[string]interface{}{ + "title": "Test API", + "version": "1.0.0", + }, + "paths": map[string]interface{}{ + "/test": map[string]interface{}{ + "get": map[string]interface{}{ + "summary": "Test endpoint", + }, + }, + }, + } + + // Create mock server + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(mockSpec) + })) + defer server.Close() + + // Create temp directory + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tmpDir) + + // Test download + downloader := &SwaggerDownloader{} + filePath, err := downloader.Download(server.URL+"/swagger.json", tmpDir) + if err != nil { + t.Fatalf("Download failed: %v", err) + } + + // Verify file exists + if !fileExists(filePath) { + t.Errorf("Downloaded file does not exist: %s", filePath) + } + + // Verify file content + content, err := os.ReadFile(filePath) + if err != nil { + t.Fatalf("Failed to read downloaded file: %v", err) + } + + var downloadedSpec map[string]interface{} + if err := json.Unmarshal(content, &downloadedSpec); err != nil { + t.Fatalf("Failed to parse downloaded JSON: %v", err) + } + + // Verify host field was added + _, exists := downloadedSpec["host"] + if !exists { + t.Error("Host field was not added to the spec") + } 
+} + +func TestSwaggerDownloader_Download_YAML_Success(t *testing.T) { + // Create a mock Swagger spec (YAML) + mockSpecYAML := ` +swagger: "2.0" +info: + title: "Test API" + version: "1.0.0" +paths: + /test: + get: + summary: "Test endpoint" +` + + // Create mock server + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/yaml") + w.Write([]byte(mockSpecYAML)) + })) + defer server.Close() + + // Create temp directory + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tmpDir) + + // Test download + downloader := &SwaggerDownloader{} + filePath, err := downloader.Download(server.URL+"/swagger.yaml", tmpDir) + if err != nil { + t.Fatalf("Download failed: %v", err) + } + + // Verify file exists + if !fileExists(filePath) { + t.Errorf("Downloaded file does not exist: %s", filePath) + } + + // Verify file content + content, err := os.ReadFile(filePath) + if err != nil { + t.Fatalf("Failed to read downloaded file: %v", err) + } + + var downloadedSpec map[string]interface{} + if err := yaml.Unmarshal(content, &downloadedSpec); err != nil { + t.Fatalf("Failed to parse downloaded YAML: %v", err) + } + + // Verify host field was added + _, exists := downloadedSpec["host"] + if !exists { + t.Error("Host field was not added to the spec") + } +} + +func TestSwaggerDownloader_Download_UnsupportedExtension(t *testing.T) { + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tmpDir) + + downloader := &SwaggerDownloader{} + _, err = downloader.Download("http://example.com/spec.xml", tmpDir) + if err == nil { + t.Error("Expected error for unsupported extension, but got none") + } + + if !strings.Contains(err.Error(), "URL does not appear to be a Swagger spec") { + t.Errorf("Unexpected error message: %v", err) + } +} + 
+func TestSwaggerDownloader_Download_HTTPError(t *testing.T) { + // Create mock server that returns 404 + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusNotFound) + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tmpDir) + + downloader := &SwaggerDownloader{} + _, err = downloader.Download(server.URL+"/swagger.json", tmpDir) + if err == nil { + t.Error("Expected error for HTTP 404, but got none") + } +} + +func TestSwaggerDownloader_Download_InvalidJSON(t *testing.T) { + // Create mock server that returns invalid JSON + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.Write([]byte("invalid json")) + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tmpDir) + + downloader := &SwaggerDownloader{} + _, err = downloader.Download(server.URL+"/swagger.json", tmpDir) + if err == nil { + t.Error("Expected error for invalid JSON, but got none") + } +} + +func TestSwaggerDownloader_Download_InvalidYAML(t *testing.T) { + // Create mock server that returns invalid YAML + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/yaml") + w.Write([]byte("invalid: yaml: content: [")) + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tmpDir) + + downloader := &SwaggerDownloader{} + _, err = downloader.Download(server.URL+"/swagger.yaml", tmpDir) + if err == nil { + t.Error("Expected error for invalid YAML, but got none") + } +} + +func 
TestSwaggerDownloader_Download_Timeout(t *testing.T) { + // Create mock server with delay + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + time.Sleep(35 * time.Second) // Longer than 30 second timeout + json.NewEncoder(w).Encode(map[string]interface{}{"test": "data"}) + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tmpDir) + + downloader := &SwaggerDownloader{} + _, err = downloader.Download(server.URL+"/swagger.json", tmpDir) + if err == nil { + t.Error("Expected timeout error, but got none") + } +} + +func TestSwaggerDownloader_Download_WithExistingHost(t *testing.T) { + // Create a mock Swagger spec with existing host + mockSpec := map[string]interface{}{ + "swagger": "2.0", + "info": map[string]interface{}{ + "title": "Test API", + "version": "1.0.0", + }, + "host": "existing-host.com", + "paths": map[string]interface{}{}, + } + + // Create mock server + server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + json.NewEncoder(w).Encode(mockSpec) + })) + defer server.Close() + + tmpDir, err := os.MkdirTemp("", "swagger_test") + if err != nil { + t.Fatalf("Failed to create temp dir: %v", err) + } + defer os.RemoveAll(tmpDir) + + downloader := &SwaggerDownloader{} + filePath, err := downloader.Download(server.URL+"/swagger.json", tmpDir) + if err != nil { + t.Fatalf("Download failed: %v", err) + } + + // Verify existing host is preserved + content, err := os.ReadFile(filePath) + if err != nil { + t.Fatalf("Failed to read downloaded file: %v", err) + } + + var downloadedSpec map[string]interface{} + if err := json.Unmarshal(content, &downloadedSpec); err != nil { + t.Fatalf("Failed to parse downloaded JSON: %v", err) + } + + host, exists := downloadedSpec["host"] + if !exists { + t.Error("Host field was 
removed from the spec") + } + + if hostStr, ok := host.(string); !ok || hostStr != "existing-host.com" { + t.Errorf("Expected host 'existing-host.com', got '%v'", host) + } +} + +// Helper function to check if file exists +func fileExists(filename string) bool { + _, err := os.Stat(filename) + return !os.IsNotExist(err) +} diff --git a/pkg/input/provider/interface.go b/pkg/input/provider/interface.go index 9e1d09ab25..881f59aa33 100644 --- a/pkg/input/provider/interface.go +++ b/pkg/input/provider/interface.go @@ -7,6 +7,8 @@ import ( "github.com/projectdiscovery/gologger" "github.com/projectdiscovery/nuclei/v3/pkg/input/formats" + "github.com/projectdiscovery/nuclei/v3/pkg/input/formats/openapi" + "github.com/projectdiscovery/nuclei/v3/pkg/input/formats/swagger" "github.com/projectdiscovery/nuclei/v3/pkg/input/provider/http" "github.com/projectdiscovery/nuclei/v3/pkg/input/provider/list" "github.com/projectdiscovery/nuclei/v3/pkg/input/types" @@ -74,6 +76,8 @@ type InputProvider interface { type InputOptions struct { // Options for global config Options *configTypes.Options + // TempDir is the temporary directory for storing files + TempDir string // NotFoundCallback is the callback to call when input is not found // only supported in list input provider NotFoundCallback func(template string) bool @@ -107,20 +111,49 @@ func NewInputProvider(opts InputOptions) (InputProvider, error) { Options: opts.Options, NotFoundCallback: opts.NotFoundCallback, }) - } else { - // use HttpInputProvider - return http.NewHttpInputProvider(&http.HttpMultiFormatOptions{ - InputFile: opts.Options.TargetsFilePath, - InputMode: opts.Options.InputFileMode, - Options: formats.InputFormatOptions{ - Variables: generators.MergeMaps(extraVars, opts.Options.Vars.AsMap()), - SkipFormatValidation: opts.Options.SkipFormatValidation, - RequiredOnly: opts.Options.FormatUseRequiredOnly, - VarsTextTemplating: opts.Options.VarsTextTemplating, - VarsFilePaths: opts.Options.VarsFilePaths, - }, - }) + } 
else if len(opts.Options.Targets) > 0 && + (strings.EqualFold(opts.Options.InputFileMode, "openapi") || strings.EqualFold(opts.Options.InputFileMode, "swagger")) { + + if len(opts.Options.Targets) > 1 { + return nil, fmt.Errorf("only one target URL is supported in %s input mode", opts.Options.InputFileMode) + } + + target := opts.Options.Targets[0] + if strings.HasPrefix(target, "http://") || strings.HasPrefix(target, "https://") { + var downloader formats.SpecDownloader + var tempFile string + var err error + + switch strings.ToLower(opts.Options.InputFileMode) { + case "openapi": + downloader = openapi.NewDownloader() + tempFile, err = downloader.Download(target, opts.TempDir) + case "swagger": + downloader = swagger.NewDownloader() + tempFile, err = downloader.Download(target, opts.TempDir) + default: + return nil, fmt.Errorf("unsupported input mode: %s", opts.Options.InputFileMode) + } + + if err != nil { + return nil, fmt.Errorf("failed to download %s spec from url %s: %w", opts.Options.InputFileMode, target, err) + } + + opts.Options.TargetsFilePath = tempFile + } } + + return http.NewHttpInputProvider(&http.HttpMultiFormatOptions{ + InputFile: opts.Options.TargetsFilePath, + InputMode: opts.Options.InputFileMode, + Options: formats.InputFormatOptions{ + Variables: generators.MergeMaps(extraVars, opts.Options.Vars.AsMap()), + SkipFormatValidation: opts.Options.SkipFormatValidation, + RequiredOnly: opts.Options.FormatUseRequiredOnly, + VarsTextTemplating: opts.Options.VarsTextTemplating, + VarsFilePaths: opts.Options.VarsFilePaths, + }, + }) } // SupportedInputFormats returns all supported input formats of nuclei From 1684f4143e8268d57a52ab033bc38ac331b39439 Mon Sep 17 00:00:00 2001 From: tvroi Date: Mon, 20 Oct 2025 18:36:17 +0700 Subject: [PATCH 06/55] fix (openapi/swagger): improve error handling and tmpDir cleanup --- internal/runner/runner.go | 17 ++++- pkg/input/formats/openapi/downloader.go | 38 +++++++---- pkg/input/formats/openapi/downloader_test.go | 
58 +++++++++++++--- pkg/input/formats/swagger/downloader.go | 26 ++++++-- pkg/input/formats/swagger/downloader_test.go | 69 ++++++++++++++++---- 5 files changed, 168 insertions(+), 40 deletions(-) diff --git a/internal/runner/runner.go b/internal/runner/runner.go index 1594f1e260..9000bacb04 100644 --- a/internal/runner/runner.go +++ b/internal/runner/runner.go @@ -254,9 +254,20 @@ func New(options *types.Options) (*Runner, error) { os.Exit(0) } - if tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*"); err == nil { - runner.tmpDir = tmpDir + tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*") + if err != nil { + return nil, errors.Wrap(err, "could not create temporary directory") } + runner.tmpDir = tmpDir + + // Cleanup tmpDir only if initialization fails + // On successful initialization, Close() method will handle cleanup + cleanupOnError := true + defer func() { + if cleanupOnError && runner.tmpDir != "" { + _ = os.RemoveAll(runner.tmpDir) + } + }() // create the input provider and load the inputs inputProvider, err := provider.NewInputProvider(provider.InputOptions{Options: options, TempDir: runner.tmpDir}) @@ -390,6 +401,8 @@ func New(options *types.Options) (*Runner, error) { } runner.rateLimiter = utils.GetRateLimiter(context.Background(), options.RateLimit, options.RateLimitDuration) + // Initialization successful, disable cleanup on error + cleanupOnError = false return runner, nil } diff --git a/pkg/input/formats/openapi/downloader.go b/pkg/input/formats/openapi/downloader.go index 22ca513877..a1d9c80428 100644 --- a/pkg/input/formats/openapi/downloader.go +++ b/pkg/input/formats/openapi/downloader.go @@ -30,19 +30,26 @@ func (d *OpenAPIDownloader) Download(urlStr, tmpDir string) (string, error) { return "", fmt.Errorf("URL does not appear to be an OpenAPI JSON spec") } - client := &http.Client{Timeout: 30 * time.Second} + var httpTimeout = 30 * time.Second + const maxSpecSizeBytes = 10 * 1024 * 1024 // 10MB + client := &http.Client{Timeout: httpTimeout} resp, 
err := client.Get(urlStr) if err != nil { return "", errors.Wrap(err, "failed to download OpenAPI spec") } - defer resp.Body.Close() + + defer func() { + if err := resp.Body.Close(); err != nil { + errors.Wrap(err, "failed to close response body") + } + }() if resp.StatusCode != http.StatusOK { return "", fmt.Errorf("HTTP %d when downloading OpenAPI spec", resp.StatusCode) } - bodyBytes, err := io.ReadAll(resp.Body) + bodyBytes, err := io.ReadAll(io.LimitReader(resp.Body, maxSpecSizeBytes)) if err != nil { return "", errors.Wrap(err, "failed to read response body") } @@ -70,17 +77,17 @@ func (d *OpenAPIDownloader) Download(urlStr, tmpDir string) (string, error) { return "", errors.Wrap(err, "failed to parse URL") } host := parsedURL.Host + scheme := parsedURL.Scheme + if scheme == "" { + scheme = "https" + } // Add servers section if missing or empty servers, exists := spec["servers"] if !exists || servers == nil { - spec["servers"] = []map[string]interface{}{ - {"url": "https://" + host}, - } - } else if serversList, ok := servers.([]interface{}); ok && len(serversList) == 0 { - spec["servers"] = []map[string]interface{}{ - {"url": "https://" + host}, - } + spec["servers"] = []map[string]interface{}{{"url": scheme + "://" + host}} + } else if serverList, ok := servers.([]interface{}); ok && len(serverList) == 0 { + spec["servers"] = []map[string]interface{}{{"url": scheme + "://" + host}} } // Marshal back to JSON @@ -92,6 +99,7 @@ func (d *OpenAPIDownloader) Download(urlStr, tmpDir string) (string, error) { // Create output directory openapiDir := filepath.Join(tmpDir, "openapi") if err := os.MkdirAll(openapiDir, 0755); err != nil { + return "", errors.Wrap(err, "failed to create openapi directory") } @@ -104,10 +112,16 @@ func (d *OpenAPIDownloader) Download(urlStr, tmpDir string) (string, error) { if err != nil { return "", fmt.Errorf("failed to create file: %w", err) } - defer file.Close() + + defer func() { + if err := file.Close(); err != nil { + 
errors.Wrap(err, "failed to close file") + } + }() if _, err := file.Write(modifiedJSON); err != nil { - os.Remove(filePath) + _ = os.Remove(filePath) + return "", errors.Wrap(err, "failed to write OpenAPI spec to file") } diff --git a/pkg/input/formats/openapi/downloader_test.go b/pkg/input/formats/openapi/downloader_test.go index e5fc7784a4..2add3d4f0d 100644 --- a/pkg/input/formats/openapi/downloader_test.go +++ b/pkg/input/formats/openapi/downloader_test.go @@ -46,7 +46,9 @@ func TestOpenAPIDownloader_Download_Success(t *testing.T) { // Create mock server server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(mockSpec) + if err := json.NewEncoder(w).Encode(mockSpec); err != nil { + http.Error(w, "failed to encode response", http.StatusInternalServerError) + } })) defer server.Close() @@ -55,7 +57,12 @@ func TestOpenAPIDownloader_Download_Success(t *testing.T) { if err != nil { t.Fatalf("Failed to create temp dir: %v", err) } - defer os.RemoveAll(tmpDir) + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() // Test download downloader := &OpenAPIDownloader{} @@ -100,7 +107,12 @@ func TestOpenAPIDownloader_Download_NonJSONURL(t *testing.T) { if err != nil { t.Fatalf("Failed to create temp dir: %v", err) } - defer os.RemoveAll(tmpDir) + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() downloader := &OpenAPIDownloader{} _, err = downloader.Download("http://example.com/spec.yaml", tmpDir) @@ -124,7 +136,12 @@ func TestOpenAPIDownloader_Download_HTTPError(t *testing.T) { if err != nil { t.Fatalf("Failed to create temp dir: %v", err) } - defer os.RemoveAll(tmpDir) + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() downloader := 
&OpenAPIDownloader{} _, err = downloader.Download(server.URL+"/openapi.json", tmpDir) @@ -137,7 +154,9 @@ func TestOpenAPIDownloader_Download_InvalidJSON(t *testing.T) { // Create mock server that returns invalid JSON server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") - w.Write([]byte("invalid json")) + if _, err := w.Write([]byte("invalid json")); err != nil { + http.Error(w, "failed to write response", http.StatusInternalServerError) + } })) defer server.Close() @@ -145,7 +164,12 @@ func TestOpenAPIDownloader_Download_InvalidJSON(t *testing.T) { if err != nil { t.Fatalf("Failed to create temp dir: %v", err) } - defer os.RemoveAll(tmpDir) + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() downloader := &OpenAPIDownloader{} _, err = downloader.Download(server.URL+"/openapi.json", tmpDir) @@ -158,7 +182,9 @@ func TestOpenAPIDownloader_Download_Timeout(t *testing.T) { // Create mock server with delay server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { time.Sleep(35 * time.Second) // Longer than 30 second timeout - json.NewEncoder(w).Encode(map[string]interface{}{"test": "data"}) + if err := json.NewEncoder(w).Encode(map[string]interface{}{"test": "data"}); err != nil { + http.Error(w, "failed to encode response", http.StatusInternalServerError) + } })) defer server.Close() @@ -166,7 +192,12 @@ func TestOpenAPIDownloader_Download_Timeout(t *testing.T) { if err != nil { t.Fatalf("Failed to create temp dir: %v", err) } - defer os.RemoveAll(tmpDir) + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() downloader := &OpenAPIDownloader{} _, err = downloader.Download(server.URL+"/openapi.json", tmpDir) @@ -194,7 +225,9 @@ func TestOpenAPIDownloader_Download_WithExistingServers(t *testing.T) { // 
Create mock server server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(mockSpec) + if err := json.NewEncoder(w).Encode(mockSpec); err != nil { + http.Error(w, "failed to encode response", http.StatusInternalServerError) + } })) defer server.Close() @@ -202,7 +235,12 @@ func TestOpenAPIDownloader_Download_WithExistingServers(t *testing.T) { if err != nil { t.Fatalf("Failed to create temp dir: %v", err) } - defer os.RemoveAll(tmpDir) + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() downloader := &OpenAPIDownloader{} filePath, err := downloader.Download(server.URL+"/openapi.json", tmpDir) diff --git a/pkg/input/formats/swagger/downloader.go b/pkg/input/formats/swagger/downloader.go index 010287f6fd..f78f77f7da 100644 --- a/pkg/input/formats/swagger/downloader.go +++ b/pkg/input/formats/swagger/downloader.go @@ -39,19 +39,26 @@ func (d *SwaggerDownloader) Download(urlStr, tmpDir string) (string, error) { return "", fmt.Errorf("URL does not appear to be a Swagger spec (supported: %v)", supportedExts) } - client := &http.Client{Timeout: 30 * time.Second} + var httpTimeout = 30 * time.Second + const maxSpecSizeBytes = 10 * 1024 * 1024 // 10MB + client := &http.Client{Timeout: httpTimeout} resp, err := client.Get(urlStr) if err != nil { return "", errors.Wrap(err, "failed to download Swagger spec") } - defer resp.Body.Close() + + defer func() { + if err := resp.Body.Close(); err != nil { + errors.Wrap(err, "failed to close response body") + } + }() if resp.StatusCode != http.StatusOK { return "", fmt.Errorf("HTTP %d when downloading Swagger spec", resp.StatusCode) } - bodyBytes, err := io.ReadAll(resp.Body) + bodyBytes, err := io.ReadAll(io.LimitReader(resp.Body, maxSpecSizeBytes)) if err != nil { return "", errors.Wrap(err, "failed to read response body") } @@ -132,10 +139,19 @@ 
func (d *SwaggerDownloader) Download(urlStr, tmpDir string) (string, error) { if err != nil { return "", errors.Wrap(err, "failed to create file") } - defer file.Close() + + defer func() { + if err := file.Close(); err != nil { + errors.Wrap(err, "failed to close file") + } + }() if _, err := file.Write(content); err != nil { - os.Remove(filePath) + err := os.Remove(filePath) + if err != nil { + errors.Wrap(err, "failed to remove incomplete file") + } + return "", errors.Wrap(err, "failed to write file") } diff --git a/pkg/input/formats/swagger/downloader_test.go b/pkg/input/formats/swagger/downloader_test.go index abc45dfbb1..7d85a276a6 100644 --- a/pkg/input/formats/swagger/downloader_test.go +++ b/pkg/input/formats/swagger/downloader_test.go @@ -48,7 +48,9 @@ func TestSwaggerDownloader_Download_JSON_Success(t *testing.T) { // Create mock server server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(mockSpec) + if err := json.NewEncoder(w).Encode(mockSpec); err != nil { + http.Error(w, "failed to encode response", http.StatusInternalServerError) + } })) defer server.Close() @@ -57,7 +59,12 @@ func TestSwaggerDownloader_Download_JSON_Success(t *testing.T) { if err != nil { t.Fatalf("Failed to create temp dir: %v", err) } - defer os.RemoveAll(tmpDir) + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() // Test download downloader := &SwaggerDownloader{} @@ -107,6 +114,7 @@ paths: w.Header().Set("Content-Type", "application/yaml") w.Write([]byte(mockSpecYAML)) })) + defer server.Close() // Create temp directory @@ -114,7 +122,12 @@ paths: if err != nil { t.Fatalf("Failed to create temp dir: %v", err) } - defer os.RemoveAll(tmpDir) + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() // Test download downloader 
:= &SwaggerDownloader{} @@ -151,7 +164,12 @@ func TestSwaggerDownloader_Download_UnsupportedExtension(t *testing.T) { if err != nil { t.Fatalf("Failed to create temp dir: %v", err) } - defer os.RemoveAll(tmpDir) + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() downloader := &SwaggerDownloader{} _, err = downloader.Download("http://example.com/spec.xml", tmpDir) @@ -175,7 +193,12 @@ func TestSwaggerDownloader_Download_HTTPError(t *testing.T) { if err != nil { t.Fatalf("Failed to create temp dir: %v", err) } - defer os.RemoveAll(tmpDir) + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() downloader := &SwaggerDownloader{} _, err = downloader.Download(server.URL+"/swagger.json", tmpDir) @@ -188,7 +211,9 @@ func TestSwaggerDownloader_Download_InvalidJSON(t *testing.T) { // Create mock server that returns invalid JSON server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") - w.Write([]byte("invalid json")) + if _, err := w.Write([]byte("invalid json")); err != nil { + http.Error(w, "failed to write response", http.StatusInternalServerError) + } })) defer server.Close() @@ -196,7 +221,12 @@ func TestSwaggerDownloader_Download_InvalidJSON(t *testing.T) { if err != nil { t.Fatalf("Failed to create temp dir: %v", err) } - defer os.RemoveAll(tmpDir) + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() downloader := &SwaggerDownloader{} _, err = downloader.Download(server.URL+"/swagger.json", tmpDir) @@ -217,7 +247,12 @@ func TestSwaggerDownloader_Download_InvalidYAML(t *testing.T) { if err != nil { t.Fatalf("Failed to create temp dir: %v", err) } - defer os.RemoveAll(tmpDir) + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove 
temp dir: %v", err) + } + }() downloader := &SwaggerDownloader{} _, err = downloader.Download(server.URL+"/swagger.yaml", tmpDir) @@ -230,7 +265,9 @@ func TestSwaggerDownloader_Download_Timeout(t *testing.T) { // Create mock server with delay server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { time.Sleep(35 * time.Second) // Longer than 30 second timeout - json.NewEncoder(w).Encode(map[string]interface{}{"test": "data"}) + if err := json.NewEncoder(w).Encode(map[string]interface{}{"test": "data"}); err != nil { + http.Error(w, "failed to encode response", http.StatusInternalServerError) + } })) defer server.Close() @@ -238,7 +275,12 @@ func TestSwaggerDownloader_Download_Timeout(t *testing.T) { if err != nil { t.Fatalf("Failed to create temp dir: %v", err) } - defer os.RemoveAll(tmpDir) + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() downloader := &SwaggerDownloader{} _, err = downloader.Download(server.URL+"/swagger.json", tmpDir) @@ -270,7 +312,12 @@ func TestSwaggerDownloader_Download_WithExistingHost(t *testing.T) { if err != nil { t.Fatalf("Failed to create temp dir: %v", err) } - defer os.RemoveAll(tmpDir) + + defer func() { + if err := os.RemoveAll(tmpDir); err != nil { + t.Fatalf("Failed to remove temp dir: %v", err) + } + }() downloader := &SwaggerDownloader{} filePath, err := downloader.Download(server.URL+"/swagger.json", tmpDir) From f0429aa4b732e75078796dc9aae81ebf23f4e9f0 Mon Sep 17 00:00:00 2001 From: tvroi Date: Mon, 20 Oct 2025 18:49:06 +0700 Subject: [PATCH 07/55] fix(openapi/swagger): err shadowing on write failure --- pkg/input/formats/openapi/downloader.go | 5 ++--- pkg/input/formats/swagger/downloader.go | 10 +++------- 2 files changed, 5 insertions(+), 10 deletions(-) diff --git a/pkg/input/formats/openapi/downloader.go b/pkg/input/formats/openapi/downloader.go index a1d9c80428..72c7edb4db 100644 --- 
a/pkg/input/formats/openapi/downloader.go +++ b/pkg/input/formats/openapi/downloader.go @@ -119,10 +119,9 @@ func (d *OpenAPIDownloader) Download(urlStr, tmpDir string) (string, error) { } }() - if _, err := file.Write(modifiedJSON); err != nil { + if _, writeErr := file.Write(modifiedJSON); writeErr != nil { _ = os.Remove(filePath) - - return "", errors.Wrap(err, "failed to write OpenAPI spec to file") + return "", errors.Wrap(writeErr, "failed to write OpenAPI spec to file") } return filePath, nil diff --git a/pkg/input/formats/swagger/downloader.go b/pkg/input/formats/swagger/downloader.go index f78f77f7da..de30079bd8 100644 --- a/pkg/input/formats/swagger/downloader.go +++ b/pkg/input/formats/swagger/downloader.go @@ -146,13 +146,9 @@ func (d *SwaggerDownloader) Download(urlStr, tmpDir string) (string, error) { } }() - if _, err := file.Write(content); err != nil { - err := os.Remove(filePath) - if err != nil { - errors.Wrap(err, "failed to remove incomplete file") - } - - return "", errors.Wrap(err, "failed to write file") + if _, writeErr := file.Write(content); writeErr != nil { + _ = os.Remove(filePath) + return "", errors.Wrap(writeErr, "failed to write file") } return filePath, nil From 89cfb75bb67ea44782ef039605d42b44b4062728 Mon Sep 17 00:00:00 2001 From: tvroi Date: Mon, 20 Oct 2025 18:56:47 +0700 Subject: [PATCH 08/55] fix(openapi/swagger): remove discarded error in defer --- pkg/input/formats/openapi/downloader.go | 9 ++------- pkg/input/formats/swagger/downloader.go | 8 ++------ 2 files changed, 4 insertions(+), 13 deletions(-) diff --git a/pkg/input/formats/openapi/downloader.go b/pkg/input/formats/openapi/downloader.go index 72c7edb4db..3c50cc9fc4 100644 --- a/pkg/input/formats/openapi/downloader.go +++ b/pkg/input/formats/openapi/downloader.go @@ -40,9 +40,7 @@ func (d *OpenAPIDownloader) Download(urlStr, tmpDir string) (string, error) { } defer func() { - if err := resp.Body.Close(); err != nil { - errors.Wrap(err, "failed to close response 
body") - } + _ = resp.Body.Close() }() if resp.StatusCode != http.StatusOK { @@ -99,7 +97,6 @@ func (d *OpenAPIDownloader) Download(urlStr, tmpDir string) (string, error) { // Create output directory openapiDir := filepath.Join(tmpDir, "openapi") if err := os.MkdirAll(openapiDir, 0755); err != nil { - return "", errors.Wrap(err, "failed to create openapi directory") } @@ -114,9 +111,7 @@ func (d *OpenAPIDownloader) Download(urlStr, tmpDir string) (string, error) { } defer func() { - if err := file.Close(); err != nil { - errors.Wrap(err, "failed to close file") - } + _ = file.Close() }() if _, writeErr := file.Write(modifiedJSON); writeErr != nil { diff --git a/pkg/input/formats/swagger/downloader.go b/pkg/input/formats/swagger/downloader.go index de30079bd8..3a770e307c 100644 --- a/pkg/input/formats/swagger/downloader.go +++ b/pkg/input/formats/swagger/downloader.go @@ -49,9 +49,7 @@ func (d *SwaggerDownloader) Download(urlStr, tmpDir string) (string, error) { } defer func() { - if err := resp.Body.Close(); err != nil { - errors.Wrap(err, "failed to close response body") - } + _ = resp.Body.Close() }() if resp.StatusCode != http.StatusOK { @@ -141,9 +139,7 @@ func (d *SwaggerDownloader) Download(urlStr, tmpDir string) (string, error) { } defer func() { - if err := file.Close(); err != nil { - errors.Wrap(err, "failed to close file") - } + _ = file.Close() }() if _, writeErr := file.Write(content); writeErr != nil { From f57bd8c8eea4ddba2d21e4f439ca0c187201b46a Mon Sep 17 00:00:00 2001 From: tvroi Date: Tue, 21 Oct 2025 20:16:05 +0700 Subject: [PATCH 09/55] fix(openapi/swagger): linter and url validation --- pkg/input/formats/openapi/downloader.go | 2 +- pkg/input/formats/swagger/downloader.go | 11 ++++++----- pkg/input/formats/swagger/downloader_test.go | 12 +++++++++--- 3 files changed, 16 insertions(+), 9 deletions(-) diff --git a/pkg/input/formats/openapi/downloader.go b/pkg/input/formats/openapi/downloader.go index 3c50cc9fc4..1089642a94 100644 --- 
a/pkg/input/formats/openapi/downloader.go +++ b/pkg/input/formats/openapi/downloader.go @@ -26,7 +26,7 @@ func NewDownloader() formats.SpecDownloader { // This function downloads an OpenAPI 3.0 spec from the given URL and saves it to tmpDir func (d *OpenAPIDownloader) Download(urlStr, tmpDir string) (string, error) { // Validate URL format, OpenAPI 3.0 specs are typically JSON - if !strings.HasSuffix(urlStr, ".json") && !strings.Contains(urlStr, "openapi") { + if !strings.HasSuffix(urlStr, ".json") { return "", fmt.Errorf("URL does not appear to be an OpenAPI JSON spec") } diff --git a/pkg/input/formats/swagger/downloader.go b/pkg/input/formats/swagger/downloader.go index 3a770e307c..10c3ba25a8 100644 --- a/pkg/input/formats/swagger/downloader.go +++ b/pkg/input/formats/swagger/downloader.go @@ -35,7 +35,7 @@ func (d *SwaggerDownloader) Download(urlStr, tmpDir string) (string, error) { break } } - if !isSupported && !strings.Contains(urlStr, "swagger") { + if !isSupported { return "", fmt.Errorf("URL does not appear to be a Swagger spec (supported: %v)", supportedExts) } @@ -90,7 +90,12 @@ func (d *SwaggerDownloader) Download(urlStr, tmpDir string) (string, error) { if err != nil { return "", errors.Wrap(err, "failed to parse URL") } + host := parsedURL.Host + scheme := parsedURL.Scheme + if scheme == "" { + scheme = "https" + } // Add host if missing if _, exists := spec["host"]; !exists { @@ -99,10 +104,6 @@ func (d *SwaggerDownloader) Download(urlStr, tmpDir string) (string, error) { // Add schemes if missing if _, exists := spec["schemes"]; !exists { - scheme := parsedURL.Scheme - if scheme == "" { - scheme = "https" - } spec["schemes"] = []string{scheme} } diff --git a/pkg/input/formats/swagger/downloader_test.go b/pkg/input/formats/swagger/downloader_test.go index 7d85a276a6..41d9958d23 100644 --- a/pkg/input/formats/swagger/downloader_test.go +++ b/pkg/input/formats/swagger/downloader_test.go @@ -112,7 +112,9 @@ paths: // Create mock server server := 
httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/yaml") - w.Write([]byte(mockSpecYAML)) + if _, err := w.Write([]byte(mockSpecYAML)); err != nil { + http.Error(w, "failed to write response", http.StatusInternalServerError) + } })) defer server.Close() @@ -239,7 +241,9 @@ func TestSwaggerDownloader_Download_InvalidYAML(t *testing.T) { // Create mock server that returns invalid YAML server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/yaml") - w.Write([]byte("invalid: yaml: content: [")) + if _, err := w.Write([]byte("invalid: yaml: content: [")); err != nil { + http.Error(w, "failed to write response", http.StatusInternalServerError) + } })) defer server.Close() @@ -304,7 +308,9 @@ func TestSwaggerDownloader_Download_WithExistingHost(t *testing.T) { // Create mock server server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { w.Header().Set("Content-Type", "application/json") - json.NewEncoder(w).Encode(mockSpec) + if err := json.NewEncoder(w).Encode(mockSpec); err != nil { + http.Error(w, "failed to encode response", http.StatusInternalServerError) + } })) defer server.Close() From e168f8dbfaf26ef52ab8a67704e12f992d13ea3e Mon Sep 17 00:00:00 2001 From: tvroi Date: Tue, 21 Oct 2025 20:27:33 +0700 Subject: [PATCH 10/55] fix(openapi/swagger): remove code duplication --- pkg/input/formats/swagger/downloader.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/input/formats/swagger/downloader.go b/pkg/input/formats/swagger/downloader.go index 10c3ba25a8..21bc3945df 100644 --- a/pkg/input/formats/swagger/downloader.go +++ b/pkg/input/formats/swagger/downloader.go @@ -27,7 +27,7 @@ func NewDownloader() formats.SpecDownloader { // This function downloads a Swagger 2.0 spec from the given URL and saves it to tmpDir func (d *SwaggerDownloader) Download(urlStr, 
tmpDir string) (string, error) { // Swagger can be JSON or YAML - supportedExts := []string{".json", ".yaml", ".yml"} + supportedExts := d.SupportedExtensions() isSupported := false for _, ext := range supportedExts { if strings.HasSuffix(urlStr, ext) { From 6f59472f78a87c9c174130805185369a2c17610a Mon Sep 17 00:00:00 2001 From: Mzack9999 Date: Wed, 29 Oct 2025 19:03:59 +0400 Subject: [PATCH 11/55] reusing dialer --- pkg/input/formats/formats.go | 4 +++- pkg/input/formats/openapi/downloader.go | 16 +++++++++++++--- pkg/input/formats/openapi/downloader_test.go | 12 ++++++------ pkg/input/formats/swagger/downloader.go | 16 +++++++++++++--- pkg/input/formats/swagger/downloader_test.go | 16 ++++++++-------- pkg/input/provider/interface.go | 15 +++++++++++++-- 6 files changed, 56 insertions(+), 23 deletions(-) diff --git a/pkg/input/formats/formats.go b/pkg/input/formats/formats.go index 4cbd96a59f..9de4d0d013 100644 --- a/pkg/input/formats/formats.go +++ b/pkg/input/formats/formats.go @@ -7,6 +7,7 @@ import ( "strings" "github.com/projectdiscovery/nuclei/v3/pkg/input/types" + "github.com/projectdiscovery/retryablehttp-go" fileutil "github.com/projectdiscovery/utils/file" "gopkg.in/yaml.v3" ) @@ -51,7 +52,8 @@ type Format interface { type SpecDownloader interface { // Download downloads the spec from the given URL and saves it to tmpDir // Returns the path to the downloaded file - Download(url, tmpDir string) (string, error) + // httpClient is a retryablehttp.Client instance (can be nil for fallback) + Download(url, tmpDir string, httpClient *retryablehttp.Client) (string, error) // SupportedExtensions returns the list of supported file extensions SupportedExtensions() []string } diff --git a/pkg/input/formats/openapi/downloader.go b/pkg/input/formats/openapi/downloader.go index 1089642a94..b7c363aad2 100644 --- a/pkg/input/formats/openapi/downloader.go +++ b/pkg/input/formats/openapi/downloader.go @@ -4,6 +4,7 @@ import ( "encoding/json" "fmt" "io" + "log" "net/http" 
"net/url" "os" @@ -13,6 +14,7 @@ import ( "github.com/pkg/errors" "github.com/projectdiscovery/nuclei/v3/pkg/input/formats" + "github.com/projectdiscovery/retryablehttp-go" ) // OpenAPIDownloader implements the SpecDownloader interface for OpenAPI 3.0 specs @@ -24,15 +26,23 @@ func NewDownloader() formats.SpecDownloader { } // This function downloads an OpenAPI 3.0 spec from the given URL and saves it to tmpDir -func (d *OpenAPIDownloader) Download(urlStr, tmpDir string) (string, error) { +func (d *OpenAPIDownloader) Download(urlStr, tmpDir string, httpClient *retryablehttp.Client) (string, error) { // Validate URL format, OpenAPI 3.0 specs are typically JSON if !strings.HasSuffix(urlStr, ".json") { return "", fmt.Errorf("URL does not appear to be an OpenAPI JSON spec") } - var httpTimeout = 30 * time.Second const maxSpecSizeBytes = 10 * 1024 * 1024 // 10MB - client := &http.Client{Timeout: httpTimeout} + + // Use provided httpClient or create a fallback + var client *http.Client + if httpClient != nil { + client = httpClient.HTTPClient + } else { + // Fallback to simple client if no httpClient provided + log.Fatal("no httpClient provided") + client = &http.Client{Timeout: 30 * time.Second} + } resp, err := client.Get(urlStr) if err != nil { diff --git a/pkg/input/formats/openapi/downloader_test.go b/pkg/input/formats/openapi/downloader_test.go index 2add3d4f0d..10ee93817a 100644 --- a/pkg/input/formats/openapi/downloader_test.go +++ b/pkg/input/formats/openapi/downloader_test.go @@ -66,7 +66,7 @@ func TestOpenAPIDownloader_Download_Success(t *testing.T) { // Test download downloader := &OpenAPIDownloader{} - filePath, err := downloader.Download(server.URL+"/openapi.json", tmpDir) + filePath, err := downloader.Download(server.URL+"/openapi.json", tmpDir, nil) if err != nil { t.Fatalf("Download failed: %v", err) } @@ -115,7 +115,7 @@ func TestOpenAPIDownloader_Download_NonJSONURL(t *testing.T) { }() downloader := &OpenAPIDownloader{} - _, err = 
downloader.Download("http://example.com/spec.yaml", tmpDir) + _, err = downloader.Download("http://example.com/spec.yaml", tmpDir, nil) if err == nil { t.Error("Expected error for non-JSON URL, but got none") } @@ -144,7 +144,7 @@ func TestOpenAPIDownloader_Download_HTTPError(t *testing.T) { }() downloader := &OpenAPIDownloader{} - _, err = downloader.Download(server.URL+"/openapi.json", tmpDir) + _, err = downloader.Download(server.URL+"/openapi.json", tmpDir, nil) if err == nil { t.Error("Expected error for HTTP 404, but got none") } @@ -172,7 +172,7 @@ func TestOpenAPIDownloader_Download_InvalidJSON(t *testing.T) { }() downloader := &OpenAPIDownloader{} - _, err = downloader.Download(server.URL+"/openapi.json", tmpDir) + _, err = downloader.Download(server.URL+"/openapi.json", tmpDir, nil) if err == nil { t.Error("Expected error for invalid JSON, but got none") } @@ -200,7 +200,7 @@ func TestOpenAPIDownloader_Download_Timeout(t *testing.T) { }() downloader := &OpenAPIDownloader{} - _, err = downloader.Download(server.URL+"/openapi.json", tmpDir) + _, err = downloader.Download(server.URL+"/openapi.json", tmpDir, nil) if err == nil { t.Error("Expected timeout error, but got none") } @@ -243,7 +243,7 @@ func TestOpenAPIDownloader_Download_WithExistingServers(t *testing.T) { }() downloader := &OpenAPIDownloader{} - filePath, err := downloader.Download(server.URL+"/openapi.json", tmpDir) + filePath, err := downloader.Download(server.URL+"/openapi.json", tmpDir, nil) if err != nil { t.Fatalf("Download failed: %v", err) } diff --git a/pkg/input/formats/swagger/downloader.go b/pkg/input/formats/swagger/downloader.go index 21bc3945df..40f0a2727a 100644 --- a/pkg/input/formats/swagger/downloader.go +++ b/pkg/input/formats/swagger/downloader.go @@ -4,6 +4,7 @@ import ( "encoding/json" "fmt" "io" + "log" "net/http" "net/url" "os" @@ -13,6 +14,7 @@ import ( "github.com/pkg/errors" "github.com/projectdiscovery/nuclei/v3/pkg/input/formats" + 
"github.com/projectdiscovery/retryablehttp-go" "gopkg.in/yaml.v3" ) @@ -25,7 +27,7 @@ func NewDownloader() formats.SpecDownloader { } // This function downloads a Swagger 2.0 spec from the given URL and saves it to tmpDir -func (d *SwaggerDownloader) Download(urlStr, tmpDir string) (string, error) { +func (d *SwaggerDownloader) Download(urlStr, tmpDir string, httpClient *retryablehttp.Client) (string, error) { // Swagger can be JSON or YAML supportedExts := d.SupportedExtensions() isSupported := false @@ -39,9 +41,17 @@ func (d *SwaggerDownloader) Download(urlStr, tmpDir string) (string, error) { return "", fmt.Errorf("URL does not appear to be a Swagger spec (supported: %v)", supportedExts) } - var httpTimeout = 30 * time.Second const maxSpecSizeBytes = 10 * 1024 * 1024 // 10MB - client := &http.Client{Timeout: httpTimeout} + + // Use provided httpClient or create a fallback + var client *http.Client + if httpClient != nil { + client = httpClient.HTTPClient + } else { + // Fallback to simple client if no httpClient provided + log.Fatal("no httpClient provided") + client = &http.Client{Timeout: 30 * time.Second} + } resp, err := client.Get(urlStr) if err != nil { diff --git a/pkg/input/formats/swagger/downloader_test.go b/pkg/input/formats/swagger/downloader_test.go index 41d9958d23..d55b57395d 100644 --- a/pkg/input/formats/swagger/downloader_test.go +++ b/pkg/input/formats/swagger/downloader_test.go @@ -68,7 +68,7 @@ func TestSwaggerDownloader_Download_JSON_Success(t *testing.T) { // Test download downloader := &SwaggerDownloader{} - filePath, err := downloader.Download(server.URL+"/swagger.json", tmpDir) + filePath, err := downloader.Download(server.URL+"/swagger.json", tmpDir, nil) if err != nil { t.Fatalf("Download failed: %v", err) } @@ -133,7 +133,7 @@ paths: // Test download downloader := &SwaggerDownloader{} - filePath, err := downloader.Download(server.URL+"/swagger.yaml", tmpDir) + filePath, err := downloader.Download(server.URL+"/swagger.yaml", tmpDir, 
nil) if err != nil { t.Fatalf("Download failed: %v", err) } @@ -174,7 +174,7 @@ func TestSwaggerDownloader_Download_UnsupportedExtension(t *testing.T) { }() downloader := &SwaggerDownloader{} - _, err = downloader.Download("http://example.com/spec.xml", tmpDir) + _, err = downloader.Download("http://example.com/spec.xml", tmpDir, nil) if err == nil { t.Error("Expected error for unsupported extension, but got none") } @@ -203,7 +203,7 @@ func TestSwaggerDownloader_Download_HTTPError(t *testing.T) { }() downloader := &SwaggerDownloader{} - _, err = downloader.Download(server.URL+"/swagger.json", tmpDir) + _, err = downloader.Download(server.URL+"/swagger.json", tmpDir, nil) if err == nil { t.Error("Expected error for HTTP 404, but got none") } @@ -231,7 +231,7 @@ func TestSwaggerDownloader_Download_InvalidJSON(t *testing.T) { }() downloader := &SwaggerDownloader{} - _, err = downloader.Download(server.URL+"/swagger.json", tmpDir) + _, err = downloader.Download(server.URL+"/swagger.json", tmpDir, nil) if err == nil { t.Error("Expected error for invalid JSON, but got none") } @@ -259,7 +259,7 @@ func TestSwaggerDownloader_Download_InvalidYAML(t *testing.T) { }() downloader := &SwaggerDownloader{} - _, err = downloader.Download(server.URL+"/swagger.yaml", tmpDir) + _, err = downloader.Download(server.URL+"/swagger.yaml", tmpDir, nil) if err == nil { t.Error("Expected error for invalid YAML, but got none") } @@ -287,7 +287,7 @@ func TestSwaggerDownloader_Download_Timeout(t *testing.T) { }() downloader := &SwaggerDownloader{} - _, err = downloader.Download(server.URL+"/swagger.json", tmpDir) + _, err = downloader.Download(server.URL+"/swagger.json", tmpDir, nil) if err == nil { t.Error("Expected timeout error, but got none") } @@ -326,7 +326,7 @@ func TestSwaggerDownloader_Download_WithExistingHost(t *testing.T) { }() downloader := &SwaggerDownloader{} - filePath, err := downloader.Download(server.URL+"/swagger.json", tmpDir) + filePath, err := 
downloader.Download(server.URL+"/swagger.json", tmpDir, nil) if err != nil { t.Fatalf("Download failed: %v", err) } diff --git a/pkg/input/provider/interface.go b/pkg/input/provider/interface.go index 881f59aa33..33cfbee7fd 100644 --- a/pkg/input/provider/interface.go +++ b/pkg/input/provider/interface.go @@ -14,7 +14,9 @@ import ( "github.com/projectdiscovery/nuclei/v3/pkg/input/types" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/contextargs" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/generators" + "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/protocolstate" configTypes "github.com/projectdiscovery/nuclei/v3/pkg/types" + "github.com/projectdiscovery/retryablehttp-go" "github.com/projectdiscovery/utils/errkit" stringsutil "github.com/projectdiscovery/utils/strings" ) @@ -124,13 +126,22 @@ func NewInputProvider(opts InputOptions) (InputProvider, error) { var tempFile string var err error + // Get HttpClient from protocolstate if available + var httpClient *retryablehttp.Client + if opts.Options.ExecutionId != "" { + dialers := protocolstate.GetDialersWithId(opts.Options.ExecutionId) + if dialers != nil { + httpClient = dialers.DefaultHTTPClient + } + } + switch strings.ToLower(opts.Options.InputFileMode) { case "openapi": downloader = openapi.NewDownloader() - tempFile, err = downloader.Download(target, opts.TempDir) + tempFile, err = downloader.Download(target, opts.TempDir, httpClient) case "swagger": downloader = swagger.NewDownloader() - tempFile, err = downloader.Download(target, opts.TempDir) + tempFile, err = downloader.Download(target, opts.TempDir, httpClient) default: return nil, fmt.Errorf("unsupported input mode: %s", opts.Options.InputFileMode) } From c814128ee2c924d4d8b62c5aef0a198eb6465d7d Mon Sep 17 00:00:00 2001 From: Mzack9999 Date: Wed, 29 Oct 2025 19:54:51 +0400 Subject: [PATCH 12/55] removing debug log --- pkg/input/formats/openapi/downloader.go | 2 -- pkg/input/formats/swagger/downloader.go | 2 -- 2 
files changed, 4 deletions(-) diff --git a/pkg/input/formats/openapi/downloader.go b/pkg/input/formats/openapi/downloader.go index b7c363aad2..955fdc50c6 100644 --- a/pkg/input/formats/openapi/downloader.go +++ b/pkg/input/formats/openapi/downloader.go @@ -4,7 +4,6 @@ import ( "encoding/json" "fmt" "io" - "log" "net/http" "net/url" "os" @@ -40,7 +39,6 @@ func (d *OpenAPIDownloader) Download(urlStr, tmpDir string, httpClient *retryabl client = httpClient.HTTPClient } else { // Fallback to simple client if no httpClient provided - log.Fatal("no httpClient provided") client = &http.Client{Timeout: 30 * time.Second} } diff --git a/pkg/input/formats/swagger/downloader.go b/pkg/input/formats/swagger/downloader.go index 40f0a2727a..b6b5a333f8 100644 --- a/pkg/input/formats/swagger/downloader.go +++ b/pkg/input/formats/swagger/downloader.go @@ -4,7 +4,6 @@ import ( "encoding/json" "fmt" "io" - "log" "net/http" "net/url" "os" @@ -49,7 +48,6 @@ func (d *SwaggerDownloader) Download(urlStr, tmpDir string, httpClient *retryabl client = httpClient.HTTPClient } else { // Fallback to simple client if no httpClient provided - log.Fatal("no httpClient provided") client = &http.Client{Timeout: 30 * time.Second} } From 17360cb6e41ed2bbfb8151b7b44123d011dd662d Mon Sep 17 00:00:00 2001 From: Dwi Siswanto Date: Tue, 23 Sep 2025 19:38:12 +0700 Subject: [PATCH 13/55] fix: restore parallel processing in workflow & file proto add missing `go` keyword to anonymous funcs that were intended to run as goroutines but were executing synchronously instead. 
Fixes #6492 Signed-off-by: Dwi Siswanto --- pkg/core/workflow_execute.go | 2 +- pkg/protocols/file/request.go | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/core/workflow_execute.go b/pkg/core/workflow_execute.go index 55d19dd677..735db753ca 100644 --- a/pkg/core/workflow_execute.go +++ b/pkg/core/workflow_execute.go @@ -36,7 +36,7 @@ func (e *Engine) executeWorkflow(ctx *scan.ScanContext, w *workflows.Workflow) b for _, template := range w.Workflows { swg.Add() - func(template *workflows.WorkflowTemplate) { + go func(template *workflows.WorkflowTemplate) { defer swg.Done() if err := e.runWorkflowStep(template, ctx, results, swg, w); err != nil { diff --git a/pkg/protocols/file/request.go b/pkg/protocols/file/request.go index 853cbb602f..b715a4bfc9 100644 --- a/pkg/protocols/file/request.go +++ b/pkg/protocols/file/request.go @@ -59,7 +59,7 @@ func (request *Request) ExecuteWithResults(input *contextargs.Context, metadata, } err = request.getInputPaths(input.MetaInput.Input, func(filePath string) { wg.Add() - func(filePath string) { + go func(filePath string) { defer wg.Done() fi, err := os.Open(filePath) if err != nil { From 984deac200a42900427d2c8848c5abce7bc7b842 Mon Sep 17 00:00:00 2001 From: Dwi Siswanto Date: Tue, 23 Sep 2025 23:55:42 +0700 Subject: [PATCH 14/55] test: adds `Test(FileProtocol|Workflows)ConcurrentExecution` tests Signed-off-by: Dwi Siswanto --- pkg/core/workflow_execute_test.go | 112 +++++++++++++++++++++++++++++ pkg/protocols/file/request_test.go | 77 ++++++++++++++++++++ 2 files changed, 189 insertions(+) diff --git a/pkg/core/workflow_execute_test.go b/pkg/core/workflow_execute_test.go index 0c478a5e13..c8fa9d387e 100644 --- a/pkg/core/workflow_execute_test.go +++ b/pkg/core/workflow_execute_test.go @@ -2,7 +2,9 @@ package core import ( "context" + "sync" "testing" + "time" "github.com/projectdiscovery/nuclei/v3/pkg/model/types/stringslice" "github.com/projectdiscovery/nuclei/v3/pkg/operators" @@ -175,6 +177,116 @@ 
func TestWorkflowsSubtemplatesWithMatcherNoMatch(t *testing.T) { require.Equal(t, "", secondInput, "could not get correct second input") } +func TestWorkflowsConcurrentExecution(t *testing.T) { + progressBar, _ := progress.NewStatsTicker(0, false, false, false, 0) + + numTemplates := 4 + processingTime := 40 * time.Millisecond + var allExecutionTimes []time.Time + var timesMutex sync.Mutex + var executedInputs []string + var inputsMutex sync.Mutex + + var workflowTemplates []*workflows.WorkflowTemplate + for range numTemplates { + template := &workflows.WorkflowTemplate{ + Executers: []*workflows.ProtocolExecuterPair{{ + Executer: &timedMockExecuter{ + result: true, + processingTime: processingTime, + timesMutex: ×Mutex, + executeHook: func(input *contextargs.MetaInput) { + inputsMutex.Lock() + executedInputs = append(executedInputs, input.Input) + inputsMutex.Unlock() + }, + }, + Options: &protocols.ExecutorOptions{Progress: progressBar}, + }}, + } + workflowTemplates = append(workflowTemplates, template) + } + + workflow := &workflows.Workflow{ + Options: &protocols.ExecutorOptions{ + Options: &types.Options{TemplateThreads: numTemplates}, + }, + Workflows: workflowTemplates, + } + + engine := &Engine{} + input := contextargs.NewWithInput(context.Background(), "https://test.com") + ctx := scan.NewScanContext(context.Background(), input) + + startTime := time.Now() + matched := engine.executeWorkflow(ctx, workflow) + totalTime := time.Since(startTime) + + // Collect execution times from all executers + for _, template := range workflowTemplates { + for _, executer := range template.Executers { + if timedExec, ok := executer.Executer.(*timedMockExecuter); ok { + timesMutex.Lock() + allExecutionTimes = append(allExecutionTimes, timedExec.executionTimes...) 
+ timesMutex.Unlock() + } + } + } + + t.Logf("Workflow execution completed in: %v", totalTime) + t.Logf("Templates executed: %d", len(executedInputs)) + t.Logf("Execution times collected: %d", len(allExecutionTimes)) + + // test 1: verify workflow execution completed successfully + require.True(t, matched, "Workflow execution should have matched") + + // test 2: verify all templates were executed + inputsMutex.Lock() + require.Equal(t, numTemplates, len(executedInputs), "All templates should have been executed") + inputsMutex.Unlock() +} + +// timedMockExecuter extends mockExecuter with timing capabilities for concurrency testing +type timedMockExecuter struct { + result bool + executeHook func(input *contextargs.MetaInput) + outputs []*output.InternalWrappedEvent + processingTime time.Duration + executionTimes []time.Time + timesMutex *sync.Mutex +} + +func (m *timedMockExecuter) Compile() error { return nil } +func (m *timedMockExecuter) Requests() int { return 1 } + +func (m *timedMockExecuter) Execute(ctx *scan.ScanContext) (bool, error) { + // Track execution start time + if m.timesMutex != nil { + m.timesMutex.Lock() + m.executionTimes = append(m.executionTimes, time.Now()) + m.timesMutex.Unlock() + } + + if m.executeHook != nil { + m.executeHook(ctx.Input.MetaInput) + } + + // Simulate processing time + if m.processingTime > 0 { + time.Sleep(m.processingTime) + } + + return m.result, nil +} + +func (m *timedMockExecuter) ExecuteWithResults(ctx *scan.ScanContext) ([]*output.ResultEvent, error) { + _, err := m.Execute(ctx) + for _, output := range m.outputs { + ctx.LogEvent(output) + } + return ctx.GenerateResult(), err +} + type mockExecuter struct { result bool executeHook func(input *contextargs.MetaInput) diff --git a/pkg/protocols/file/request_test.go b/pkg/protocols/file/request_test.go index 118d1885c4..f305eab2c4 100644 --- a/pkg/protocols/file/request_test.go +++ b/pkg/protocols/file/request_test.go @@ -7,7 +7,10 @@ import ( "context" "os" 
"path/filepath" + "sync" + "sync/atomic" "testing" + "time" "github.com/stretchr/testify/require" @@ -132,3 +135,77 @@ func TestFileExecuteWithResults(t *testing.T) { finalEvent = nil } } + +func TestFileProtocolConcurrentExecution(t *testing.T) { + tempDir, err := os.MkdirTemp("", "nuclei-test-*") + require.NoError(t, err) + defer os.RemoveAll(tempDir) + + numFiles := 5 + for i := range numFiles { + content := "TEST_CONTENT_MATCH_DATA" + filePath := filepath.Join(tempDir, "test_"+string(rune('0'+i))+".txt") + err := os.WriteFile(filePath, []byte(content), permissionutil.TempFilePermission) + require.NoError(t, err) + } + + options := testutils.DefaultOptions + testutils.Init(options) + templateID := "testing-file-concurrent" + executerOpts := testutils.NewMockExecuterOptions(options, &testutils.TemplateInfo{ + ID: templateID, + Info: model.Info{SeverityHolder: severity.Holder{Severity: severity.Low}, Name: "test"}, + }) + + var timesMutex sync.Mutex + var processedFiles int64 + + request := &Request{ + ID: templateID, + MaxSize: "1Gb", + NoRecursive: false, + Extensions: []string{"txt"}, + Archive: false, + Operators: operators.Operators{ + Matchers: []*matchers.Matcher{{ + Name: "test", + Part: "raw", + Type: matchers.MatcherTypeHolder{MatcherType: matchers.WordsMatcher}, + Words: []string{"TEST_CONTENT_MATCH_DATA"}, + }}, + }, + options: executerOpts, + } + + err = request.Compile(executerOpts) + require.NoError(t, err) + + input := contextargs.NewWithInput(context.Background(), tempDir) + var results []*output.InternalWrappedEvent + var resultMutex sync.Mutex + + startTime := time.Now() + err = request.ExecuteWithResults(input, make(output.InternalEvent), make(output.InternalEvent), func(event *output.InternalWrappedEvent) { + atomic.AddInt64(&processedFiles, 1) + resultMutex.Lock() + results = append(results, event) + resultMutex.Unlock() + + // small delay to make timing differences more observable + time.Sleep(10 * time.Millisecond) + }) + totalTime := 
time.Since(startTime) + require.NoError(t, err) + + finalProcessedFiles := atomic.LoadInt64(&processedFiles) + t.Logf("Total execution time: %v", totalTime) + t.Logf("Files processed: %d", finalProcessedFiles) + t.Logf("Results returned: %d", len(results)) + + // test 1: all files should be processed + require.Equal(t, int64(numFiles), finalProcessedFiles, "Not all files were processed") + + // test 2: verify callback invocation timing shows concurrency + timesMutex.Lock() + defer timesMutex.Unlock() +} From e60f4158ea4f3ef92106b46ad707412e1bf4495f Mon Sep 17 00:00:00 2001 From: Dwi Siswanto Date: Wed, 24 Sep 2025 00:05:15 +0700 Subject: [PATCH 15/55] chore(file): satisfy lints Signed-off-by: Dwi Siswanto --- pkg/protocols/file/request_test.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pkg/protocols/file/request_test.go b/pkg/protocols/file/request_test.go index f305eab2c4..3f20de2ed5 100644 --- a/pkg/protocols/file/request_test.go +++ b/pkg/protocols/file/request_test.go @@ -139,7 +139,10 @@ func TestFileExecuteWithResults(t *testing.T) { func TestFileProtocolConcurrentExecution(t *testing.T) { tempDir, err := os.MkdirTemp("", "nuclei-test-*") require.NoError(t, err) - defer os.RemoveAll(tempDir) + + defer func() { + _ = os.RemoveAll(tempDir) + }() numFiles := 5 for i := range numFiles { From ed1c5752f35bb2e4d4974451e7a629a709f9c5b4 Mon Sep 17 00:00:00 2001 From: Dwi Siswanto Date: Wed, 24 Sep 2025 18:09:31 +0700 Subject: [PATCH 16/55] refactor(integration-test): enhance debug mode detects * replace hardcoded `DEBUG` env var check with extensible helper func. * add support for GitHub Actions Runner env var. * accept multiple truthy value variants. 
Signed-off-by: Dwi Siswanto --- cmd/integration-test/integration-test.go | 29 +++++++++++++++++++++--- 1 file changed, 26 insertions(+), 3 deletions(-) diff --git a/cmd/integration-test/integration-test.go b/cmd/integration-test/integration-test.go index 10af587c04..0f16192e37 100644 --- a/cmd/integration-test/integration-test.go +++ b/cmd/integration-test/integration-test.go @@ -6,6 +6,7 @@ import ( "os" "regexp" "runtime" + "slices" "strings" "github.com/kitabisa/go-ci" @@ -24,7 +25,7 @@ type TestCaseInfo struct { } var ( - debug = os.Getenv("DEBUG") == "true" + debug = isDebugMode() customTests = os.Getenv("TESTS") protocol = os.Getenv("PROTO") @@ -59,6 +60,7 @@ var ( "matcher-status": matcherStatusTestcases, "exporters": exportersTestCases, } + // flakyTests are run with a retry count of 3 flakyTests = map[string]bool{ "protocols/http/self-contained-file-input.yaml": true, @@ -89,11 +91,12 @@ func main() { } // start fuzz playground server - defer fuzzplayground.Cleanup() server := fuzzplayground.GetPlaygroundServer() defer func() { + fuzzplayground.Cleanup() _ = server.Close() }() + go func() { if err := server.Start("localhost:8082"); err != nil { if !strings.Contains(err.Error(), "Server closed") { @@ -103,7 +106,6 @@ func main() { }() customTestsList := normalizeSplit(customTests) - failedTestTemplatePaths := runTests(customTestsList) if len(failedTestTemplatePaths) > 0 { @@ -130,6 +132,27 @@ func main() { } } +// isDebugMode checks if debug mode is enabled via any of the supported debug +// environment variables. 
+func isDebugMode() bool { + debugEnvVars := []string{ + "DEBUG", + "ACTIONS_RUNNER_DEBUG", // GitHub Actions runner debug + // Add more debug environment variables here as needed + } + + truthyValues := []string{"true", "1", "yes", "on", "enabled"} + + for _, envVar := range debugEnvVars { + envValue := strings.ToLower(strings.TrimSpace(os.Getenv(envVar))) + if slices.Contains(truthyValues, envValue) { + return true + } + } + + return false +} + // execute a testcase with retry and consider best of N // intended for flaky tests like interactsh func executeWithRetry(testCase testutils.TestCase, templatePath string, retryCount int) (string, error) { From 133e8b98c083339a216970144686b1858dc47c76 Mon Sep 17 00:00:00 2001 From: Dwi Siswanto Date: Thu, 25 Sep 2025 09:23:10 +0700 Subject: [PATCH 17/55] fix(core): race cond in workflow execution caused by shared context callbacks. it was exposed after adding concurrent exec to workflow processing and occurred when multiple goroutines attempted to write to the same `ctx.OnResult` callback field simultaneously, causing data races during workflow template exec. 
Signed-off-by: Dwi Siswanto --- pkg/core/workflow_execute.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pkg/core/workflow_execute.go b/pkg/core/workflow_execute.go index 735db753ca..4ec592d6a7 100644 --- a/pkg/core/workflow_execute.go +++ b/pkg/core/workflow_execute.go @@ -38,8 +38,8 @@ func (e *Engine) executeWorkflow(ctx *scan.ScanContext, w *workflows.Workflow) b go func(template *workflows.WorkflowTemplate) { defer swg.Done() - - if err := e.runWorkflowStep(template, ctx, results, swg, w); err != nil { + newCtx := scan.NewScanContext(ctx.Context(), ctx.Input.Clone()) + if err := e.runWorkflowStep(template, newCtx, results, swg, w); err != nil { gologger.Warning().Msgf(workflowStepExecutionError, template.Template, err) } }(template) From 1093bbc62dd10837a177ab3f844b977f19cde6bb Mon Sep 17 00:00:00 2001 From: Mzack9999 Date: Fri, 31 Oct 2025 16:20:30 +0400 Subject: [PATCH 18/55] introducing workflow sequential mode --- pkg/core/workflow_execute.go | 28 +++++++++++++++++++--------- pkg/workflows/workflows.go | 6 ++++++ 2 files changed, 25 insertions(+), 9 deletions(-) diff --git a/pkg/core/workflow_execute.go b/pkg/core/workflow_execute.go index 4ec592d6a7..2d789ffa45 100644 --- a/pkg/core/workflow_execute.go +++ b/pkg/core/workflow_execute.go @@ -27,24 +27,34 @@ func (e *Engine) executeWorkflow(ctx *scan.ScanContext, w *workflows.Workflow) b // we can know the nesting level only at runtime, so the best we can do here is increase template threads by one unit in case it's equal to 1 to allow // at least one subtemplate to go through, which it's idempotent to one in-flight template as the parent one is in an idle state + templateThreads := w.Options.Options.TemplateThreads if templateThreads == 1 { templateThreads++ } + + runWorkflowStep := func(template *workflows.WorkflowTemplate, ctx *scan.ScanContext, results *atomic.Bool, swg *syncutil.AdaptiveWaitGroup, w *workflows.Workflow) { + if err := e.runWorkflowStep(template, ctx, results, 
swg, w); err != nil { + gologger.Warning().Msgf(workflowStepExecutionError, template.Template, err) + } + } + swg, _ := syncutil.New(syncutil.WithSize(templateThreads)) for _, template := range w.Workflows { - swg.Add() - - go func(template *workflows.WorkflowTemplate) { - defer swg.Done() - newCtx := scan.NewScanContext(ctx.Context(), ctx.Input.Clone()) - if err := e.runWorkflowStep(template, newCtx, results, swg, w); err != nil { - gologger.Warning().Msgf(workflowStepExecutionError, template.Template, err) - } - }(template) + if w.Mode == "sequential" { + runWorkflowStep(template, ctx, results, swg, w) + } else { + swg.Add() + go func(template *workflows.WorkflowTemplate) { + defer swg.Done() + newCtx := scan.NewScanContext(ctx.Context(), ctx.Input.Clone()) + runWorkflowStep(template, newCtx, results, swg, w) + }(template) + } } swg.Wait() + return results.Load() } diff --git a/pkg/workflows/workflows.go b/pkg/workflows/workflows.go index 5f41329ab3..bb8b308faf 100644 --- a/pkg/workflows/workflows.go +++ b/pkg/workflows/workflows.go @@ -11,6 +11,12 @@ import ( // Workflow is a workflow to execute with chained requests, etc. type Workflow struct { + // description: | + // Mode to execute workflow (default: "concurrent"). + // values: + // - empty|concurrent (default) + // - sequential + Mode string `yaml:"mode,omitempty" json:"mode,omitempty" jsonschema:"title=mode to execute workflow,description=Mode to execute workflow,enum=concurrent,enum=sequential"` // description: | // Workflows is a list of workflows to execute for a template. 
Workflows []*WorkflowTemplate `yaml:"workflows,omitempty" json:"workflows,omitempty" jsonschema:"title=list of workflows to execute,description=List of workflows to execute for template"` From f544ea706c9789e11d35e67cfff07db8ceb463ff Mon Sep 17 00:00:00 2001 From: Dwi Siswanto Date: Sun, 2 Nov 2025 21:24:40 +0700 Subject: [PATCH 19/55] Revert "introducing workflow sequential mode" This reverts commit 1093bbc62dd10837a177ab3f844b977f19cde6bb. --- pkg/core/workflow_execute.go | 28 +++++++++------------------- pkg/workflows/workflows.go | 6 ------ 2 files changed, 9 insertions(+), 25 deletions(-) diff --git a/pkg/core/workflow_execute.go b/pkg/core/workflow_execute.go index 2d789ffa45..4ec592d6a7 100644 --- a/pkg/core/workflow_execute.go +++ b/pkg/core/workflow_execute.go @@ -27,34 +27,24 @@ func (e *Engine) executeWorkflow(ctx *scan.ScanContext, w *workflows.Workflow) b // we can know the nesting level only at runtime, so the best we can do here is increase template threads by one unit in case it's equal to 1 to allow // at least one subtemplate to go through, which it's idempotent to one in-flight template as the parent one is in an idle state - templateThreads := w.Options.Options.TemplateThreads if templateThreads == 1 { templateThreads++ } - - runWorkflowStep := func(template *workflows.WorkflowTemplate, ctx *scan.ScanContext, results *atomic.Bool, swg *syncutil.AdaptiveWaitGroup, w *workflows.Workflow) { - if err := e.runWorkflowStep(template, ctx, results, swg, w); err != nil { - gologger.Warning().Msgf(workflowStepExecutionError, template.Template, err) - } - } - swg, _ := syncutil.New(syncutil.WithSize(templateThreads)) for _, template := range w.Workflows { - if w.Mode == "sequential" { - runWorkflowStep(template, ctx, results, swg, w) - } else { - swg.Add() - go func(template *workflows.WorkflowTemplate) { - defer swg.Done() - newCtx := scan.NewScanContext(ctx.Context(), ctx.Input.Clone()) - runWorkflowStep(template, newCtx, results, swg, w) - }(template) - 
} + swg.Add() + + go func(template *workflows.WorkflowTemplate) { + defer swg.Done() + newCtx := scan.NewScanContext(ctx.Context(), ctx.Input.Clone()) + if err := e.runWorkflowStep(template, newCtx, results, swg, w); err != nil { + gologger.Warning().Msgf(workflowStepExecutionError, template.Template, err) + } + }(template) } swg.Wait() - return results.Load() } diff --git a/pkg/workflows/workflows.go b/pkg/workflows/workflows.go index bb8b308faf..5f41329ab3 100644 --- a/pkg/workflows/workflows.go +++ b/pkg/workflows/workflows.go @@ -11,12 +11,6 @@ import ( // Workflow is a workflow to execute with chained requests, etc. type Workflow struct { - // description: | - // Mode to execute workflow (default: "concurrent"). - // values: - // - empty|concurrent (default) - // - sequential - Mode string `yaml:"mode,omitempty" json:"mode,omitempty" jsonschema:"title=mode to execute workflow,description=Mode to execute workflow,enum=concurrent,enum=sequential"` // description: | // Workflows is a list of workflows to execute for a template. 
Workflows []*WorkflowTemplate `yaml:"workflows,omitempty" json:"workflows,omitempty" jsonschema:"title=list of workflows to execute,description=List of workflows to execute for template"` From 69c831bad2774fb907f1bf875fd367565baaced2 Mon Sep 17 00:00:00 2001 From: Dwi Siswanto Date: Sun, 2 Nov 2025 21:35:28 +0700 Subject: [PATCH 20/55] refactor(core): keep workflow exec seq Signed-off-by: Dwi Siswanto --- pkg/core/workflow_execute.go | 15 +++---- pkg/core/workflow_execute_test.go | 69 ------------------------------- 2 files changed, 6 insertions(+), 78 deletions(-) diff --git a/pkg/core/workflow_execute.go b/pkg/core/workflow_execute.go index 4ec592d6a7..697312aa47 100644 --- a/pkg/core/workflow_execute.go +++ b/pkg/core/workflow_execute.go @@ -34,17 +34,14 @@ func (e *Engine) executeWorkflow(ctx *scan.ScanContext, w *workflows.Workflow) b swg, _ := syncutil.New(syncutil.WithSize(templateThreads)) for _, template := range w.Workflows { - swg.Add() - - go func(template *workflows.WorkflowTemplate) { - defer swg.Done() - newCtx := scan.NewScanContext(ctx.Context(), ctx.Input.Clone()) - if err := e.runWorkflowStep(template, newCtx, results, swg, w); err != nil { - gologger.Warning().Msgf(workflowStepExecutionError, template.Template, err) - } - }(template) + newCtx := scan.NewScanContext(ctx.Context(), ctx.Input.Clone()) + if err := e.runWorkflowStep(template, newCtx, results, swg, w); err != nil { + gologger.Warning().Msgf(workflowStepExecutionError, template.Template, err) + } } + swg.Wait() + return results.Load() } diff --git a/pkg/core/workflow_execute_test.go b/pkg/core/workflow_execute_test.go index c8fa9d387e..19389fd303 100644 --- a/pkg/core/workflow_execute_test.go +++ b/pkg/core/workflow_execute_test.go @@ -177,75 +177,6 @@ func TestWorkflowsSubtemplatesWithMatcherNoMatch(t *testing.T) { require.Equal(t, "", secondInput, "could not get correct second input") } -func TestWorkflowsConcurrentExecution(t *testing.T) { - progressBar, _ := 
progress.NewStatsTicker(0, false, false, false, 0) - - numTemplates := 4 - processingTime := 40 * time.Millisecond - var allExecutionTimes []time.Time - var timesMutex sync.Mutex - var executedInputs []string - var inputsMutex sync.Mutex - - var workflowTemplates []*workflows.WorkflowTemplate - for range numTemplates { - template := &workflows.WorkflowTemplate{ - Executers: []*workflows.ProtocolExecuterPair{{ - Executer: &timedMockExecuter{ - result: true, - processingTime: processingTime, - timesMutex: ×Mutex, - executeHook: func(input *contextargs.MetaInput) { - inputsMutex.Lock() - executedInputs = append(executedInputs, input.Input) - inputsMutex.Unlock() - }, - }, - Options: &protocols.ExecutorOptions{Progress: progressBar}, - }}, - } - workflowTemplates = append(workflowTemplates, template) - } - - workflow := &workflows.Workflow{ - Options: &protocols.ExecutorOptions{ - Options: &types.Options{TemplateThreads: numTemplates}, - }, - Workflows: workflowTemplates, - } - - engine := &Engine{} - input := contextargs.NewWithInput(context.Background(), "https://test.com") - ctx := scan.NewScanContext(context.Background(), input) - - startTime := time.Now() - matched := engine.executeWorkflow(ctx, workflow) - totalTime := time.Since(startTime) - - // Collect execution times from all executers - for _, template := range workflowTemplates { - for _, executer := range template.Executers { - if timedExec, ok := executer.Executer.(*timedMockExecuter); ok { - timesMutex.Lock() - allExecutionTimes = append(allExecutionTimes, timedExec.executionTimes...) 
- timesMutex.Unlock() - } - } - } - - t.Logf("Workflow execution completed in: %v", totalTime) - t.Logf("Templates executed: %d", len(executedInputs)) - t.Logf("Execution times collected: %d", len(allExecutionTimes)) - - // test 1: verify workflow execution completed successfully - require.True(t, matched, "Workflow execution should have matched") - - // test 2: verify all templates were executed - inputsMutex.Lock() - require.Equal(t, numTemplates, len(executedInputs), "All templates should have been executed") - inputsMutex.Unlock() -} - // timedMockExecuter extends mockExecuter with timing capabilities for concurrency testing type timedMockExecuter struct { result bool From fb93cbeeda5b809ca9ad7a8d73f740d11b4a04db Mon Sep 17 00:00:00 2001 From: Dwi Siswanto Date: Sun, 2 Nov 2025 21:44:06 +0700 Subject: [PATCH 21/55] test(core): rm unused tests Signed-off-by: Dwi Siswanto --- pkg/core/workflow_execute_test.go | 43 ------------------------------- 1 file changed, 43 deletions(-) diff --git a/pkg/core/workflow_execute_test.go b/pkg/core/workflow_execute_test.go index 19389fd303..0c478a5e13 100644 --- a/pkg/core/workflow_execute_test.go +++ b/pkg/core/workflow_execute_test.go @@ -2,9 +2,7 @@ package core import ( "context" - "sync" "testing" - "time" "github.com/projectdiscovery/nuclei/v3/pkg/model/types/stringslice" "github.com/projectdiscovery/nuclei/v3/pkg/operators" @@ -177,47 +175,6 @@ func TestWorkflowsSubtemplatesWithMatcherNoMatch(t *testing.T) { require.Equal(t, "", secondInput, "could not get correct second input") } -// timedMockExecuter extends mockExecuter with timing capabilities for concurrency testing -type timedMockExecuter struct { - result bool - executeHook func(input *contextargs.MetaInput) - outputs []*output.InternalWrappedEvent - processingTime time.Duration - executionTimes []time.Time - timesMutex *sync.Mutex -} - -func (m *timedMockExecuter) Compile() error { return nil } -func (m *timedMockExecuter) Requests() int { return 1 } - -func (m 
*timedMockExecuter) Execute(ctx *scan.ScanContext) (bool, error) { - // Track execution start time - if m.timesMutex != nil { - m.timesMutex.Lock() - m.executionTimes = append(m.executionTimes, time.Now()) - m.timesMutex.Unlock() - } - - if m.executeHook != nil { - m.executeHook(ctx.Input.MetaInput) - } - - // Simulate processing time - if m.processingTime > 0 { - time.Sleep(m.processingTime) - } - - return m.result, nil -} - -func (m *timedMockExecuter) ExecuteWithResults(ctx *scan.ScanContext) ([]*output.ResultEvent, error) { - _, err := m.Execute(ctx) - for _, output := range m.outputs { - ctx.LogEvent(output) - } - return ctx.GenerateResult(), err -} - type mockExecuter struct { result bool executeHook func(input *contextargs.MetaInput) From 2829fd30bb51a2cf9dae9068230bc9dc3ea8e496 Mon Sep 17 00:00:00 2001 From: Niek den Breeje Date: Fri, 7 Nov 2025 12:01:46 +0100 Subject: [PATCH 22/55] fix(sdk): configure tmpDir for SDK Closes #6595. --- lib/sdk.go | 7 +++++++ lib/sdk_private.go | 32 +++++++++++++++++++------------- 2 files changed, 26 insertions(+), 13 deletions(-) diff --git a/lib/sdk.go b/lib/sdk.go index 3ed252178f..a8530f29af 100644 --- a/lib/sdk.go +++ b/lib/sdk.go @@ -5,6 +5,7 @@ import ( "bytes" "context" "io" + "os" "sync" "github.com/projectdiscovery/gologger" @@ -92,6 +93,9 @@ type NucleiEngine struct { // Logger instance for the engine Logger *gologger.Logger + + // General purpose temporary directory + tmpDir string } // LoadAllTemplates loads all nuclei template based on given options @@ -231,6 +235,9 @@ func (e *NucleiEngine) closeInternal() { if e.httpxClient != nil { _ = e.httpxClient.Close() } + if e.tmpDir != "" { + _ = os.RemoveAll(e.tmpDir) + } } // Close all resources used by nuclei engine diff --git a/lib/sdk_private.go b/lib/sdk_private.go index d80a0fd068..9a2ba9b110 100644 --- a/lib/sdk_private.go +++ b/lib/sdk_private.go @@ -3,6 +3,7 @@ package nuclei import ( "context" "fmt" + "os" "strings" "sync" "time" @@ -170,20 +171,25 @@ func 
(e *NucleiEngine) init(ctx context.Context) error { e.catalog = disk.NewCatalog(config.DefaultConfig.TemplatesDirectory) } + if tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*"); err == nil { + e.tmpDir = tmpDir + } + e.executerOpts = &protocols.ExecutorOptions{ - Output: e.customWriter, - Options: e.opts, - Progress: e.customProgress, - Catalog: e.catalog, - IssuesClient: e.rc, - RateLimiter: e.rateLimiter, - Interactsh: e.interactshClient, - Colorizer: aurora.NewAurora(true), - ResumeCfg: types.NewResumeCfg(), - Browser: e.browserInstance, - Parser: e.parser, - InputHelper: input.NewHelper(), - Logger: e.opts.Logger, + Output: e.customWriter, + Options: e.opts, + Progress: e.customProgress, + Catalog: e.catalog, + IssuesClient: e.rc, + RateLimiter: e.rateLimiter, + Interactsh: e.interactshClient, + Colorizer: aurora.NewAurora(true), + ResumeCfg: types.NewResumeCfg(), + Browser: e.browserInstance, + Parser: e.parser, + InputHelper: input.NewHelper(), + TemporaryDirectory: e.tmpDir, + Logger: e.opts.Logger, } if e.opts.ShouldUseHostError() && e.hostErrCache != nil { e.executerOpts.HostErrorsCache = e.hostErrCache From 7b0c6fb782c27c636e066a71fe57920a5838e9a2 Mon Sep 17 00:00:00 2001 From: Niek den Breeje Date: Fri, 7 Nov 2025 12:14:45 +0100 Subject: [PATCH 23/55] docs(sdk): update comment to more accurately reflect purpose --- lib/sdk.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/sdk.go b/lib/sdk.go index a8530f29af..99523c79a0 100644 --- a/lib/sdk.go +++ b/lib/sdk.go @@ -94,7 +94,7 @@ type NucleiEngine struct { // Logger instance for the engine Logger *gologger.Logger - // General purpose temporary directory + // Temporary directory for SDK-managed template files tmpDir string } From 96bfb2bac1261777f4d5e2ed5a779c4b78b632b8 Mon Sep 17 00:00:00 2001 From: Niek den Breeje Date: Mon, 10 Nov 2025 07:31:18 +0100 Subject: [PATCH 24/55] feat(sdk): add tmpDir configuration option for SDK users --- lib/config.go | 11 +++++++++++ 1 file changed, 11 
insertions(+) diff --git a/lib/config.go b/lib/config.go index cdc56ce063..057fdef060 100644 --- a/lib/config.go +++ b/lib/config.go @@ -559,3 +559,14 @@ func WithOptions(opts *pkgtypes.Options) NucleiSDKOptions { return nil } } + +// WithTemporaryDirectory allows setting a parent directory for SDK-managed temporary files. +// A temporary directory will be created inside the provided directory and cleaned up on engine close. +// If not set, a temporary directory will be automatically created in the system temp location. +// The parent directory will be created if it doesn't exist. +func WithTemporaryDirectory(parentDir string) NucleiSDKOptions { + return func(e *NucleiEngine) error { + e.tmpDir = parentDir + return nil + } +} From efcc8e95c3f5c6fd0185a578d73a30741c18222a Mon Sep 17 00:00:00 2001 From: Niek den Breeje Date: Mon, 10 Nov 2025 07:54:54 +0100 Subject: [PATCH 25/55] fix(sdk): init default engine tmpDir when unconfigured --- lib/sdk_private.go | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/sdk_private.go b/lib/sdk_private.go index 9a2ba9b110..2c40fbf620 100644 --- a/lib/sdk_private.go +++ b/lib/sdk_private.go @@ -171,8 +171,12 @@ func (e *NucleiEngine) init(ctx context.Context) error { e.catalog = disk.NewCatalog(config.DefaultConfig.TemplatesDirectory) } - if tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*"); err == nil { - e.tmpDir = tmpDir + if e.tmpDir == "" { + if tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*"); err != nil { + return err + } else { + e.tmpDir = tmpDir + } } e.executerOpts = &protocols.ExecutorOptions{ From 87d62d3a8ff87a7e30fc023ee66f565fef3e8be4 Mon Sep 17 00:00:00 2001 From: Niek den Breeje Date: Mon, 10 Nov 2025 08:18:14 +0100 Subject: [PATCH 26/55] style(sdk): remove unnecessary else block --- lib/sdk_private.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/sdk_private.go b/lib/sdk_private.go index 2c40fbf620..ba394f024c 100644 --- a/lib/sdk_private.go +++ 
b/lib/sdk_private.go @@ -172,11 +172,11 @@ func (e *NucleiEngine) init(ctx context.Context) error { } if e.tmpDir == "" { - if tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*"); err != nil { + tmpDir, err := os.MkdirTemp("", "nuclei-tmp-*") + if err != nil { return err - } else { - e.tmpDir = tmpDir } + e.tmpDir = tmpDir } e.executerOpts = &protocols.ExecutorOptions{ From 3eff4146e120d1bfcd2983a0abe7335bf535d6fb Mon Sep 17 00:00:00 2001 From: Niek den Breeje Date: Mon, 10 Nov 2025 10:41:56 +0100 Subject: [PATCH 27/55] feat(sdk): create parent & tmp dir in WithTemporaryDirectory --- lib/config.go | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/lib/config.go b/lib/config.go index 057fdef060..bc8e09f3e6 100644 --- a/lib/config.go +++ b/lib/config.go @@ -3,6 +3,7 @@ package nuclei import ( "context" "errors" + "os" "time" "github.com/projectdiscovery/goflags" @@ -566,7 +567,14 @@ func WithOptions(opts *pkgtypes.Options) NucleiSDKOptions { // The parent directory will be created if it doesn't exist. 
func WithTemporaryDirectory(parentDir string) NucleiSDKOptions { return func(e *NucleiEngine) error { - e.tmpDir = parentDir + if err := os.MkdirAll(parentDir, 0755); err != nil { + return err + } + tmpDir, err := os.MkdirTemp(parentDir, "nuclei-tmp-*") + if err != nil { + return err + } + e.tmpDir = tmpDir return nil } } From 954c825fab32dc3bd3e4999a10899e00c8076e40 Mon Sep 17 00:00:00 2001 From: Dwi Siswanto Date: Wed, 12 Nov 2025 08:44:26 +0700 Subject: [PATCH 28/55] test(cmd): enable `BenchmarkRunEnumeration/Default` bench Signed-off-by: Dwi Siswanto --- cmd/nuclei/main_benchmark_test.go | 13 ++++++------- 1 file changed, 6 insertions(+), 7 deletions(-) diff --git a/cmd/nuclei/main_benchmark_test.go b/cmd/nuclei/main_benchmark_test.go index 04e17bf904..7155f357eb 100644 --- a/cmd/nuclei/main_benchmark_test.go +++ b/cmd/nuclei/main_benchmark_test.go @@ -106,10 +106,9 @@ func runEnumBenchmark(b *testing.B, options *types.Options) { } defer nucleiRunner.Close() - b.ResetTimer() b.ReportAllocs() - for i := 0; i < b.N; i++ { + for b.Loop() { if err := nucleiRunner.RunEnumeration(); err != nil { b.Fatalf("%s failed: %s", b.Name(), err) } @@ -118,12 +117,12 @@ func runEnumBenchmark(b *testing.B, options *types.Options) { func BenchmarkRunEnumeration(b *testing.B) { // Default case: run enumeration with default options == all nuclei-templates - // b.Run("Default", func(b *testing.B) { - // options := getDefaultOptions() - // options.Targets = []string{targetURL} + b.Run("Default", func(b *testing.B) { + options := getDefaultOptions() + options.Targets = []string{targetURL} - // runEnumBenchmark(b, options) - // }) + runEnumBenchmark(b, options) + }) // Case: https://github.com/projectdiscovery/nuclei/pull/6258 b.Run("Multiproto", func(b *testing.B) { From 0f5d0d797601843f9ce4b7ef4f19c05a17feb751 Mon Sep 17 00:00:00 2001 From: Dwi Siswanto Date: Wed, 12 Nov 2025 08:47:57 +0700 Subject: [PATCH 29/55] test(cmd): collect CPU & heap profiles Signed-off-by: Dwi Siswanto --- 
cmd/nuclei/main_benchmark_test.go | 61 +++++++++++++++++++++++++++++++ 1 file changed, 61 insertions(+) diff --git a/cmd/nuclei/main_benchmark_test.go b/cmd/nuclei/main_benchmark_test.go index 7155f357eb..e4d315986b 100644 --- a/cmd/nuclei/main_benchmark_test.go +++ b/cmd/nuclei/main_benchmark_test.go @@ -1,9 +1,13 @@ package main_test import ( + "fmt" "net/http" "net/http/httptest" "os" + "runtime" + "runtime/pprof" + "strings" "testing" "time" @@ -48,6 +52,31 @@ func TestMain(m *testing.M) { os.Exit(exitCode) } +// getUniqFilename generates a unique filename by appending .N if file exists +// Similar to wget's behavior: file.cpu.prof, file.cpu.1.prof, file.cpu.2.prof, etc. +func getUniqFilename(basePath string) string { + if _, err := os.Stat(basePath); os.IsNotExist(err) { + return basePath + } + + lastDot := strings.LastIndex(basePath, ".") + var name, ext string + if lastDot != -1 { + name = basePath[:lastDot] + ext = basePath[lastDot:] + } else { + name = basePath + ext = "" + } + + for i := 1; ; i++ { + newPath := fmt.Sprintf("%s.%d%s", name, i, ext) + if _, err := os.Stat(newPath); os.IsNotExist(err) { + return newPath + } + } +} + func getDefaultOptions() *types.Options { return &types.Options{ RemoteTemplateDomainList: []string{"cloud.projectdiscovery.io"}, @@ -106,6 +135,22 @@ func runEnumBenchmark(b *testing.B, options *types.Options) { } defer nucleiRunner.Close() + benchNameSlug := strings.ReplaceAll(b.Name(), "/", "-") + + // Start CPU profiling + cpuProfileBase := fmt.Sprintf("%s.cpu.prof", benchNameSlug) + cpuProfilePath := getUniqFilename(cpuProfileBase) + cpuProfile, err := os.Create(cpuProfilePath) + if err != nil { + b.Fatalf("failed to create CPU profile: %s", err) + } + defer cpuProfile.Close() + + if err := pprof.StartCPUProfile(cpuProfile); err != nil { + b.Fatalf("failed to start CPU profile: %s", err) + } + defer pprof.StopCPUProfile() + b.ReportAllocs() for b.Loop() { @@ -113,6 +158,22 @@ func runEnumBenchmark(b *testing.B, options 
*types.Options) { b.Fatalf("%s failed: %s", b.Name(), err) } } + + b.StopTimer() + + // Write heap profile + heapProfileBase := fmt.Sprintf("%s.heap.prof", benchNameSlug) + heapProfilePath := getUniqFilename(heapProfileBase) + heapProfile, err := os.Create(heapProfilePath) + if err != nil { + b.Fatalf("failed to create heap profile: %s", err) + } + defer heapProfile.Close() + + runtime.GC() // Force GC before heap profile + if err := pprof.WriteHeapProfile(heapProfile); err != nil { + b.Fatalf("failed to write heap profile: %s", err) + } } func BenchmarkRunEnumeration(b *testing.B) { From 3a4e436eed5da6b46b96dd493b84a7375d18bdce Mon Sep 17 00:00:00 2001 From: Dwi Siswanto Date: Wed, 12 Nov 2025 08:55:40 +0700 Subject: [PATCH 30/55] chore(cmd): satisfy lints Signed-off-by: Dwi Siswanto --- cmd/nuclei/main_benchmark_test.go | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/cmd/nuclei/main_benchmark_test.go b/cmd/nuclei/main_benchmark_test.go index e4d315986b..26d7a1965b 100644 --- a/cmd/nuclei/main_benchmark_test.go +++ b/cmd/nuclei/main_benchmark_test.go @@ -144,7 +144,7 @@ func runEnumBenchmark(b *testing.B, options *types.Options) { if err != nil { b.Fatalf("failed to create CPU profile: %s", err) } - defer cpuProfile.Close() + defer func() { _ = cpuProfile.Close() }() if err := pprof.StartCPUProfile(cpuProfile); err != nil { b.Fatalf("failed to start CPU profile: %s", err) @@ -168,7 +168,7 @@ func runEnumBenchmark(b *testing.B, options *types.Options) { if err != nil { b.Fatalf("failed to create heap profile: %s", err) } - defer heapProfile.Close() + defer func() { _ = heapProfile.Close() }() runtime.GC() // Force GC before heap profile if err := pprof.WriteHeapProfile(heapProfile); err != nil { From 141f34a8ae5bf7b0b8c3cf84b3524949044bce53 Mon Sep 17 00:00:00 2001 From: Mzack9999 Date: Sat, 15 Nov 2025 20:04:32 +0400 Subject: [PATCH 31/55] Merge pull request #6610 from projectdiscovery/feat-result-upload allow custom id for upload --- 
internal/pdcp/writer.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/internal/pdcp/writer.go b/internal/pdcp/writer.go index 778d2ccc98..602942642e 100644 --- a/internal/pdcp/writer.go +++ b/internal/pdcp/writer.go @@ -104,7 +104,7 @@ func NewUploadWriter(ctx context.Context, logger *gologger.Logger, creds *pdcpau // SetScanID sets the scan id for the upload writer func (u *UploadWriter) SetScanID(id string) error { if !xidRegex.MatchString(id) { - return fmt.Errorf("invalid scan id provided") + gologger.Warning().Msgf("invalid asset id provided (unknown xid format): %s", id) } u.scanID = id return nil From 7877341a132cc43b92203dd308c4dec3b04aaf4e Mon Sep 17 00:00:00 2001 From: circleous Date: Mon, 17 Nov 2025 12:50:54 +0700 Subject: [PATCH 32/55] feat: write resume file specified by flag --- cmd/nuclei/main.go | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/cmd/nuclei/main.go b/cmd/nuclei/main.go index e2de5ff03b..bc1eb4af45 100644 --- a/cmd/nuclei/main.go +++ b/cmd/nuclei/main.go @@ -195,7 +195,10 @@ func main() { } // Setup graceful exits - resumeFileName := types.DefaultResumeFilePath() + resumeFileName := options.Resume + if resumeFileName == "" { + resumeFileName = types.DefaultResumeFilePath() + } c := make(chan os.Signal, 1) signal.Notify(c, os.Interrupt) go func() { @@ -255,7 +258,7 @@ on extensive configurability, massive extensibility and ease of use.`) flagSet.StringSliceVarP(&options.Targets, "target", "u", nil, "target URLs/hosts to scan", goflags.CommaSeparatedStringSliceOptions), flagSet.StringVarP(&options.TargetsFilePath, "list", "l", "", "path to file containing a list of target URLs/hosts to scan (one per line)"), flagSet.StringSliceVarP(&options.ExcludeTargets, "exclude-hosts", "eh", nil, "hosts to exclude to scan from the input list (ip, cidr, hostname)", goflags.FileCommaSeparatedStringSliceOptions), - flagSet.StringVar(&options.Resume, "resume", "", "resume scan using resume.cfg (clustering will 
be disabled)"), + flagSet.StringVar(&options.Resume, "resume", "", "resume scan from and save to specified file (clustering will be disabled)"), flagSet.BoolVarP(&options.ScanAllIPs, "scan-all-ips", "sa", false, "scan all the IP's associated with dns record"), flagSet.StringSliceVarP(&options.IPVersion, "ip-version", "iv", nil, "IP version to scan of hostname (4,6) - (default 4)", goflags.CommaSeparatedStringSliceOptions), ) From a42dd7a6d95f7d9b35e2d2ba8bd970791b89a258 Mon Sep 17 00:00:00 2001 From: Mzack9999 Date: Tue, 18 Nov 2025 17:06:04 +0400 Subject: [PATCH 33/55] updating docs --- README.md | 2 +- README_CN.md | 2 +- README_ES.md | 2 +- README_ID.md | 2 +- README_JP.md | 2 +- README_KR.md | 2 +- README_PT-BR.md | 2 +- cmd/nuclei/main.go | 8 ++++---- 8 files changed, 11 insertions(+), 11 deletions(-) diff --git a/README.md b/README.md index a422e5dad9..ae4f3eab32 100644 --- a/README.md +++ b/README.md @@ -140,7 +140,7 @@ TARGET: -u, -target string[] target URLs/hosts to scan -l, -list string path to file containing a list of target URLs/hosts to scan (one per line) -eh, -exclude-hosts string[] hosts to exclude to scan from the input list (ip, cidr, hostname) - -resume string resume scan using resume.cfg (clustering will be disabled) + -resume string resume scan from and save to specified file (clustering will be disabled) -sa, -scan-all-ips scan all the IP's associated with dns record -iv, -ip-version string[] IP version to scan of hostname (4,6) - (default 4) diff --git a/README_CN.md b/README_CN.md index 5fb4dd14e9..0396b0ba6e 100644 --- a/README_CN.md +++ b/README_CN.md @@ -119,7 +119,7 @@ Nuclei是一款注重于可配置性、可扩展性和易用性的基于模板 目标: -u, -target string[] 指定扫描的目标URL/主机(多个目标则指定多个-u参数) -l, -list string 指定包含要扫描的目标URL/主机列表的文件路径(一行一个) - -resume string 使用指定的resume.cfg文件恢复扫描(将禁用请求聚类) + -resume string 从指定文件恢复扫描并保存到指定文件(将禁用请求聚类) -sa, -scan-all-ips 扫描由目标解析出来的所有IP(针对域名对应多个IP的情况) -iv, -ip-version string[] 要扫描的主机名的IP版本(4,6)-(默认为4) diff --git a/README_ES.md b/README_ES.md index 
ec7949efef..4432699dc0 100644 --- a/README_ES.md +++ b/README_ES.md @@ -118,7 +118,7 @@ TARGET: -u, -target string[] URLs/hosts a escanear -l, -list string ruta al archivo que contiene la lista de URLs/hosts a escanear (uno por línea) -eh, -exclude-hosts string[] hosts a excluir para escanear de la lista de entrada (ip, cidr, hostname) - -resume string reanudar el escaneo usando resume.cfg (la clusterización quedará inhabilitada) + -resume string reanudar el escaneo desde y guardar en el archivo especificado (la clusterización quedará inhabilitada) -sa, -scan-all-ips escanear todas las IP asociadas al registro dns -iv, -ip-version string[] versión IP a escanear del nombre de host (4,6) - (por defecto 4) diff --git a/README_ID.md b/README_ID.md index 459352b4d5..db18e1db62 100644 --- a/README_ID.md +++ b/README_ID.md @@ -98,7 +98,7 @@ Flags: TARGET: -u, -target string[] target URLs/hosts to scan -l, -list string path to file containing a list of target URLs/hosts to scan (one per line) - -resume string resume scan using resume.cfg (clustering will be disabled) + -resume string resume scan from and save to specified file (clustering will be disabled) -sa, -scan-all-ips scan all the IP's associated with dns record -iv, -ip-version string[] IP version to scan of hostname (4,6) - (default 4) diff --git a/README_JP.md b/README_JP.md index d80fb4dfcb..33e5c282e7 100644 --- a/README_JP.md +++ b/README_JP.md @@ -113,7 +113,7 @@ Nucleiは、広範な設定可能性、大規模な拡張性、および使い ターゲット: -u, -target string[] スキャンする対象のURL/ホスト -l, -list string スキャンする対象のURL/ホストのリストが含まれているファイルへのパス(1行に1つ) - -resume string resume.cfgを使用してスキャンを再開(クラスタリングは無効になります) + -resume string 指定されたファイルからスキャンを再開し、指定されたファイルに保存(クラスタリングは無効になります) -sa, -scan-all-ips DNSレコードに関連付けられているすべてのIPをスキャン -iv, -ip-version string[] ホスト名のスキャンするIPバージョン(4,6)-(デフォルトは4) diff --git a/README_KR.md b/README_KR.md index d0828564ab..2b137443c7 100644 --- a/README_KR.md +++ b/README_KR.md @@ -96,7 +96,7 @@ Nuclei는 빠르고, 템플릿 기반의 취약점 스캐너로 TARGET: -u, -target 
string[] 스캔할 대상 URL/호스트 -l, -list string 스캔할 대상 URL/호스트 목록이 있는 파일 경로 (한 줄에 하나씩) - -resume string resume.cfg를 사용하여 스캔 재개 (클러스터링은 비활성화됨) + -resume string 지정된 파일에서 스캔을 재개하고 지정된 파일에 저장 (클러스터링은 비활성화됨) -sa, -scan-all-ips dns 레코드와 관련된 모든 IP 스캔 -iv, -ip-version string[] 스캔할 호스트의 IP 버전 (4,6) - (기본값 4) diff --git a/README_PT-BR.md b/README_PT-BR.md index e63a3d8a1e..64e4f4e307 100644 --- a/README_PT-BR.md +++ b/README_PT-BR.md @@ -118,7 +118,7 @@ TARGET: -u, -target string[] URLs/hosts a serem escaneados -l, -list string caminho do arquivo contendo a lista de URLs/hosts a serem escaneados (um por linha) -eh, -exclude-hosts string[] hosts a serem excluídos do escaneamento na lista de entrada (ip, cidr, hostname) - -resume string retomar o escaneamento usando resume.cfg (a clusterização será desabilitada) + -resume string retomar o escaneamento a partir de e salvar no arquivo especificado (a clusterização será desabilitada) -sa, -scan-all-ips escanear todos os IPs associados ao registro DNS -iv, -ip-version string[] versão de IP a escanear do nome do host (4,6) - (padrão 4) diff --git a/cmd/nuclei/main.go b/cmd/nuclei/main.go index bc1eb4af45..1a51c9f010 100644 --- a/cmd/nuclei/main.go +++ b/cmd/nuclei/main.go @@ -194,10 +194,10 @@ func main() { }) } - // Setup graceful exits - resumeFileName := options.Resume - if resumeFileName == "" { - resumeFileName = types.DefaultResumeFilePath() + // Setup filename for graceful exits + resumeFileName := types.DefaultResumeFilePath() + if options.Resume != "" { + resumeFileName = options.Resume } c := make(chan os.Signal, 1) signal.Notify(c, os.Interrupt) go func() { From 6730f5262fe50432e8f1144ba74388c0370501ad Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Nov 2025 05:02:21 +0000 Subject: [PATCH 34/55] chore(deps): bump the modules group with 6 updates Bumps the modules group with 6 updates: | Package | From | To | | --- | --- | --- | | 
[github.com/projectdiscovery/gologger](https://github.com/projectdiscovery/gologger) | `1.1.59` | `1.1.60` | | [github.com/projectdiscovery/httpx](https://github.com/projectdiscovery/httpx) | `1.7.2-0.20250911192144-fc425deb041a` | `1.7.2` | | [github.com/projectdiscovery/networkpolicy](https://github.com/projectdiscovery/networkpolicy) | `0.1.27` | `0.1.28` | | [github.com/projectdiscovery/utils](https://github.com/projectdiscovery/utils) | `0.6.1-0.20251030144701-ce5c4b44e1e6` | `0.6.1` | | [github.com/projectdiscovery/wappalyzergo](https://github.com/projectdiscovery/wappalyzergo) | `0.2.54` | `0.2.55` | | [github.com/projectdiscovery/cdncheck](https://github.com/projectdiscovery/cdncheck) | `1.2.9` | `1.2.10` | Updates `github.com/projectdiscovery/gologger` from 1.1.59 to 1.1.60 - [Release notes](https://github.com/projectdiscovery/gologger/releases) - [Commits](https://github.com/projectdiscovery/gologger/compare/v1.1.59...v1.1.60) Updates `github.com/projectdiscovery/httpx` from 1.7.2-0.20250911192144-fc425deb041a to 1.7.2 - [Release notes](https://github.com/projectdiscovery/httpx/releases) - [Changelog](https://github.com/projectdiscovery/httpx/blob/dev/.goreleaser.yml) - [Commits](https://github.com/projectdiscovery/httpx/commits/v1.7.2) Updates `github.com/projectdiscovery/networkpolicy` from 0.1.27 to 0.1.28 - [Release notes](https://github.com/projectdiscovery/networkpolicy/releases) - [Commits](https://github.com/projectdiscovery/networkpolicy/compare/v0.1.27...v0.1.28) Updates `github.com/projectdiscovery/utils` from 0.6.1-0.20251030144701-ce5c4b44e1e6 to 0.6.1 - [Release notes](https://github.com/projectdiscovery/utils/releases) - [Changelog](https://github.com/projectdiscovery/utils/blob/main/CHANGELOG.md) - [Commits](https://github.com/projectdiscovery/utils/commits/v0.6.1) Updates `github.com/projectdiscovery/wappalyzergo` from 0.2.54 to 0.2.55 - [Release notes](https://github.com/projectdiscovery/wappalyzergo/releases) - 
[Commits](https://github.com/projectdiscovery/wappalyzergo/compare/v0.2.54...v0.2.55) Updates `github.com/projectdiscovery/cdncheck` from 1.2.9 to 1.2.10 - [Release notes](https://github.com/projectdiscovery/cdncheck/releases) - [Changelog](https://github.com/projectdiscovery/cdncheck/blob/main/.goreleaser.yaml) - [Commits](https://github.com/projectdiscovery/cdncheck/compare/v1.2.9...v1.2.10) --- updated-dependencies: - dependency-name: github.com/projectdiscovery/gologger dependency-version: 1.1.60 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/httpx dependency-version: 1.7.2 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/networkpolicy dependency-version: 0.1.28 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/utils dependency-version: 0.6.1 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/wappalyzergo dependency-version: 0.2.55 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/cdncheck dependency-version: 1.2.10 dependency-type: indirect update-type: version-update:semver-patch dependency-group: modules ... 
Signed-off-by: dependabot[bot] --- go.mod | 14 +++++++------- go.sum | 24 ++++++++++++------------ 2 files changed, 19 insertions(+), 19 deletions(-) diff --git a/go.mod b/go.mod index c3a527351b..7f334ef28f 100644 --- a/go.mod +++ b/go.mod @@ -96,21 +96,21 @@ require ( github.com/projectdiscovery/gcache v0.0.0-20241015120333-12546c6e3f4c github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb github.com/projectdiscovery/goflags v0.1.74 - github.com/projectdiscovery/gologger v1.1.59 + github.com/projectdiscovery/gologger v1.1.60 github.com/projectdiscovery/gostruct v0.0.2 github.com/projectdiscovery/gozero v0.1.1-0.20251027191944-a4ea43320b81 - github.com/projectdiscovery/httpx v1.7.2-0.20250911192144-fc425deb041a + github.com/projectdiscovery/httpx v1.7.2 github.com/projectdiscovery/mapcidr v1.1.97 github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5 - github.com/projectdiscovery/networkpolicy v0.1.27 + github.com/projectdiscovery/networkpolicy v0.1.28 github.com/projectdiscovery/ratelimit v0.0.82 github.com/projectdiscovery/rdap v0.9.0 github.com/projectdiscovery/sarif v0.0.1 github.com/projectdiscovery/tlsx v1.2.1 github.com/projectdiscovery/uncover v1.1.0 github.com/projectdiscovery/useragent v0.0.102 - github.com/projectdiscovery/utils v0.6.1-0.20251030144701-ce5c4b44e1e6 - github.com/projectdiscovery/wappalyzergo v0.2.54 + github.com/projectdiscovery/utils v0.6.1 + github.com/projectdiscovery/wappalyzergo v0.2.55 github.com/redis/go-redis/v9 v9.11.0 github.com/seh-msft/burpxml v1.0.1 github.com/shurcooL/graphql v0.0.0-20230722043721-ed46e5a46466 @@ -322,7 +322,7 @@ require ( github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect github.com/projectdiscovery/asnmap v1.1.1 // indirect github.com/projectdiscovery/blackrock v0.0.1 // indirect - github.com/projectdiscovery/cdncheck v1.2.9 // indirect + github.com/projectdiscovery/cdncheck v1.2.10 // indirect github.com/projectdiscovery/freeport v0.0.7 // 
indirect github.com/projectdiscovery/ldapserver v1.0.2-0.20240219154113-dcc758ebc0cb // indirect github.com/projectdiscovery/machineid v0.0.0-20240226150047-2e2c51e35983 // indirect @@ -421,4 +421,4 @@ require ( ) // https://go.dev/ref/mod#go-mod-file-retract -retract v3.2.0 // retract due to broken js protocol issue \ No newline at end of file +retract v3.2.0 // retract due to broken js protocol issue diff --git a/go.sum b/go.sum index a1063e60e0..3897295000 100644 --- a/go.sum +++ b/go.sum @@ -817,8 +817,8 @@ github.com/projectdiscovery/asnmap v1.1.1 h1:ImJiKIaACOT7HPx4Pabb5dksolzaFYsD1kI github.com/projectdiscovery/asnmap v1.1.1/go.mod h1:QT7jt9nQanj+Ucjr9BqGr1Q2veCCKSAVyUzLXfEcQ60= github.com/projectdiscovery/blackrock v0.0.1 h1:lHQqhaaEFjgf5WkuItbpeCZv2DUIE45k0VbGJyft6LQ= github.com/projectdiscovery/blackrock v0.0.1/go.mod h1:ANUtjDfaVrqB453bzToU+YB4cUbvBRpLvEwoWIwlTss= -github.com/projectdiscovery/cdncheck v1.2.9 h1:DsT+uZdGduJSsSrTbFRl1JDcsDHrPKi0v+/KziQnuTw= -github.com/projectdiscovery/cdncheck v1.2.9/go.mod h1:ibL9HoZs2JYTEUBOZo4f+W+XEzQifFLOf4bpgFStgj4= +github.com/projectdiscovery/cdncheck v1.2.10 h1:Ox86LS8RFjq6pYNTP3Eqdawlor/h+bnb7BTEKBpzFyM= +github.com/projectdiscovery/cdncheck v1.2.10/go.mod h1:ibL9HoZs2JYTEUBOZo4f+W+XEzQifFLOf4bpgFStgj4= github.com/projectdiscovery/clistats v0.1.1 h1:8mwbdbwTU4aT88TJvwIzTpiNeow3XnAB72JIg66c8wE= github.com/projectdiscovery/clistats v0.1.1/go.mod h1:4LtTC9Oy//RiuT1+76MfTg8Hqs7FQp1JIGBM3nHK6a0= github.com/projectdiscovery/dsl v0.8.4 h1:p3rvzJae9BecOMufdYex3DX9zZeQNaXwVQe4kCEAOtE= @@ -835,16 +835,16 @@ github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb h1:rutG90 github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb/go.mod h1:FLjF1DmZ+POoGEiIQdWuYVwS++C/GwpX8YaCsTSm1RY= github.com/projectdiscovery/goflags v0.1.74 h1:n85uTRj5qMosm0PFBfsvOL24I7TdWRcWq/1GynhXS7c= github.com/projectdiscovery/goflags v0.1.74/go.mod h1:UMc9/7dFz2oln+10tv6cy+7WZKTHf9UGhaNkF95emh4= 
-github.com/projectdiscovery/gologger v1.1.59 h1:3XFidZHrUqtvL1CUbw7L1jtwiUmTZxT2CoQ0I/yiNh4= -github.com/projectdiscovery/gologger v1.1.59/go.mod h1:8FJFKmo0N4ITIH3n1Jy4ze6ijr+mA3t78g+VpN8uBRU= +github.com/projectdiscovery/gologger v1.1.60 h1:N2Zyu4WA2RgUeqSAdfhv/CLS4de8lDDc2+IdLKcAd5U= +github.com/projectdiscovery/gologger v1.1.60/go.mod h1:8FJFKmo0N4ITIH3n1Jy4ze6ijr+mA3t78g+VpN8uBRU= github.com/projectdiscovery/gostruct v0.0.2 h1:s8gP8ApugGM4go1pA+sVlPDXaWqNP5BBDDSv7VEdG1M= github.com/projectdiscovery/gostruct v0.0.2/go.mod h1:H86peL4HKwMXcQQtEa6lmC8FuD9XFt6gkNR0B/Mu5PE= github.com/projectdiscovery/gozero v0.1.1-0.20251027191944-a4ea43320b81 h1:yHh46pJovYbyiaHCV7oIDinFmy+Fyq36H1BowJgb0M0= github.com/projectdiscovery/gozero v0.1.1-0.20251027191944-a4ea43320b81/go.mod h1:9lmGPBDGZVANzCGjQg+V32n8Y3Cgjo/4kT0E88lsVTI= github.com/projectdiscovery/hmap v0.0.95 h1:OO6MCySlK2xMzvJmsYUwdaI7YWv/U437OtsN0Ovw72k= github.com/projectdiscovery/hmap v0.0.95/go.mod h1:KiTRdGd/GzX7uaoFWPrPBxPf4X/uZ9HTQ9dQ8x7x1bo= -github.com/projectdiscovery/httpx v1.7.2-0.20250911192144-fc425deb041a h1:5NBp4BegAQuT3QSnbBKt05LH1nOyEeFAXYh1+aE3Nlo= -github.com/projectdiscovery/httpx v1.7.2-0.20250911192144-fc425deb041a/go.mod h1:SQl92RiEuBnv1QQ8aQLC3b1lfgGHttoqUV0cTTvlzxQ= +github.com/projectdiscovery/httpx v1.7.2 h1:AfJ5wjhKOlywX+x+gPO4iPqgFEyoIJwvXsLpQQgs4+c= +github.com/projectdiscovery/httpx v1.7.2/go.mod h1:hm0uTQGUTU1K0AQ1NQVfFrKfiS4u9Ynh8wArdXUXBS4= github.com/projectdiscovery/interactsh v1.2.4 h1:WUSj+fxbcV53J64oIAhbYzCKD1w/IyenyRBhkI5jiqI= github.com/projectdiscovery/interactsh v1.2.4/go.mod h1:E/IVNZ80/WKz8zTwGJWQygxIbhlRmuzZFsZwcGSZTdc= github.com/projectdiscovery/ldapserver v1.0.2-0.20240219154113-dcc758ebc0cb h1:MGtI4oE12ruWv11ZlPXXd7hl/uAaQZrFvrIDYDeVMd8= @@ -855,8 +855,8 @@ github.com/projectdiscovery/mapcidr v1.1.97 h1:7FkxNNVXp+m1rIu5Nv/2SrF9k4+LwP8Qu github.com/projectdiscovery/mapcidr v1.1.97/go.mod h1:9dgTJh1SP02gYZdpzMjm6vtYFkEHQHoTyaVNvaeJ7lA= github.com/projectdiscovery/n3iwf 
v0.0.0-20230523120440-b8cd232ff1f5 h1:L/e8z8yw1pfT6bg35NiN7yd1XKtJap5Nk6lMwQ0RNi8= github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5/go.mod h1:pGW2ncnTxTxHtP9wzcIJAB+3/NMp6IiuQWd2NK7K+oc= -github.com/projectdiscovery/networkpolicy v0.1.27 h1:GsbvDIW3nPstAx8Beke6rtn95PhXnOcoXrnjcohn5Xk= -github.com/projectdiscovery/networkpolicy v0.1.27/go.mod h1:/3XfgnxKNuxaTZc6wZ/Pq6fiKvK8N4OQyLmfcUeDk2E= +github.com/projectdiscovery/networkpolicy v0.1.28 h1:Rwg8iZmM4n+CRWyUClthaSrTqDAW8zBI2HULRO1CF3k= +github.com/projectdiscovery/networkpolicy v0.1.28/go.mod h1:/3XfgnxKNuxaTZc6wZ/Pq6fiKvK8N4OQyLmfcUeDk2E= github.com/projectdiscovery/ratelimit v0.0.82 h1:rtO5SQf5uQFu5zTahTaTcO06OxmG8EIF1qhdFPIyTak= github.com/projectdiscovery/ratelimit v0.0.82/go.mod h1:z076BrLkBb5yS7uhHNoCTf8X/BvFSGRxwQ8EzEL9afM= github.com/projectdiscovery/rawhttp v0.1.90 h1:LOSZ6PUH08tnKmWsIwvwv1Z/4zkiYKYOSZ6n+8RFKtw= @@ -877,10 +877,10 @@ github.com/projectdiscovery/uncover v1.1.0 h1:UDp/qLZn78YZb6VPoOrfyP1vz+ojEx8VrT github.com/projectdiscovery/uncover v1.1.0/go.mod h1:2rXINmMe/lmVAt2jn9CpAOs9An57/JEeLZobY3Z9kUs= github.com/projectdiscovery/useragent v0.0.102 h1:Xfr8a7LQhIu0zeSz5gBxGCdyuqZbhkOMAEQUcEZXyBU= github.com/projectdiscovery/useragent v0.0.102/go.mod h1:DIfLRBKZ6dLhHRnMYkxdg6Jpu0kpE3pJlMG94dsIchY= -github.com/projectdiscovery/utils v0.6.1-0.20251030144701-ce5c4b44e1e6 h1:nvszzYNHYnc8X+Dm68zMuYNNesZJp7QWfe8EEyL4azc= -github.com/projectdiscovery/utils v0.6.1-0.20251030144701-ce5c4b44e1e6/go.mod h1:GOjhpPLmpMHcYJKI0vhjvjdczMQf3jWdUgYiBeKkwVk= -github.com/projectdiscovery/wappalyzergo v0.2.54 h1:8w0qUb0dO9N5FN1y4M8pIzDNqLCj0MrITqV/1xp05Lw= -github.com/projectdiscovery/wappalyzergo v0.2.54/go.mod h1:lwuDLdAqWDZ1IL8OQnoNQ0t17UP9AQSvVuFcDAm4FpQ= +github.com/projectdiscovery/utils v0.6.1 h1:9bf3J2G4WJMULGm4Xq7+96+Uj4QpYID/tNnzberR6RE= +github.com/projectdiscovery/utils v0.6.1/go.mod h1:j4Fb6PDir9PcTxLOL9cpSVDPVKtLTZwdVxxMAeG0JjA= +github.com/projectdiscovery/wappalyzergo v0.2.55 
h1:Yw8bSrEH/+prUrkb6c4NMwi5m2Sso28nrdy4vFxYn8I= +github.com/projectdiscovery/wappalyzergo v0.2.55/go.mod h1:lwuDLdAqWDZ1IL8OQnoNQ0t17UP9AQSvVuFcDAm4FpQ= github.com/projectdiscovery/yamldoc-go v1.0.6 h1:GCEdIRlQjDux28xTXKszM7n3jlMf152d5nqVpVoetas= github.com/projectdiscovery/yamldoc-go v1.0.6/go.mod h1:R5lWrNzP+7Oyn77NDVPnBsxx2/FyQZBBkIAaSaCQFxw= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= From 8b8a3a11b6537397fb135384f633e51e03ef789f Mon Sep 17 00:00:00 2001 From: Niek den Breeje Date: Tue, 18 Nov 2025 17:31:01 +0100 Subject: [PATCH 35/55] refactor(sdk): don't create parentDir when configuring tmpDir --- lib/config.go | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/lib/config.go b/lib/config.go index bc8e09f3e6..f624ef42e7 100644 --- a/lib/config.go +++ b/lib/config.go @@ -564,12 +564,9 @@ func WithOptions(opts *pkgtypes.Options) NucleiSDKOptions { // WithTemporaryDirectory allows setting a parent directory for SDK-managed temporary files. // A temporary directory will be created inside the provided directory and cleaned up on engine close. // If not set, a temporary directory will be automatically created in the system temp location. -// The parent directory will be created if it doesn't exist. +// The parent directory is assumed to exist. 
func WithTemporaryDirectory(parentDir string) NucleiSDKOptions { return func(e *NucleiEngine) error { - if err := os.MkdirAll(parentDir, 0755); err != nil { - return err - } tmpDir, err := os.MkdirTemp(parentDir, "nuclei-tmp-*") if err != nil { return err From 9b37f1b4417e50b354080eeb2811b513dd248bbb Mon Sep 17 00:00:00 2001 From: Mzack9999 Date: Tue, 18 Nov 2025 21:43:28 +0400 Subject: [PATCH 36/55] adding test case --- cmd/integration-test/javascript.go | 44 ++++++++++++++++++++++++++---- cmd/nuclei/ssh.yaml | 31 --------------------- pkg/protocols/javascript/js.go | 9 +++++- 3 files changed, 46 insertions(+), 38 deletions(-) delete mode 100644 cmd/nuclei/ssh.yaml diff --git a/cmd/integration-test/javascript.go b/cmd/integration-test/javascript.go index 6e99b7f844..3fb7e1baa9 100644 --- a/cmd/integration-test/javascript.go +++ b/cmd/integration-test/javascript.go @@ -17,15 +17,17 @@ var jsTestcases = []TestCaseInfo{ {Path: "protocols/javascript/net-https.yaml", TestCase: &javascriptNetHttps{}}, {Path: "protocols/javascript/oracle-auth-test.yaml", TestCase: &javascriptOracleAuthTest{}, DisableOn: func() bool { return osutils.IsWindows() || osutils.IsOSX() }}, {Path: "protocols/javascript/vnc-pass-brute.yaml", TestCase: &javascriptVncPassBrute{}}, + {Path: "protocols/javascript/multi-ports.yaml", TestCase: &javascriptMultiPortsSSH{}}, } var ( - redisResource *dockertest.Resource - sshResource *dockertest.Resource - oracleResource *dockertest.Resource - vncResource *dockertest.Resource - pool *dockertest.Pool - defaultRetry = 3 + redisResource *dockertest.Resource + sshResource *dockertest.Resource + oracleResource *dockertest.Resource + vncResource *dockertest.Resource + multiPortsSShResource *dockertest.Resource + pool *dockertest.Pool + defaultRetry = 3 ) type javascriptNetHttps struct{} @@ -167,6 +169,36 @@ func (j *javascriptVncPassBrute) Execute(filePath string) error { return multierr.Combine(errs...) 
} +type javascriptMultiPortsSSH struct{} + +func (j *javascriptMultiPortsSSH) Execute(filePath string) error { + if sshResource == nil || pool == nil { + // skip test as redis is not running + return nil + } + finalURL := "scanme.sh" + errs := []error{} + for i := 0; i < defaultRetry; i++ { + results := []string{} + var err error + _ = pool.Retry(func() error { + //let ssh server start + time.Sleep(3 * time.Second) + results, err = testutils.RunNucleiTemplateAndGetResults(filePath, finalURL, debug) + return nil + }) + if err != nil { + return err + } + if err := expectResultsCount(results, 1); err == nil { + return nil + } else { + errs = append(errs, err) + } + } + return multierr.Combine(errs...) +} + // purge any given resource if it is not nil func purge(resource *dockertest.Resource) { if resource != nil && pool != nil { diff --git a/cmd/nuclei/ssh.yaml b/cmd/nuclei/ssh.yaml deleted file mode 100644 index afc5115d8b..0000000000 --- a/cmd/nuclei/ssh.yaml +++ /dev/null @@ -1,31 +0,0 @@ -id: ssh-auth-methods - -info: - name: SSH Auth Methods - Detection - author: Ice3man543 - severity: info - description: | - SSH (Secure Shell) authentication modes are methods used to verify the identity of users and ensure secure access to remote systems. Common SSH authentication modes include password-based authentication, which relies on a secret passphrase, and public key authentication, which uses cryptographic keys for a more secure and convenient login process. Additionally, multi-factor authentication (MFA) can be employed to enhance security by requiring users to provide multiple forms of authentication, such as a password and a one-time code. 
- reference: - - https://nmap.org/nsedoc/scripts/ssh-auth-methods.html - metadata: - max-request: 1 - shodan-query: product:"OpenSSH" - tags: js,detect,ssh,enum,network - -javascript: - - pre-condition: | - isPortOpen(Host,Port); - code: | - var m = require("nuclei/ssh"); - var c = m.SSHClient(); - var response = c.ConnectSSHInfoMode(Host, Port); - Export(response); - args: - Host: "{{Host}}" - Port: "222,22" - - extractors: - - type: json - json: - - '.UserAuth' \ No newline at end of file diff --git a/pkg/protocols/javascript/js.go b/pkg/protocols/javascript/js.go index 5bfd660c91..2d927555fc 100644 --- a/pkg/protocols/javascript/js.go +++ b/pkg/protocols/javascript/js.go @@ -778,7 +778,14 @@ func (request *Request) Type() templateTypes.ProtocolType { func (request *Request) getPorts() []string { for k, v := range request.Args { if strings.EqualFold(k, "Port") { - ports := types.ToStringSlice(strings.Split(types.ToString(v), ",")) + portStr := types.ToString(v) + ports := []string{} + for _, p := range strings.Split(portStr, ",") { + trimmed := strings.TrimSpace(p) + if trimmed != "" { + ports = append(ports, trimmed) + } + } return sliceutil.Dedupe(ports) } } From c746a8fdae6e2616d3701bc9e2b8032888f209bb Mon Sep 17 00:00:00 2001 From: Mzack9999 Date: Tue, 18 Nov 2025 22:20:17 +0400 Subject: [PATCH 37/55] lint --- cmd/integration-test/javascript.go | 13 ++++++------- lib/example_test.go | 1 - pkg/operators/common/dsl/dsl.go | 2 +- pkg/scan/events/scan_noop.go | 1 - pkg/scan/events/stats_build.go | 1 - pkg/utils/json/json.go | 2 -- pkg/utils/json/json_fallback.go | 1 - 7 files changed, 7 insertions(+), 14 deletions(-) diff --git a/cmd/integration-test/javascript.go b/cmd/integration-test/javascript.go index 3fb7e1baa9..b7a8ad6b85 100644 --- a/cmd/integration-test/javascript.go +++ b/cmd/integration-test/javascript.go @@ -21,13 +21,12 @@ var jsTestcases = []TestCaseInfo{ } var ( - redisResource *dockertest.Resource - sshResource *dockertest.Resource - 
oracleResource *dockertest.Resource - vncResource *dockertest.Resource - multiPortsSShResource *dockertest.Resource - pool *dockertest.Pool - defaultRetry = 3 + redisResource *dockertest.Resource + sshResource *dockertest.Resource + oracleResource *dockertest.Resource + vncResource *dockertest.Resource + pool *dockertest.Pool + defaultRetry = 3 ) type javascriptNetHttps struct{} diff --git a/lib/example_test.go b/lib/example_test.go index 1d82073e1f..81c7fc106e 100644 --- a/lib/example_test.go +++ b/lib/example_test.go @@ -1,5 +1,4 @@ //go:build !race -// +build !race package nuclei_test diff --git a/pkg/operators/common/dsl/dsl.go b/pkg/operators/common/dsl/dsl.go index a424790d55..10c1782ec7 100644 --- a/pkg/operators/common/dsl/dsl.go +++ b/pkg/operators/common/dsl/dsl.go @@ -115,7 +115,7 @@ func init() { })) dsl.PrintDebugCallback = func(args ...interface{}) error { - gologger.Debug().Msgf("print_debug value: %s", fmt.Sprint(args)) + gologger.Debug().Msgf("print_debug value: %s", fmt.Sprint(args)) //nolint return nil } diff --git a/pkg/scan/events/scan_noop.go b/pkg/scan/events/scan_noop.go index 055baed4ee..2815bf183e 100644 --- a/pkg/scan/events/scan_noop.go +++ b/pkg/scan/events/scan_noop.go @@ -1,5 +1,4 @@ //go:build !stats -// +build !stats package events diff --git a/pkg/scan/events/stats_build.go b/pkg/scan/events/stats_build.go index 7d03e42e52..43e67865ee 100644 --- a/pkg/scan/events/stats_build.go +++ b/pkg/scan/events/stats_build.go @@ -1,5 +1,4 @@ //go:build stats -// +build stats package events diff --git a/pkg/utils/json/json.go b/pkg/utils/json/json.go index 0546406959..515ca3e9ed 100644 --- a/pkg/utils/json/json.go +++ b/pkg/utils/json/json.go @@ -1,6 +1,4 @@ //go:build (linux || darwin || windows) && (amd64 || arm64) -// +build linux darwin windows -// +build amd64 arm64 package json diff --git a/pkg/utils/json/json_fallback.go b/pkg/utils/json/json_fallback.go index 3c87dd9220..2e8bf98e01 100644 --- a/pkg/utils/json/json_fallback.go +++ 
b/pkg/utils/json/json_fallback.go @@ -1,5 +1,4 @@ //go:build !(linux || darwin || windows) || !(amd64 || arm64) -// +build !linux,!darwin,!windows !amd64,!arm64 package json From 232de93297b11bf4490b6a70c3e4e95e47104f70 Mon Sep 17 00:00:00 2001 From: Mzack9999 Date: Tue, 18 Nov 2025 22:21:13 +0400 Subject: [PATCH 38/55] removing unused check --- cmd/integration-test/javascript.go | 4 ---- 1 file changed, 4 deletions(-) diff --git a/cmd/integration-test/javascript.go b/cmd/integration-test/javascript.go index b7a8ad6b85..4b96bd8d9c 100644 --- a/cmd/integration-test/javascript.go +++ b/cmd/integration-test/javascript.go @@ -171,10 +171,6 @@ func (j *javascriptVncPassBrute) Execute(filePath string) error { type javascriptMultiPortsSSH struct{} func (j *javascriptMultiPortsSSH) Execute(filePath string) error { - if sshResource == nil || pool == nil { - // skip test as redis is not running - return nil - } finalURL := "scanme.sh" errs := []error{} for i := 0; i < defaultRetry; i++ { From 761c7c0c296c1b61ea78764ba696a7ca6abfb0fc Mon Sep 17 00:00:00 2001 From: Mzack9999 Date: Tue, 18 Nov 2025 22:23:35 +0400 Subject: [PATCH 39/55] adding multiport template --- .../protocols/javascript/multi-ports.yaml | 28 +++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 integration_tests/protocols/javascript/multi-ports.yaml diff --git a/integration_tests/protocols/javascript/multi-ports.yaml b/integration_tests/protocols/javascript/multi-ports.yaml new file mode 100644 index 0000000000..aa1c5fe2d9 --- /dev/null +++ b/integration_tests/protocols/javascript/multi-ports.yaml @@ -0,0 +1,28 @@ +id: multi-ports + +info: + name: Multi Ports - Detection + author: pdteam + severity: info + description: | + Multi Ports template for testing + metadata: + max-request: 1 + tags: js,detect,multi-ports,enum,network + +javascript: + - pre-condition: | + isPortOpen(Host,Port); + code: | + var m = require("nuclei/ssh"); + var c = m.SSHClient(); + var response = 
c.ConnectSSHInfoMode(Host, Port); + Export(response); + args: + Host: "{{Host}}" + Port: "2222,22" # Port 22 should match + + extractors: + - type: json + json: + - '.UserAuth' \ No newline at end of file From 3ea8dc29c38ddaa2fed732089f122aea27040204 Mon Sep 17 00:00:00 2001 From: Mzack9999 Date: Wed, 19 Nov 2025 13:28:32 +0400 Subject: [PATCH 40/55] refactor test --- cmd/integration-test/javascript.go | 25 +++++-------------------- 1 file changed, 5 insertions(+), 20 deletions(-) diff --git a/cmd/integration-test/javascript.go b/cmd/integration-test/javascript.go index 4b96bd8d9c..c0b8c19900 100644 --- a/cmd/integration-test/javascript.go +++ b/cmd/integration-test/javascript.go @@ -171,27 +171,12 @@ func (j *javascriptVncPassBrute) Execute(filePath string) error { type javascriptMultiPortsSSH struct{} func (j *javascriptMultiPortsSSH) Execute(filePath string) error { - finalURL := "scanme.sh" - errs := []error{} - for i := 0; i < defaultRetry; i++ { - results := []string{} - var err error - _ = pool.Retry(func() error { - //let ssh server start - time.Sleep(3 * time.Second) - results, err = testutils.RunNucleiTemplateAndGetResults(filePath, finalURL, debug) - return nil - }) - if err != nil { - return err - } - if err := expectResultsCount(results, 1); err == nil { - return nil - } else { - errs = append(errs, err) - } + // use scanme.sh as target to ensure we match on the 2nd default port 22 + results, err := testutils.RunNucleiTemplateAndGetResults(filePath, "scanme.sh", debug) + if err != nil { + return err } - return multierr.Combine(errs...) 
+ return expectResultsCount(results, 1) } // purge any given resource if it is not nil From aad0e51fefe8e83eb327e38d8a2006b8a430976b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 20 Nov 2025 02:39:36 +0000 Subject: [PATCH 41/55] chore(deps): bump golang.org/x/crypto Bumps the go_modules group with 1 update in the / directory: [golang.org/x/crypto](https://github.com/golang/crypto). Updates `golang.org/x/crypto` from 0.43.0 to 0.45.0 - [Commits](https://github.com/golang/crypto/compare/v0.43.0...v0.45.0) --- updated-dependencies: - dependency-name: golang.org/x/crypto dependency-version: 0.45.0 dependency-type: indirect dependency-group: go_modules ... Signed-off-by: dependabot[bot] --- go.mod | 16 ++++++++-------- go.sum | 32 ++++++++++++++++---------------- 2 files changed, 24 insertions(+), 24 deletions(-) diff --git a/go.mod b/go.mod index 7f334ef28f..08f76b056e 100644 --- a/go.mod +++ b/go.mod @@ -39,9 +39,9 @@ require ( github.com/valyala/fasttemplate v1.2.2 github.com/weppos/publicsuffix-go v0.50.0 go.uber.org/multierr v1.11.0 - golang.org/x/net v0.46.0 + golang.org/x/net v0.47.0 golang.org/x/oauth2 v0.30.0 - golang.org/x/text v0.30.0 + golang.org/x/text v0.31.0 gopkg.in/yaml.v2 v2.4.0 ) @@ -123,7 +123,7 @@ require ( github.com/zmap/zgrab2 v0.1.8 gitlab.com/gitlab-org/api/client-go v0.130.1 go.mongodb.org/mongo-driver v1.17.4 - golang.org/x/term v0.36.0 + golang.org/x/term v0.37.0 gopkg.in/yaml.v3 v3.0.1 moul.io/http2curl v1.0.0 ) @@ -379,7 +379,7 @@ require ( go.opentelemetry.io/otel/trace v1.38.0 // indirect go4.org v0.0.0-20230225012048-214862532bf5 // indirect golang.org/x/arch v0.3.0 // indirect - golang.org/x/sync v0.17.0 // indirect + golang.org/x/sync v0.18.0 // indirect gopkg.in/djherbis/times.v1 v1.3.0 // indirect mellium.im/sasl v0.3.2 // indirect ) @@ -403,12 +403,12 @@ require ( go.etcd.io/bbolt v1.4.0 // indirect go.uber.org/zap v1.27.0 // indirect goftp.io/server/v2 v2.0.1 // 
indirect - golang.org/x/crypto v0.43.0 // indirect + golang.org/x/crypto v0.45.0 // indirect golang.org/x/exp v0.0.0-20250911091902-df9299821621 - golang.org/x/mod v0.28.0 // indirect - golang.org/x/sys v0.37.0 // indirect + golang.org/x/mod v0.29.0 // indirect + golang.org/x/sys v0.38.0 // indirect golang.org/x/time v0.14.0 // indirect - golang.org/x/tools v0.37.0 + golang.org/x/tools v0.38.0 google.golang.org/protobuf v1.36.6 // indirect gopkg.in/alecthomas/kingpin.v2 v2.2.6 // indirect gopkg.in/corvus-ch/zbase32.v1 v1.0.0 // indirect diff --git a/go.sum b/go.sum index 3897295000..837246a360 100644 --- a/go.sum +++ b/go.sum @@ -1204,8 +1204,8 @@ golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/crypto v0.31.0/go.mod h1:kDsLvtWBEx7MV9tJOj9bnXsPbxwJQ6csT/x4KIN4Ssk= -golang.org/x/crypto v0.43.0 h1:dduJYIi3A3KOfdGOHX8AVZ/jGiyPa3IbBozJ5kNuE04= -golang.org/x/crypto v0.43.0/go.mod h1:BFbav4mRNlXJL4wNeejLpWxB7wMbc79PdRGhWKncxR0= +golang.org/x/crypto v0.45.0 h1:jMBrvKuj23MTlT0bQEOBcAE0mjg8mK9RXFhRH6nyF3Q= +golang.org/x/crypto v0.45.0/go.mod h1:XTGrrkGJve7CYK7J8PEww4aY7gM3qMCElcJQ8n8JdX4= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -1243,8 +1243,8 @@ golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.15.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/mod v0.17.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= -golang.org/x/mod v0.28.0 
h1:gQBtGhjxykdjY9YhZpSlZIsbnaE2+PgjfLWUQTnoZ1U= -golang.org/x/mod v0.28.0/go.mod h1:yfB/L0NOf/kmEbXjzCPOx1iK1fRutOydrCMsqRhEBxI= +golang.org/x/mod v0.29.0 h1:HV8lRxZC4l2cr3Zq1LvtOsi/ThTgWnUk/y64QSs8GwA= +golang.org/x/mod v0.29.0/go.mod h1:NyhrlYXJ2H4eJiRy/WDBO6HMqZQ6q9nk4JzS3NuCK+w= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -1295,8 +1295,8 @@ golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE= golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/net v0.33.0/go.mod h1:HXLR5J+9DxmrqMwG9qjGCxZ+zKXxBru04zlTvWlWuN4= -golang.org/x/net v0.46.0 h1:giFlY12I07fugqwPuWJi68oOnpfqFnJIJzaIIm2JVV4= -golang.org/x/net v0.46.0/go.mod h1:Q9BGdFy1y4nkUwiLvT5qtyhAnEHgnQ/zd8PfU6nc210= +golang.org/x/net v0.47.0 h1:Mx+4dIFzqraBXUugkia1OOvlD6LemFo1ALMHjrXDOhY= +golang.org/x/net v0.47.0/go.mod h1:/jNxtkgq5yWUGYkaZGqo27cfGZ1c5Nen03aYrrKpVRU= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -1325,8 +1325,8 @@ golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y= golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= -golang.org/x/sync v0.17.0 
h1:l60nONMj9l5drqw6jlhIELNv9I0A4OFgRsG9k2oT9Ug= -golang.org/x/sync v0.17.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= +golang.org/x/sync v0.18.0 h1:kr88TuHDroi+UVf+0hZnirlk8o8T+4MrK6mr60WkH/I= +golang.org/x/sync v0.18.0/go.mod h1:9KTHXmSnoGruLpwFjVSX0lNNA75CykiMECbovNTZqGI= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -1391,8 +1391,8 @@ golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.28.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= -golang.org/x/sys v0.37.0 h1:fdNQudmxPjkdUTPnLn5mdQv7Zwvbvpaxqs831goi9kQ= -golang.org/x/sys v0.37.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= +golang.org/x/sys v0.38.0 h1:3yZWxaJjBmCWXqhN1qh02AkOnCQ1poK6oF+a7xWL6Gc= +golang.org/x/sys v0.38.0/go.mod h1:OgkHotnGiDImocRcuBABYBEXf8A9a87e/uXjp9XT3ks= golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE= golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= @@ -1406,8 +1406,8 @@ golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/term v0.27.0/go.mod h1:iMsnZpn0cago0GOrHO2+Y7u7JPn5AylBrcoWkElMTSM= -golang.org/x/term v0.36.0 
h1:zMPR+aF8gfksFprF/Nc/rd1wRS1EI6nDBGyWAvDzx2Q= -golang.org/x/term v0.36.0/go.mod h1:Qu394IJq6V6dCBRgwqshf3mPF85AqzYEzofzRdZkWss= +golang.org/x/term v0.37.0 h1:8EGAD0qCmHYZg6J17DvsMy9/wJ7/D/4pV/wfnld5lTU= +golang.org/x/term v0.37.0/go.mod h1:5pB4lxRNYYVZuTLmy8oR2BH8dflOR+IbTYFD8fi3254= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -1424,8 +1424,8 @@ golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= -golang.org/x/text v0.30.0 h1:yznKA/E9zq54KzlzBEAWn1NXSQ8DIp/NYMy88xJjl4k= -golang.org/x/text v0.30.0/go.mod h1:yDdHFIX9t+tORqspjENWgzaCVXgk0yYnYuSZ8UzzBVM= +golang.org/x/text v0.31.0 h1:aC8ghyu4JhP8VojJ2lEHBnochRno1sgL6nEi9WGFGMM= +golang.org/x/text v0.31.0/go.mod h1:tKRAlv61yKIjGGHX/4tP1LTbc13YSec1pxVEWXzfoeM= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -1479,8 +1479,8 @@ golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= -golang.org/x/tools v0.37.0 
h1:DVSRzp7FwePZW356yEAChSdNcQo6Nsp+fex1SUW09lE= -golang.org/x/tools v0.37.0/go.mod h1:MBN5QPQtLMHVdvsbtarmTNukZDdgwdwlO5qGacAzF0w= +golang.org/x/tools v0.38.0 h1:Hx2Xv8hISq8Lm16jvBZ2VQf+RLmbd7wVUsALibYI/IQ= +golang.org/x/tools v0.38.0/go.mod h1:yEsQ/d/YK8cjh0L6rZlY8tgtlKiBNTL14pGDJPJpYQs= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= From ad1e6f8d75467275de6fab16339003c20a8fcb22 Mon Sep 17 00:00:00 2001 From: Dwi Siswanto <25837540+dwisiswant0@users.noreply.github.com> Date: Fri, 21 Nov 2025 12:53:58 +0700 Subject: [PATCH 42/55] feat(variables): check for undefined params for lazy eval (#6618) * feat(variables): check for undefined params for lazy eval Signed-off-by: Dwi Siswanto * test(variables): add TestCheckForLazyEval Signed-off-by: Dwi Siswanto * fix(variables): fail safe on err compile expr Signed-off-by: Dwi Siswanto --------- Signed-off-by: Dwi Siswanto --- pkg/protocols/common/variables/variables.go | 59 +++++++++- .../common/variables/variables_test.go | 106 ++++++++++++++++++ 2 files changed, 162 insertions(+), 3 deletions(-) diff --git a/pkg/protocols/common/variables/variables.go b/pkg/protocols/common/variables/variables.go index 0724c84c5c..fa5cc1dbc6 100644 --- a/pkg/protocols/common/variables/variables.go +++ b/pkg/protocols/common/variables/variables.go @@ -3,10 +3,13 @@ package variables import ( "strings" + "github.com/Knetic/govaluate" "github.com/invopop/jsonschema" + "github.com/projectdiscovery/nuclei/v3/pkg/operators/common/dsl" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/expressions" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/generators" "github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/interactsh" + 
"github.com/projectdiscovery/nuclei/v3/pkg/protocols/common/marker" protocolutils "github.com/projectdiscovery/nuclei/v3/pkg/protocols/utils" "github.com/projectdiscovery/nuclei/v3/pkg/types" "github.com/projectdiscovery/nuclei/v3/pkg/utils" @@ -17,7 +20,9 @@ import ( // Variable is a key-value pair of strings that can be used // throughout template. type Variable struct { - LazyEval bool `yaml:"-" json:"-"` // LazyEval is used to evaluate variables lazily if it using any expression or global variables + // LazyEval is used to evaluate variables lazily if it using any expression + // or global variables. + LazyEval bool `yaml:"-" json:"-"` utils.InsertionOrderedStringMap `yaml:"-" json:"-"` } @@ -128,19 +133,67 @@ func evaluateVariableValue(expression string, values, processing map[string]inte // checkForLazyEval checks if the variables have any lazy evaluation i.e any dsl function // and sets the flag accordingly. func (variables *Variable) checkForLazyEval() bool { + var needsLazy bool + variables.ForEach(func(key string, value interface{}) { + if needsLazy { + return + } + for _, v := range protocolutils.KnownVariables { if stringsutil.ContainsAny(types.ToString(value), v) { - variables.LazyEval = true + needsLazy = true return } } + // this is a hotfix and not the best way to do it // will be refactored once we move scan state to scanContext (see: https://github.com/projectdiscovery/nuclei/issues/4631) if strings.Contains(types.ToString(value), "interactsh-url") { - variables.LazyEval = true + needsLazy = true + return + } + + if hasUndefinedParams(types.ToString(value), variables) { + needsLazy = true return } }) + + variables.LazyEval = needsLazy + return variables.LazyEval } + +// hasUndefinedParams checks if a variable value contains expressions that ref +// parameters not defined in the current variable scope, indicating it needs +// runtime context. 
+func hasUndefinedParams(value string, variables *Variable) bool { + exprs := expressions.FindExpressions(value, marker.ParenthesisOpen, marker.ParenthesisClose, map[string]interface{}{}) + if len(exprs) == 0 { + return false + } + + definedVars := make(map[string]struct{}) + variables.ForEach(func(key string, _ interface{}) { + definedVars[key] = struct{}{} + }) + + for _, expr := range exprs { + compiled, err := govaluate.NewEvaluableExpressionWithFunctions(expr, dsl.HelperFunctions) + if err != nil { + // NOTE(dwisiswant0): here, it might need runtime context. + return true + } + + vars := compiled.Vars() + for _, paramName := range vars { + // NOTE(dwisiswant0): also here, if it's not in our defined vars. + if _, exists := definedVars[paramName]; !exists { + return true + } + } + } + + return false +} diff --git a/pkg/protocols/common/variables/variables_test.go b/pkg/protocols/common/variables/variables_test.go index 67aee33fc5..cbf560b4eb 100644 --- a/pkg/protocols/common/variables/variables_test.go +++ b/pkg/protocols/common/variables/variables_test.go @@ -4,6 +4,7 @@ import ( "testing" "time" + "github.com/projectdiscovery/nuclei/v3/pkg/utils" "github.com/projectdiscovery/nuclei/v3/pkg/utils/json" "github.com/stretchr/testify/require" "gopkg.in/yaml.v2" @@ -41,3 +42,108 @@ a6: "123456"` require.Equal(t, map[string]interface{}{"a2": "098f6bcd4621d373cade4e832627b4f6", "a3": "this_is_random_text", "a4": a4, "a5": "moc.elgoog", "a6": "123456"}, result, "could not get correct elements") } + +func TestCheckForLazyEval(t *testing.T) { + t.Run("undefined-parameters-in-expression", func(t *testing.T) { + // Variables with expressions that reference undefined parameters + // should be marked for lazy evaluation + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(2), + } + variables.Set("var1", "{{sha1(serial)}}") // 'serial' is undefined + variables.Set("var2", "{{replace(user, '.', '')}}") // 'user' is undefined + + 
result := variables.checkForLazyEval() + require.True(t, result, "should detect undefined parameters and set LazyEval=true") + require.True(t, variables.LazyEval, "LazyEval flag should be true") + }) + + t.Run("self-referencing-variables", func(t *testing.T) { + // Variables that reference other defined variables should NOT be lazy + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(2), + } + variables.Set("base", "example") + variables.Set("derived", "{{base}}_suffix") // 'base' is defined + + result := variables.checkForLazyEval() + require.False(t, result, "should not set LazyEval for self-referencing defined variables") + require.False(t, variables.LazyEval, "LazyEval flag should be false") + }) + + t.Run("constant-expressions", func(t *testing.T) { + // Constant expressions without variables should NOT be lazy + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(2), + } + variables.Set("const1", "{{2+2}}") + variables.Set("const2", "{{rand_int(1, 100)}}") + + result := variables.checkForLazyEval() + require.False(t, result, "should not set LazyEval for constant expressions") + require.False(t, variables.LazyEval, "LazyEval flag should be false") + }) + + t.Run("known-runtime-variables", func(t *testing.T) { + // Variables with known runtime variables (Host, BaseURL, etc.) 
should be lazy + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(1), + } + variables.Set("url", "{{BaseURL}}/api") + + result := variables.checkForLazyEval() + require.True(t, result, "should detect known runtime variables") + require.True(t, variables.LazyEval, "LazyEval flag should be true") + }) + + t.Run("interactsh-url", func(t *testing.T) { + // Variables with interactsh-url should be lazy + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(1), + } + variables.Set("callback", "{{interactsh-url}}") + + result := variables.checkForLazyEval() + require.True(t, result, "should detect interactsh-url") + require.True(t, variables.LazyEval, "LazyEval flag should be true") + }) + + t.Run("mixed-defined-and-undefined", func(t *testing.T) { + // Mix of defined and undefined parameters in actual expressions + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(3), + } + variables.Set("defined", "value") + variables.Set("uses_defined", "{{base64(defined)}}") // OK - 'defined' exists + variables.Set("uses_undefined", "{{base64(undefined_param)}}") // NOT OK - 'undefined_param' doesn't exist + + result := variables.checkForLazyEval() + require.True(t, result, "should detect undefined parameters even with some defined") + require.True(t, variables.LazyEval, "LazyEval flag should be true") + }) + + t.Run("plain-strings-no-expressions", func(t *testing.T) { + // Plain string values without expressions + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(2), + } + variables.Set("plain1", "simple value") + variables.Set("plain2", "another value") + + result := variables.checkForLazyEval() + require.False(t, result, "should not set LazyEval for plain strings") + require.False(t, variables.LazyEval, "LazyEval flag should be false") + }) + + t.Run("complex-expression-with-undefined", func(t *testing.T) { + // 
Complex expression with multiple undefined parameters + variables := &Variable{ + InsertionOrderedStringMap: *utils.NewEmptyInsertionOrderedStringMap(1), + } + variables.Set("complex", "{{sha1(cert_serial + issuer)}}") + + result := variables.checkForLazyEval() + require.True(t, result, "should detect undefined parameters in complex expressions") + require.True(t, variables.LazyEval, "LazyEval flag should be true") + }) +} From a52e6753a275682e424dea25a0937dbe448f0a91 Mon Sep 17 00:00:00 2001 From: Dwi Siswanto Date: Fri, 21 Nov 2025 16:19:33 +0700 Subject: [PATCH 43/55] chore(deps): bump github.com/projectdiscovery/fastdialer@v0.4.16 Signed-off-by: Dwi Siswanto --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 08f76b056e..8894945086 100644 --- a/go.mod +++ b/go.mod @@ -22,7 +22,7 @@ require ( github.com/olekukonko/tablewriter v1.0.8 github.com/pkg/errors v0.9.1 github.com/projectdiscovery/clistats v0.1.1 - github.com/projectdiscovery/fastdialer v0.4.15 + github.com/projectdiscovery/fastdialer v0.4.16 github.com/projectdiscovery/hmap v0.0.95 github.com/projectdiscovery/interactsh v1.2.4 github.com/projectdiscovery/rawhttp v0.1.90 diff --git a/go.sum b/go.sum index 837246a360..cde66ac181 100644 --- a/go.sum +++ b/go.sum @@ -823,8 +823,8 @@ github.com/projectdiscovery/clistats v0.1.1 h1:8mwbdbwTU4aT88TJvwIzTpiNeow3XnAB7 github.com/projectdiscovery/clistats v0.1.1/go.mod h1:4LtTC9Oy//RiuT1+76MfTg8Hqs7FQp1JIGBM3nHK6a0= github.com/projectdiscovery/dsl v0.8.4 h1:p3rvzJae9BecOMufdYex3DX9zZeQNaXwVQe4kCEAOtE= github.com/projectdiscovery/dsl v0.8.4/go.mod h1:msE7dGAuHYRrKddEwB1yoQ5dHrzzyimQUjsGDsMDis8= -github.com/projectdiscovery/fastdialer v0.4.15 h1:AHDgyydTdE5uUHGwzpvIDslY2AQn1kVq79gKEgFGAbE= -github.com/projectdiscovery/fastdialer v0.4.15/go.mod h1:X0l4+KqOE/aIL00pyTnBj4pWQDPYnCGL7cwZsJu6SCQ= +github.com/projectdiscovery/fastdialer v0.4.16 h1:rmCNr5N/9KTm0nSYjSuQ5j3aXmNIPf6HhJlAhN/7NRI= 
+github.com/projectdiscovery/fastdialer v0.4.16/go.mod h1:X0l4+KqOE/aIL00pyTnBj4pWQDPYnCGL7cwZsJu6SCQ= github.com/projectdiscovery/fasttemplate v0.0.2 h1:h2cISk5xDhlJEinlBQS6RRx0vOlOirB2y3Yu4PJzpiA= github.com/projectdiscovery/fasttemplate v0.0.2/go.mod h1:XYWWVMxnItd+r0GbjA1GCsUopMw1/XusuQxdyAIHMCw= github.com/projectdiscovery/freeport v0.0.7 h1:Q6uXo/j8SaV/GlAHkEYQi8WQoPXyJWxyspx+aFmz9Qk= From d5d8d50ab718bd96b1c3fd6dfb2a7000e33f3533 Mon Sep 17 00:00:00 2001 From: Dwi Siswanto <25837540+dwisiswant0@users.noreply.github.com> Date: Fri, 21 Nov 2025 22:41:59 +0700 Subject: [PATCH 44/55] fix(interactsh): skip DNS lookups on interactsh domains (#6614) * fix(interactsh): skip DNS lookups on interactsh domains to prevent false positives. Prevents nuclei from resolving interactsh domains injected in Host headers, which would cause self-interactions to be incorrectly reported as matches. Changes: * Add `GetHostname()` method to `interactsh.Client` to expose active server domain. * Skip CNAME DNS lookups in `(*http.Request).addCNameIfAvailable` when hostname matches the `(*interactsh.Client).GetHostname`. Fixes #6613 Signed-off-by: Dwi Siswanto * fix(http): prevent false `interactshDomain` matches Signed-off-by: Dwi Siswanto --------- Signed-off-by: Dwi Siswanto --- pkg/protocols/common/interactsh/interactsh.go | 5 +++++ pkg/protocols/http/request.go | 9 +++++++++ 2 files changed, 14 insertions(+) diff --git a/pkg/protocols/common/interactsh/interactsh.go b/pkg/protocols/common/interactsh/interactsh.go index 7cdf7c77bb..e92cfaecd9 100644 --- a/pkg/protocols/common/interactsh/interactsh.go +++ b/pkg/protocols/common/interactsh/interactsh.go @@ -455,3 +455,8 @@ func (c *Client) setHostname(hostname string) { c.hostname = hostname } + +// GetHostname returns the configured interactsh server hostname. 
+func (c *Client) GetHostname() string { + return c.getHostname() +} diff --git a/pkg/protocols/http/request.go b/pkg/protocols/http/request.go index c538686e18..4a97db7ddd 100644 --- a/pkg/protocols/http/request.go +++ b/pkg/protocols/http/request.go @@ -1137,6 +1137,15 @@ func (request *Request) addCNameIfAvailable(hostname string, outputEvent map[str return } + if request.options.Interactsh != nil { + interactshDomain := request.options.Interactsh.GetHostname() + if interactshDomain != "" { + if strings.EqualFold(hostname, interactshDomain) || strings.HasSuffix(hostname, "."+interactshDomain) { + return + } + } + } + data, err := request.dialer.GetDNSData(hostname) if err == nil { switch len(data.CNAME) { From cf44afbb9969705bf7289e495a20fdca39d1972f Mon Sep 17 00:00:00 2001 From: Ice3man Date: Sun, 23 Nov 2025 16:38:03 +0530 Subject: [PATCH 45/55] feat: bump dsl with deserialization helpers --- go.mod | 9 +++++---- go.sum | 16 ++++++++++------ 2 files changed, 15 insertions(+), 10 deletions(-) diff --git a/go.mod b/go.mod index 8894945086..735008693c 100644 --- a/go.mod +++ b/go.mod @@ -7,7 +7,7 @@ toolchain go1.24.4 require ( github.com/Knetic/govaluate v3.0.1-0.20171022003610-9aa49832a739+incompatible github.com/andygrunwald/go-jira v1.16.1 - github.com/antchfx/htmlquery v1.3.4 + github.com/antchfx/htmlquery v1.3.5 github.com/bluele/gcache v0.0.2 github.com/go-playground/validator/v10 v10.26.0 github.com/go-rod/rod v0.116.2 @@ -58,7 +58,7 @@ require ( github.com/alexsnet/go-vnc v0.1.0 github.com/alitto/pond v1.9.2 github.com/antchfx/xmlquery v1.4.4 - github.com/antchfx/xpath v1.3.3 + github.com/antchfx/xpath v1.3.5 github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 github.com/aws/aws-sdk-go-v2 v1.36.5 github.com/aws/aws-sdk-go-v2/config v1.29.17 @@ -91,7 +91,7 @@ require ( github.com/microsoft/go-mssqldb v1.9.2 github.com/ory/dockertest/v3 v3.12.0 github.com/praetorian-inc/fingerprintx v1.1.15 - github.com/projectdiscovery/dsl v0.8.4 + 
github.com/projectdiscovery/dsl v0.8.5 github.com/projectdiscovery/fasttemplate v0.0.2 github.com/projectdiscovery/gcache v0.0.0-20241015120333-12546c6e3f4c github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb @@ -275,7 +275,7 @@ require ( github.com/leodido/go-urn v1.4.0 // indirect github.com/libdns/libdns v0.2.1 // indirect github.com/logrusorgru/aurora/v4 v4.0.0 // indirect - github.com/lor00x/goldap v0.0.0-20180618054307-a546dffdd1a3 // indirect + github.com/lor00x/goldap v0.0.0-20240304151906-8d785c64d1c8 // indirect github.com/lucasb-eyer/go-colorful v1.2.0 // indirect github.com/lufia/plan9stats v0.0.0-20250821153705-5981dea3221d // indirect github.com/mackerelio/go-osstat v0.2.4 // indirect @@ -356,6 +356,7 @@ require ( github.com/vmihailenco/msgpack/v5 v5.3.4 // indirect github.com/vmihailenco/tagparser v0.1.2 // indirect github.com/vmihailenco/tagparser/v2 v2.0.0 // indirect + github.com/vulncheck-oss/go-exploit v1.51.0 // indirect github.com/wk8/go-ordered-map/v2 v2.1.8 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect github.com/xdg-go/pbkdf2 v1.0.0 // indirect diff --git a/go.sum b/go.sum index cde66ac181..52d7ce734d 100644 --- a/go.sum +++ b/go.sum @@ -140,12 +140,13 @@ github.com/andygrunwald/go-jira v1.16.1 h1:WoQEar5XoDRAibOgKzTFELlPNlKAtnfWr296R github.com/andygrunwald/go-jira v1.16.1/go.mod h1:UQH4IBVxIYWbgagc0LF/k9FRs9xjIiQ8hIcC6HfLwFU= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= -github.com/antchfx/htmlquery v1.3.4 h1:Isd0srPkni2iNTWCwVj/72t7uCphFeor5Q8nCzj1jdQ= -github.com/antchfx/htmlquery v1.3.4/go.mod h1:K9os0BwIEmLAvTqaNSua8tXLWRWZpocZIH73OzWQbwM= +github.com/antchfx/htmlquery v1.3.5 h1:aYthDDClnG2a2xePf6tys/UyyM/kRcsFRm+ifhFKoU0= +github.com/antchfx/htmlquery v1.3.5/go.mod h1:5oyIPIa3ovYGtLqMPNjBF2Uf25NPCKsMjCnQ8lvjaoA= 
github.com/antchfx/xmlquery v1.4.4 h1:mxMEkdYP3pjKSftxss4nUHfjBhnMk4imGoR96FRY2dg= github.com/antchfx/xmlquery v1.4.4/go.mod h1:AEPEEPYE9GnA2mj5Ur2L5Q5/2PycJ0N9Fusrx9b12fc= -github.com/antchfx/xpath v1.3.3 h1:tmuPQa1Uye0Ym1Zn65vxPgfltWb/Lxu2jeqIGteJSRs= github.com/antchfx/xpath v1.3.3/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= +github.com/antchfx/xpath v1.3.5 h1:PqbXLC3TkfeZyakF5eeh3NTWEbYl4VHNVeufANzDbKQ= +github.com/antchfx/xpath v1.3.5/go.mod h1:i54GszH55fYfBmoZXapTHN8T8tkcHfRgLyVwwqzXNcs= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= @@ -678,8 +679,9 @@ github.com/logrusorgru/aurora v2.0.3+incompatible h1:tOpm7WcpBTn4fjmVfgpQq0EfczG github.com/logrusorgru/aurora v2.0.3+incompatible/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= github.com/logrusorgru/aurora/v4 v4.0.0 h1:sRjfPpun/63iADiSvGGjgA1cAYegEWMPCJdUpJYn9JA= github.com/logrusorgru/aurora/v4 v4.0.0/go.mod h1:lP0iIa2nrnT/qoFXcOZSrZQpJ1o6n2CUf/hyHi2Q4ZQ= -github.com/lor00x/goldap v0.0.0-20180618054307-a546dffdd1a3 h1:wIONC+HMNRqmWBjuMxhatuSzHaljStc4gjDeKycxy0A= github.com/lor00x/goldap v0.0.0-20180618054307-a546dffdd1a3/go.mod h1:37YR9jabpiIxsb8X9VCIx8qFOjTDIIrIHHODa8C4gz0= +github.com/lor00x/goldap v0.0.0-20240304151906-8d785c64d1c8 h1:z9RDOBcFcf3f2hSfKuoM3/FmJpt8M+w0fOy4wKneBmc= +github.com/lor00x/goldap v0.0.0-20240304151906-8d785c64d1c8/go.mod h1:37YR9jabpiIxsb8X9VCIx8qFOjTDIIrIHHODa8C4gz0= github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= github.com/lufia/plan9stats v0.0.0-20250821153705-5981dea3221d h1:vFzYZc8yji+9DmNRhpEbs8VBK4CgV/DPfGzeVJSSp/8= @@ -821,8 
+823,8 @@ github.com/projectdiscovery/cdncheck v1.2.10 h1:Ox86LS8RFjq6pYNTP3Eqdawlor/h+bnb github.com/projectdiscovery/cdncheck v1.2.10/go.mod h1:ibL9HoZs2JYTEUBOZo4f+W+XEzQifFLOf4bpgFStgj4= github.com/projectdiscovery/clistats v0.1.1 h1:8mwbdbwTU4aT88TJvwIzTpiNeow3XnAB72JIg66c8wE= github.com/projectdiscovery/clistats v0.1.1/go.mod h1:4LtTC9Oy//RiuT1+76MfTg8Hqs7FQp1JIGBM3nHK6a0= -github.com/projectdiscovery/dsl v0.8.4 h1:p3rvzJae9BecOMufdYex3DX9zZeQNaXwVQe4kCEAOtE= -github.com/projectdiscovery/dsl v0.8.4/go.mod h1:msE7dGAuHYRrKddEwB1yoQ5dHrzzyimQUjsGDsMDis8= +github.com/projectdiscovery/dsl v0.8.5 h1:f3opg8Jzikwx6VXC+CbgseUmSUqdfCnfGT08Syhp0sw= +github.com/projectdiscovery/dsl v0.8.5/go.mod h1:AuUq18cpLJJ0uAjJZKaLrdyAgDHrnQAjLMZtPEyMoJw= github.com/projectdiscovery/fastdialer v0.4.16 h1:rmCNr5N/9KTm0nSYjSuQ5j3aXmNIPf6HhJlAhN/7NRI= github.com/projectdiscovery/fastdialer v0.4.16/go.mod h1:X0l4+KqOE/aIL00pyTnBj4pWQDPYnCGL7cwZsJu6SCQ= github.com/projectdiscovery/fasttemplate v0.0.2 h1:h2cISk5xDhlJEinlBQS6RRx0vOlOirB2y3Yu4PJzpiA= @@ -1051,6 +1053,8 @@ github.com/vmihailenco/tagparser v0.1.2 h1:gnjoVuB/kljJ5wICEEOpx98oXMWPLj22G67Vb github.com/vmihailenco/tagparser v0.1.2/go.mod h1:OeAg3pn3UbLjkWt+rN9oFYB6u/cQgqMEUPoW2WPyhdI= github.com/vmihailenco/tagparser/v2 v2.0.0 h1:y09buUbR+b5aycVFQs/g70pqKVZNBmxwAhO7/IwNM9g= github.com/vmihailenco/tagparser/v2 v2.0.0/go.mod h1:Wri+At7QHww0WTrCBeu4J6bNtoV6mEfg5OIWRZA9qds= +github.com/vulncheck-oss/go-exploit v1.51.0 h1:HTmJ4Q94tbEDPb35mQZn6qMg4rT+Sw9n+L7g3Pjr+3o= +github.com/vulncheck-oss/go-exploit v1.51.0/go.mod h1:J28w0dLnA6DnCrnBm9Sbt6smX8lvztnnN2wCXy7No6c= github.com/weppos/publicsuffix-go v0.12.0/go.mod h1:z3LCPQ38eedDQSwmsSRW4Y7t2L8Ln16JPQ02lHAdn5k= github.com/weppos/publicsuffix-go v0.13.0/go.mod h1:z3LCPQ38eedDQSwmsSRW4Y7t2L8Ln16JPQ02lHAdn5k= github.com/weppos/publicsuffix-go v0.30.0/go.mod h1:kBi8zwYnR0zrbm8RcuN1o9Fzgpnnn+btVN8uWPMyXAY= From 29977358d73101434e3921040fbebf069d6c4c52 Mon Sep 17 00:00:00 2001 From: ledigang 
Date: Mon, 24 Nov 2025 20:01:30 +0800 Subject: [PATCH 46/55] chore: omit unnecessary reassignment (#6622) Signed-off-by: ledigang --- pkg/core/execute_options.go | 1 - pkg/input/formats/burp/burp.go | 1 - pkg/protocols/common/hosterrorscache/hosterrorscache_test.go | 1 - 3 files changed, 3 deletions(-) diff --git a/pkg/core/execute_options.go b/pkg/core/execute_options.go index df1fe14358..b0a2be1c86 100644 --- a/pkg/core/execute_options.go +++ b/pkg/core/execute_options.go @@ -109,7 +109,6 @@ func (e *Engine) executeTemplateSpray(ctx context.Context, templatesList []*temp defer wp.Wait() for _, template := range templatesList { - template := template select { case <-ctx.Done(): diff --git a/pkg/input/formats/burp/burp.go b/pkg/input/formats/burp/burp.go index 9b2a362dfe..459c6d8a54 100644 --- a/pkg/input/formats/burp/burp.go +++ b/pkg/input/formats/burp/burp.go @@ -43,7 +43,6 @@ func (j *BurpFormat) Parse(input io.Reader, resultsCb formats.ParseReqRespCallba // Print the parsed data for verification for _, item := range items.Items { - item := item binx, err := base64.StdEncoding.DecodeString(item.Request.Raw) if err != nil { return errors.Wrap(err, "could not decode base64") diff --git a/pkg/protocols/common/hosterrorscache/hosterrorscache_test.go b/pkg/protocols/common/hosterrorscache/hosterrorscache_test.go index e0046ff412..6478e2c5e8 100644 --- a/pkg/protocols/common/hosterrorscache/hosterrorscache_test.go +++ b/pkg/protocols/common/hosterrorscache/hosterrorscache_test.go @@ -177,7 +177,6 @@ func TestCacheCheckConcurrent(t *testing.T) { wg := sync.WaitGroup{} for i := 1; i <= 100; i++ { wg.Add(1) - i := i go func() { defer wg.Done() cache.MarkFailed(protoType, ctx, errors.New("no address found for host")) From c1878b613431332834bcff31f7225af2108997b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Do=C4=9Fan=20Can=20Bak=C4=B1r?= Date: Tue, 25 Nov 2025 12:53:13 +0900 Subject: [PATCH 47/55] disable stale workflow for enhancements --- .github/workflows/stale.yaml | 2 
+- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml index efa88506da..1cff20d50b 100644 --- a/.github/workflows/stale.yaml +++ b/.github/workflows/stale.yaml @@ -37,5 +37,5 @@ jobs: it, please comment or feel free to reopen it. close-issue-label: "Status: Abandoned" close-pr-label: "Status: Abandoned" - exempt-issue-labels: "Status: Abandoned" + exempt-issue-labels: "Status: Abandoned,Type: Enhancement" exempt-pr-labels: "Status: Abandoned" From 78ccfc2912d2547d9cfc9e6f4a0b80df2f0e6d51 Mon Sep 17 00:00:00 2001 From: Dwi Siswanto <25837540+dwisiswant0@users.noreply.github.com> Date: Wed, 26 Nov 2025 00:26:08 +0700 Subject: [PATCH 48/55] ci: cache go-rod browser (#6640) Signed-off-by: Dwi Siswanto --- .github/workflows/perf-regression.yaml | 1 + .github/workflows/tests.yaml | 6 ++++++ 2 files changed, 7 insertions(+) diff --git a/.github/workflows/perf-regression.yaml b/.github/workflows/perf-regression.yaml index 8e7e7eed5f..de05a8c921 100644 --- a/.github/workflows/perf-regression.yaml +++ b/.github/workflows/perf-regression.yaml @@ -13,6 +13,7 @@ jobs: steps: - uses: actions/checkout@v5 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - run: make build-test - run: ./bin/nuclei.test -test.run - -test.bench=. 
-test.benchmem ./cmd/nuclei/ | tee $BENCH_OUT env: diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index d616e8b71c..19c330d96f 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -24,6 +24,7 @@ jobs: steps: - uses: actions/checkout@v5 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - uses: projectdiscovery/actions/golangci-lint/v2@v1 tests: @@ -37,6 +38,7 @@ jobs: steps: - uses: actions/checkout@v5 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - run: make vet - run: make build - run: make test @@ -54,6 +56,7 @@ jobs: steps: - uses: actions/checkout@v5 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - name: "Simple" run: go run . working-directory: examples/simple/ @@ -77,6 +80,7 @@ jobs: - uses: actions/checkout@v5 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/setup/python@v1 + - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - run: bash run.sh "${{ matrix.os }}" env: GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" @@ -96,6 +100,7 @@ jobs: - uses: actions/checkout@v5 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/setup/python@v1 + - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - run: bash run.sh env: GITHUB_TOKEN: "${{ secrets.GITHUB_TOKEN }}" @@ -108,6 +113,7 @@ jobs: steps: - uses: actions/checkout@v5 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - run: make template-validate codeql: From 0aba8e5a659584a9a1ca5d8a9b3e5c7606c8f617 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 25 Nov 2025 17:27:49 +0000 Subject: [PATCH 49/55] chore(deps): bump actions/checkout from 5 to 6 in the workflows group Bumps the workflows group with 1 update: 
[actions/checkout](https://github.com/actions/checkout). Updates `actions/checkout` from 5 to 6 - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v5...v6) --- updated-dependencies: - dependency-name: actions/checkout dependency-version: '6' dependency-type: direct:production update-type: version-update:semver-major dependency-group: workflows ... Signed-off-by: dependabot[bot] --- .github/workflows/auto-merge.yaml | 2 +- .github/workflows/compat-checks.yaml | 2 +- .github/workflows/generate-docs.yaml | 2 +- .github/workflows/generate-pgo.yaml | 2 +- .github/workflows/govulncheck.yaml | 2 +- .github/workflows/perf-regression.yaml | 2 +- .github/workflows/perf-test.yaml | 2 +- .github/workflows/release.yaml | 2 +- .github/workflows/tests.yaml | 18 +++++++++--------- 9 files changed, 17 insertions(+), 17 deletions(-) diff --git a/.github/workflows/auto-merge.yaml b/.github/workflows/auto-merge.yaml index ad2890ddaf..f6bb2c5c25 100644 --- a/.github/workflows/auto-merge.yaml +++ b/.github/workflows/auto-merge.yaml @@ -18,7 +18,7 @@ jobs: runs-on: ubuntu-latest if: github.actor == 'dependabot[bot]' steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 with: token: ${{ secrets.DEPENDABOT_PAT }} diff --git a/.github/workflows/compat-checks.yaml b/.github/workflows/compat-checks.yaml index 8a9080b904..093bd6ba01 100644 --- a/.github/workflows/compat-checks.yaml +++ b/.github/workflows/compat-checks.yaml @@ -13,7 +13,7 @@ jobs: permissions: contents: write steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go/compat-checks@v1 with: release-test: true diff --git a/.github/workflows/generate-docs.yaml b/.github/workflows/generate-docs.yaml index a68ff7d972..365ad32f93 100644 --- a/.github/workflows/generate-docs.yaml +++ b/.github/workflows/generate-docs.yaml @@ -11,7 
+11,7 @@ jobs: if: "${{ !endsWith(github.actor, '[bot]') }}" runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/setup/git@v1 - run: make syntax-docs diff --git a/.github/workflows/generate-pgo.yaml b/.github/workflows/generate-pgo.yaml index 322467e704..b0c47d56ec 100644 --- a/.github/workflows/generate-pgo.yaml +++ b/.github/workflows/generate-pgo.yaml @@ -28,7 +28,7 @@ jobs: LIST_FILE: "/tmp/targets-${{ matrix.targets }}.txt" PROFILE_MEM: "/tmp/nuclei-profile-${{ matrix.targets }}-targets" steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/git@v1 - uses: projectdiscovery/actions/setup/go@v1 - name: Generate list diff --git a/.github/workflows/govulncheck.yaml b/.github/workflows/govulncheck.yaml index 11898380ec..38edae2487 100644 --- a/.github/workflows/govulncheck.yaml +++ b/.github/workflows/govulncheck.yaml @@ -16,7 +16,7 @@ jobs: env: OUTPUT: "/tmp/results.sarif" steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - run: go install golang.org/x/vuln/cmd/govulncheck@latest - run: govulncheck -scan package -format sarif ./... 
> $OUTPUT diff --git a/.github/workflows/perf-regression.yaml b/.github/workflows/perf-regression.yaml index de05a8c921..85650a09f4 100644 --- a/.github/workflows/perf-regression.yaml +++ b/.github/workflows/perf-regression.yaml @@ -11,7 +11,7 @@ jobs: env: BENCH_OUT: "/tmp/bench.out" steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - run: make build-test diff --git a/.github/workflows/perf-test.yaml b/.github/workflows/perf-test.yaml index 4ee8408c9d..ff40b824b9 100644 --- a/.github/workflows/perf-test.yaml +++ b/.github/workflows/perf-test.yaml @@ -16,7 +16,7 @@ jobs: LIST_FILE: "/tmp/targets-${{ matrix.count }}.txt" PROFILE_MEM: "/tmp/nuclei-perf-test-${{ matrix.count }}" steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - run: make verify - name: Generate list diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 4d9d412dda..acaecb5969 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -10,7 +10,7 @@ jobs: release: runs-on: ubuntu-latest-16-cores steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 with: fetch-depth: 0 - uses: projectdiscovery/actions/setup/go@v1 diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 19c330d96f..4932339af8 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -22,7 +22,7 @@ jobs: if: "${{ !endsWith(github.actor, '[bot]') }}" runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - uses: projectdiscovery/actions/golangci-lint/v2@v1 @@ -36,7 +36,7 @@ jobs: os: [ubuntu-latest, windows-latest, macOS-latest] runs-on: "${{ matrix.os }}" steps: - - uses: actions/checkout@v5 + - uses: 
actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - run: make vet @@ -54,7 +54,7 @@ jobs: needs: ["tests"] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - name: "Simple" @@ -77,7 +77,7 @@ jobs: os: [ubuntu-latest, windows-latest, macOS-latest] runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/setup/python@v1 - uses: projectdiscovery/actions/cache/go-rod-browser@v1 @@ -97,7 +97,7 @@ jobs: os: [ubuntu-latest, windows-latest, macOS-latest] runs-on: ${{ matrix.os }} steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/setup/python@v1 - uses: projectdiscovery/actions/cache/go-rod-browser@v1 @@ -111,7 +111,7 @@ jobs: needs: ["tests"] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - run: make template-validate @@ -125,7 +125,7 @@ jobs: contents: read security-events: write steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: github/codeql-action/init@v4 with: languages: 'go' @@ -137,7 +137,7 @@ jobs: needs: ["tests"] runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/goreleaser@v1 @@ -149,7 +149,7 @@ jobs: TARGET_URL: "http://scanme.sh/a/?b=c" runs-on: ubuntu-latest steps: - - uses: actions/checkout@v5 + - uses: actions/checkout@v6 - run: make build - name: "Setup environment (push)" if: ${{ github.event_name == 'push' }} From 2978819d0e89702b6a4a85856131180ce5f2c2ab Mon 
Sep 17 00:00:00 2001 From: =?UTF-8?q?Do=C4=9Fan=20Can=20Bak=C4=B1r?= Date: Thu, 27 Nov 2025 12:41:38 +0700 Subject: [PATCH 50/55] do not exempt abandoned issues and prs --- .github/workflows/stale.yaml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/stale.yaml b/.github/workflows/stale.yaml index 1cff20d50b..43e4f0fb45 100644 --- a/.github/workflows/stale.yaml +++ b/.github/workflows/stale.yaml @@ -37,5 +37,4 @@ jobs: it, please comment or feel free to reopen it. close-issue-label: "Status: Abandoned" close-pr-label: "Status: Abandoned" - exempt-issue-labels: "Status: Abandoned,Type: Enhancement" - exempt-pr-labels: "Status: Abandoned" + exempt-issue-labels: "Type: Enhancement" From 7ea72b5268f8caca1d656fab204ca3d0f4b487da Mon Sep 17 00:00:00 2001 From: Dwi Siswanto Date: Fri, 28 Nov 2025 22:26:31 +0700 Subject: [PATCH 51/55] ci: apply free-disk-space on tests Signed-off-by: Dwi Siswanto --- .github/workflows/tests.yaml | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index 4932339af8..e244b2001d 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -39,6 +39,15 @@ jobs: - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/cache/go-rod-browser@v1 + - uses: projectdiscovery/actions/free-disk-space@v1 + with: + llvm: 'false' + php: 'false' + mongodb: 'false' + mysql: 'false' + misc-packages: 'false' + docker-images: 'false' + tools-cache: 'false' - run: make vet - run: make build - run: make test From 7e151de8a3b258c9c49d854b4eeac6dd2d3b79eb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Dec 2025 02:07:25 +0700 Subject: [PATCH 52/55] chore: bump PD modules & update `httputil` calls (#6629) * chore(deps): bump the modules group across 1 directory with 11 updates Bumps the modules group with 11 updates in the / 
directory: | Package | From | To | | --- | --- | --- | | [github.com/projectdiscovery/fastdialer](https://github.com/projectdiscovery/fastdialer) | `0.4.16` | `0.4.17` | | [github.com/projectdiscovery/hmap](https://github.com/projectdiscovery/hmap) | `0.0.95` | `0.0.96` | | [github.com/projectdiscovery/retryabledns](https://github.com/projectdiscovery/retryabledns) | `1.0.108` | `1.0.109` | | [github.com/projectdiscovery/retryablehttp-go](https://github.com/projectdiscovery/retryablehttp-go) | `1.0.131` | `1.0.132` | | [github.com/projectdiscovery/gologger](https://github.com/projectdiscovery/gologger) | `1.1.60` | `1.1.61` | | [github.com/projectdiscovery/networkpolicy](https://github.com/projectdiscovery/networkpolicy) | `0.1.28` | `0.1.29` | | [github.com/projectdiscovery/tlsx](https://github.com/projectdiscovery/tlsx) | `1.2.1` | `1.2.2` | | [github.com/projectdiscovery/useragent](https://github.com/projectdiscovery/useragent) | `0.0.102` | `0.0.103` | | [github.com/projectdiscovery/utils](https://github.com/projectdiscovery/utils) | `0.6.1` | `0.7.1` | | [github.com/projectdiscovery/wappalyzergo](https://github.com/projectdiscovery/wappalyzergo) | `0.2.55` | `0.2.56` | | [github.com/projectdiscovery/cdncheck](https://github.com/projectdiscovery/cdncheck) | `1.2.10` | `1.2.11` | Updates `github.com/projectdiscovery/fastdialer` from 0.4.16 to 0.4.17 - [Release notes](https://github.com/projectdiscovery/fastdialer/releases) - [Commits](https://github.com/projectdiscovery/fastdialer/compare/v0.4.16...v0.4.17) Updates `github.com/projectdiscovery/hmap` from 0.0.95 to 0.0.96 - [Release notes](https://github.com/projectdiscovery/hmap/releases) - [Commits](https://github.com/projectdiscovery/hmap/compare/v0.0.95...v0.0.96) Updates `github.com/projectdiscovery/retryabledns` from 1.0.108 to 1.0.109 - [Release notes](https://github.com/projectdiscovery/retryabledns/releases) - [Commits](https://github.com/projectdiscovery/retryabledns/compare/v1.0.108...v1.0.109) Updates 
`github.com/projectdiscovery/retryablehttp-go` from 1.0.131 to 1.0.132 - [Release notes](https://github.com/projectdiscovery/retryablehttp-go/releases) - [Commits](https://github.com/projectdiscovery/retryablehttp-go/compare/v1.0.131...v1.0.132) Updates `github.com/projectdiscovery/gologger` from 1.1.60 to 1.1.61 - [Release notes](https://github.com/projectdiscovery/gologger/releases) - [Commits](https://github.com/projectdiscovery/gologger/compare/v1.1.60...v1.1.61) Updates `github.com/projectdiscovery/networkpolicy` from 0.1.28 to 0.1.29 - [Release notes](https://github.com/projectdiscovery/networkpolicy/releases) - [Commits](https://github.com/projectdiscovery/networkpolicy/compare/v0.1.28...v0.1.29) Updates `github.com/projectdiscovery/tlsx` from 1.2.1 to 1.2.2 - [Release notes](https://github.com/projectdiscovery/tlsx/releases) - [Changelog](https://github.com/projectdiscovery/tlsx/blob/main/.goreleaser.yml) - [Commits](https://github.com/projectdiscovery/tlsx/compare/v1.2.1...v1.2.2) Updates `github.com/projectdiscovery/useragent` from 0.0.102 to 0.0.103 - [Release notes](https://github.com/projectdiscovery/useragent/releases) - [Commits](https://github.com/projectdiscovery/useragent/compare/v0.0.102...v0.0.103) Updates `github.com/projectdiscovery/utils` from 0.6.1 to 0.7.1 - [Release notes](https://github.com/projectdiscovery/utils/releases) - [Changelog](https://github.com/projectdiscovery/utils/blob/main/CHANGELOG.md) - [Commits](https://github.com/projectdiscovery/utils/compare/v0.6.1...v0.7.1) Updates `github.com/projectdiscovery/wappalyzergo` from 0.2.55 to 0.2.56 - [Release notes](https://github.com/projectdiscovery/wappalyzergo/releases) - [Commits](https://github.com/projectdiscovery/wappalyzergo/compare/v0.2.55...v0.2.56) Updates `github.com/projectdiscovery/cdncheck` from 1.2.10 to 1.2.11 - [Release notes](https://github.com/projectdiscovery/cdncheck/releases) - [Changelog](https://github.com/projectdiscovery/cdncheck/blob/main/.goreleaser.yaml) - 
[Commits](https://github.com/projectdiscovery/cdncheck/compare/v1.2.10...v1.2.11) --- updated-dependencies: - dependency-name: github.com/projectdiscovery/fastdialer dependency-version: 0.4.17 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/hmap dependency-version: 0.0.96 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/retryabledns dependency-version: 1.0.109 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/retryablehttp-go dependency-version: 1.0.132 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/gologger dependency-version: 1.1.61 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/networkpolicy dependency-version: 0.1.29 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/tlsx dependency-version: 1.2.2 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/useragent dependency-version: 0.0.103 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/utils dependency-version: 0.7.1 dependency-type: direct:production update-type: version-update:semver-minor dependency-group: modules - dependency-name: github.com/projectdiscovery/wappalyzergo dependency-version: 0.2.56 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: 
github.com/projectdiscovery/cdncheck dependency-version: 1.2.11 dependency-type: indirect update-type: version-update:semver-patch dependency-group: modules ... Signed-off-by: dependabot[bot] * chore: update utils.httputil calls Signed-off-by: Dwi Siswanto * chore(deps): bump github.com/projectdiscovery/utils => v0.7.3 Signed-off-by: Dwi Siswanto --------- Signed-off-by: dependabot[bot] Signed-off-by: Dwi Siswanto Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Dwi Siswanto --- go.mod | 22 ++++----- go.sum | 46 ++++++++++--------- .../common/protocolstate/memguardian.go | 8 ++-- pkg/protocols/http/request.go | 10 ++-- 4 files changed, 44 insertions(+), 42 deletions(-) diff --git a/go.mod b/go.mod index 735008693c..6043a9b47c 100644 --- a/go.mod +++ b/go.mod @@ -22,12 +22,12 @@ require ( github.com/olekukonko/tablewriter v1.0.8 github.com/pkg/errors v0.9.1 github.com/projectdiscovery/clistats v0.1.1 - github.com/projectdiscovery/fastdialer v0.4.16 - github.com/projectdiscovery/hmap v0.0.95 + github.com/projectdiscovery/fastdialer v0.4.17 + github.com/projectdiscovery/hmap v0.0.96 github.com/projectdiscovery/interactsh v1.2.4 github.com/projectdiscovery/rawhttp v0.1.90 - github.com/projectdiscovery/retryabledns v1.0.108 - github.com/projectdiscovery/retryablehttp-go v1.0.131 + github.com/projectdiscovery/retryabledns v1.0.109 + github.com/projectdiscovery/retryablehttp-go v1.0.132 github.com/projectdiscovery/yamldoc-go v1.0.6 github.com/remeh/sizedwaitgroup v1.0.0 github.com/rs/xid v1.6.0 @@ -96,21 +96,21 @@ require ( github.com/projectdiscovery/gcache v0.0.0-20241015120333-12546c6e3f4c github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb github.com/projectdiscovery/goflags v0.1.74 - github.com/projectdiscovery/gologger v1.1.60 + github.com/projectdiscovery/gologger v1.1.61 github.com/projectdiscovery/gostruct v0.0.2 github.com/projectdiscovery/gozero v0.1.1-0.20251027191944-a4ea43320b81 
github.com/projectdiscovery/httpx v1.7.2 github.com/projectdiscovery/mapcidr v1.1.97 github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5 - github.com/projectdiscovery/networkpolicy v0.1.28 + github.com/projectdiscovery/networkpolicy v0.1.29 github.com/projectdiscovery/ratelimit v0.0.82 github.com/projectdiscovery/rdap v0.9.0 github.com/projectdiscovery/sarif v0.0.1 - github.com/projectdiscovery/tlsx v1.2.1 + github.com/projectdiscovery/tlsx v1.2.2 github.com/projectdiscovery/uncover v1.1.0 - github.com/projectdiscovery/useragent v0.0.102 - github.com/projectdiscovery/utils v0.6.1 - github.com/projectdiscovery/wappalyzergo v0.2.55 + github.com/projectdiscovery/useragent v0.0.103 + github.com/projectdiscovery/utils v0.7.3 + github.com/projectdiscovery/wappalyzergo v0.2.56 github.com/redis/go-redis/v9 v9.11.0 github.com/seh-msft/burpxml v1.0.1 github.com/shurcooL/graphql v0.0.0-20230722043721-ed46e5a46466 @@ -322,7 +322,7 @@ require ( github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect github.com/projectdiscovery/asnmap v1.1.1 // indirect github.com/projectdiscovery/blackrock v0.0.1 // indirect - github.com/projectdiscovery/cdncheck v1.2.10 // indirect + github.com/projectdiscovery/cdncheck v1.2.11 // indirect github.com/projectdiscovery/freeport v0.0.7 // indirect github.com/projectdiscovery/ldapserver v1.0.2-0.20240219154113-dcc758ebc0cb // indirect github.com/projectdiscovery/machineid v0.0.0-20240226150047-2e2c51e35983 // indirect diff --git a/go.sum b/go.sum index 52d7ce734d..798cbfc736 100644 --- a/go.sum +++ b/go.sum @@ -819,14 +819,14 @@ github.com/projectdiscovery/asnmap v1.1.1 h1:ImJiKIaACOT7HPx4Pabb5dksolzaFYsD1kI github.com/projectdiscovery/asnmap v1.1.1/go.mod h1:QT7jt9nQanj+Ucjr9BqGr1Q2veCCKSAVyUzLXfEcQ60= github.com/projectdiscovery/blackrock v0.0.1 h1:lHQqhaaEFjgf5WkuItbpeCZv2DUIE45k0VbGJyft6LQ= github.com/projectdiscovery/blackrock v0.0.1/go.mod h1:ANUtjDfaVrqB453bzToU+YB4cUbvBRpLvEwoWIwlTss= 
-github.com/projectdiscovery/cdncheck v1.2.10 h1:Ox86LS8RFjq6pYNTP3Eqdawlor/h+bnb7BTEKBpzFyM= -github.com/projectdiscovery/cdncheck v1.2.10/go.mod h1:ibL9HoZs2JYTEUBOZo4f+W+XEzQifFLOf4bpgFStgj4= +github.com/projectdiscovery/cdncheck v1.2.11 h1:ctqccij5PeobUjcTABZyGvbULxa3WT/hlL+yN1mSbyo= +github.com/projectdiscovery/cdncheck v1.2.11/go.mod h1:jyWMON16zbfCgi1Wp7nsGjZNFdRstYdK3IqjtnHB82Q= github.com/projectdiscovery/clistats v0.1.1 h1:8mwbdbwTU4aT88TJvwIzTpiNeow3XnAB72JIg66c8wE= github.com/projectdiscovery/clistats v0.1.1/go.mod h1:4LtTC9Oy//RiuT1+76MfTg8Hqs7FQp1JIGBM3nHK6a0= github.com/projectdiscovery/dsl v0.8.5 h1:f3opg8Jzikwx6VXC+CbgseUmSUqdfCnfGT08Syhp0sw= github.com/projectdiscovery/dsl v0.8.5/go.mod h1:AuUq18cpLJJ0uAjJZKaLrdyAgDHrnQAjLMZtPEyMoJw= -github.com/projectdiscovery/fastdialer v0.4.16 h1:rmCNr5N/9KTm0nSYjSuQ5j3aXmNIPf6HhJlAhN/7NRI= -github.com/projectdiscovery/fastdialer v0.4.16/go.mod h1:X0l4+KqOE/aIL00pyTnBj4pWQDPYnCGL7cwZsJu6SCQ= +github.com/projectdiscovery/fastdialer v0.4.17 h1:Cx3HdZaWJC8+qUBDvEarP7qmeLLJ9SqiXbQJD01oXxY= +github.com/projectdiscovery/fastdialer v0.4.17/go.mod h1:O47aw/ML7boXKTVKpfjBrTeBH8F4LFuu8okvOzOOBsg= github.com/projectdiscovery/fasttemplate v0.0.2 h1:h2cISk5xDhlJEinlBQS6RRx0vOlOirB2y3Yu4PJzpiA= github.com/projectdiscovery/fasttemplate v0.0.2/go.mod h1:XYWWVMxnItd+r0GbjA1GCsUopMw1/XusuQxdyAIHMCw= github.com/projectdiscovery/freeport v0.0.7 h1:Q6uXo/j8SaV/GlAHkEYQi8WQoPXyJWxyspx+aFmz9Qk= @@ -837,14 +837,14 @@ github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb h1:rutG90 github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb/go.mod h1:FLjF1DmZ+POoGEiIQdWuYVwS++C/GwpX8YaCsTSm1RY= github.com/projectdiscovery/goflags v0.1.74 h1:n85uTRj5qMosm0PFBfsvOL24I7TdWRcWq/1GynhXS7c= github.com/projectdiscovery/goflags v0.1.74/go.mod h1:UMc9/7dFz2oln+10tv6cy+7WZKTHf9UGhaNkF95emh4= -github.com/projectdiscovery/gologger v1.1.60 h1:N2Zyu4WA2RgUeqSAdfhv/CLS4de8lDDc2+IdLKcAd5U= 
-github.com/projectdiscovery/gologger v1.1.60/go.mod h1:8FJFKmo0N4ITIH3n1Jy4ze6ijr+mA3t78g+VpN8uBRU= +github.com/projectdiscovery/gologger v1.1.61 h1:+jJ0Z0x6X9s69IRjbtsnOfMD8YTFTVADHMKFNu6dUGg= +github.com/projectdiscovery/gologger v1.1.61/go.mod h1:EfuwZ1lQX7kH4rgNo0nzk5XPh2j2gpYEQUi9tkoJDJw= github.com/projectdiscovery/gostruct v0.0.2 h1:s8gP8ApugGM4go1pA+sVlPDXaWqNP5BBDDSv7VEdG1M= github.com/projectdiscovery/gostruct v0.0.2/go.mod h1:H86peL4HKwMXcQQtEa6lmC8FuD9XFt6gkNR0B/Mu5PE= github.com/projectdiscovery/gozero v0.1.1-0.20251027191944-a4ea43320b81 h1:yHh46pJovYbyiaHCV7oIDinFmy+Fyq36H1BowJgb0M0= github.com/projectdiscovery/gozero v0.1.1-0.20251027191944-a4ea43320b81/go.mod h1:9lmGPBDGZVANzCGjQg+V32n8Y3Cgjo/4kT0E88lsVTI= -github.com/projectdiscovery/hmap v0.0.95 h1:OO6MCySlK2xMzvJmsYUwdaI7YWv/U437OtsN0Ovw72k= -github.com/projectdiscovery/hmap v0.0.95/go.mod h1:KiTRdGd/GzX7uaoFWPrPBxPf4X/uZ9HTQ9dQ8x7x1bo= +github.com/projectdiscovery/hmap v0.0.96 h1:gxpKGZc802/QCROuaj2l3HnfV3vzZ7AMZrK4tncKXTQ= +github.com/projectdiscovery/hmap v0.0.96/go.mod h1:x7lxwipd1roSXnpjnRqlZUMw8OhUwA96YWQieyWVHJk= github.com/projectdiscovery/httpx v1.7.2 h1:AfJ5wjhKOlywX+x+gPO4iPqgFEyoIJwvXsLpQQgs4+c= github.com/projectdiscovery/httpx v1.7.2/go.mod h1:hm0uTQGUTU1K0AQ1NQVfFrKfiS4u9Ynh8wArdXUXBS4= github.com/projectdiscovery/interactsh v1.2.4 h1:WUSj+fxbcV53J64oIAhbYzCKD1w/IyenyRBhkI5jiqI= @@ -857,32 +857,34 @@ github.com/projectdiscovery/mapcidr v1.1.97 h1:7FkxNNVXp+m1rIu5Nv/2SrF9k4+LwP8Qu github.com/projectdiscovery/mapcidr v1.1.97/go.mod h1:9dgTJh1SP02gYZdpzMjm6vtYFkEHQHoTyaVNvaeJ7lA= github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5 h1:L/e8z8yw1pfT6bg35NiN7yd1XKtJap5Nk6lMwQ0RNi8= github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5/go.mod h1:pGW2ncnTxTxHtP9wzcIJAB+3/NMp6IiuQWd2NK7K+oc= -github.com/projectdiscovery/networkpolicy v0.1.28 h1:Rwg8iZmM4n+CRWyUClthaSrTqDAW8zBI2HULRO1CF3k= -github.com/projectdiscovery/networkpolicy v0.1.28/go.mod 
h1:/3XfgnxKNuxaTZc6wZ/Pq6fiKvK8N4OQyLmfcUeDk2E= +github.com/projectdiscovery/networkpolicy v0.1.29 h1:wZoCTPJl1ry98Cgwh4vwMSKrirHDd1Yfw7U77kIDkK4= +github.com/projectdiscovery/networkpolicy v0.1.29/go.mod h1:VDp7uQk31gvVG/y9E0liCRlqr/in+vQarODoElXGLOU= github.com/projectdiscovery/ratelimit v0.0.82 h1:rtO5SQf5uQFu5zTahTaTcO06OxmG8EIF1qhdFPIyTak= github.com/projectdiscovery/ratelimit v0.0.82/go.mod h1:z076BrLkBb5yS7uhHNoCTf8X/BvFSGRxwQ8EzEL9afM= github.com/projectdiscovery/rawhttp v0.1.90 h1:LOSZ6PUH08tnKmWsIwvwv1Z/4zkiYKYOSZ6n+8RFKtw= github.com/projectdiscovery/rawhttp v0.1.90/go.mod h1:VZYAM25UI/wVB3URZ95ZaftgOnsbphxyAw/XnQRRz4Y= github.com/projectdiscovery/rdap v0.9.0 h1:wPhHx5pQ2QI+WGhyNb2PjhTl0NtB39Nk7YFZ9cp8ZGA= github.com/projectdiscovery/rdap v0.9.0/go.mod h1:zk4yrJFQ2Hy36Aqk+DvotYQxYAeALaCJ5ORySkff36Q= -github.com/projectdiscovery/retryabledns v1.0.108 h1:47LYRW2LY/0cDnZQfUhoOHNxe9rNc9NQ9ZfNrV/GbyM= -github.com/projectdiscovery/retryabledns v1.0.108/go.mod h1:j7H7K6JZePh9PeNleeRUtDSrkUKMpwDhZw3Ogewzio8= -github.com/projectdiscovery/retryablehttp-go v1.0.131 h1:OU2x9fVDIWnDoKvT8tKbaCONTL1gHnTOIFQFXmnEOE0= -github.com/projectdiscovery/retryablehttp-go v1.0.131/go.mod h1:ttW+Zka1L8IwEUhJ4zArbC+pKZum7b47fzV+4VGN6cA= +github.com/projectdiscovery/retryabledns v1.0.109 h1:2tDpoTRFlWmOsiH5Z/qVGrOzlB/hUO9PF2s6ol9BWcQ= +github.com/projectdiscovery/retryabledns v1.0.109/go.mod h1:yGKECPB69MWT1EZwWj5zejZflgP+KUDd5dBYZm3igxs= +github.com/projectdiscovery/retryablehttp-go v1.0.132 h1:h4sVcJE9GsLnxfzyXy7pa1PXEf4QwscbO19EPE1mNDo= +github.com/projectdiscovery/retryablehttp-go v1.0.132/go.mod h1:vf8+meeaGFjglVSDQvNISQtAmDKpi4FDjyb4+eFUED4= github.com/projectdiscovery/sarif v0.0.1 h1:C2Tyj0SGOKbCLgHrx83vaE6YkzXEVrMXYRGLkKCr/us= github.com/projectdiscovery/sarif v0.0.1/go.mod h1:cEYlDu8amcPf6b9dSakcz2nNnJsoz4aR6peERwV+wuQ= github.com/projectdiscovery/stringsutil v0.0.2 h1:uzmw3IVLJSMW1kEg8eCStG/cGbYYZAja8BH3LqqJXMA= github.com/projectdiscovery/stringsutil v0.0.2/go.mod 
h1:EJ3w6bC5fBYjVou6ryzodQq37D5c6qbAYQpGmAy+DC0= -github.com/projectdiscovery/tlsx v1.2.1 h1:R8QgKb/vxd6Y0cfGFBYs4nn0zodHABeeLPqJjs2mNrA= -github.com/projectdiscovery/tlsx v1.2.1/go.mod h1:p19UHGQ6bvcbvhO4NvYBKOxlE4QvrUaectx9g/Mm3JA= +github.com/projectdiscovery/tlsx v1.2.2 h1:Y96QBqeD2anpzEtBl4kqNbwzXh2TrzJuXfgiBLvK+SE= +github.com/projectdiscovery/tlsx v1.2.2/go.mod h1:ZJl9F1sSl0sdwE+lR0yuNHVX4Zx6tCSTqnNxnHCFZB4= github.com/projectdiscovery/uncover v1.1.0 h1:UDp/qLZn78YZb6VPoOrfyP1vz+ojEx8VrTTyjjRt9UU= github.com/projectdiscovery/uncover v1.1.0/go.mod h1:2rXINmMe/lmVAt2jn9CpAOs9An57/JEeLZobY3Z9kUs= -github.com/projectdiscovery/useragent v0.0.102 h1:Xfr8a7LQhIu0zeSz5gBxGCdyuqZbhkOMAEQUcEZXyBU= -github.com/projectdiscovery/useragent v0.0.102/go.mod h1:DIfLRBKZ6dLhHRnMYkxdg6Jpu0kpE3pJlMG94dsIchY= -github.com/projectdiscovery/utils v0.6.1 h1:9bf3J2G4WJMULGm4Xq7+96+Uj4QpYID/tNnzberR6RE= -github.com/projectdiscovery/utils v0.6.1/go.mod h1:j4Fb6PDir9PcTxLOL9cpSVDPVKtLTZwdVxxMAeG0JjA= -github.com/projectdiscovery/wappalyzergo v0.2.55 h1:Yw8bSrEH/+prUrkb6c4NMwi5m2Sso28nrdy4vFxYn8I= -github.com/projectdiscovery/wappalyzergo v0.2.55/go.mod h1:lwuDLdAqWDZ1IL8OQnoNQ0t17UP9AQSvVuFcDAm4FpQ= +github.com/projectdiscovery/useragent v0.0.103 h1:mtHA4PK50ZH89bMWVCdRNac7f6kpfMet+sva6pSzzEo= +github.com/projectdiscovery/useragent v0.0.103/go.mod h1:NW+eQMkBYT5QdRgJ6ckKLhQ23D5I/Sc51Y43ys80+ZU= +github.com/projectdiscovery/utils v0.7.1 h1:3zzDzA0T6SxLripNGoTbO/znDCDWE2xlk9nazRCLBV4= +github.com/projectdiscovery/utils v0.7.1/go.mod h1:WrTQBt9Hfg5sH7q92qXUToUOhD7rWyHiWk2Gh/l1Mv8= +github.com/projectdiscovery/utils v0.7.3 h1:kX+77AA58yK6EZgkTRJEnK9V/7AZYzlXdcu/o/kJhFs= +github.com/projectdiscovery/utils v0.7.3/go.mod h1:uDdQ3/VWomai98l+a3Ye/srDXdJ4xUIar/mSXlQ9gBM= +github.com/projectdiscovery/wappalyzergo v0.2.56 h1:KyZSQ58NjqU5k0//JZUUuVPsruDFXz55ezHpjLKCy68= +github.com/projectdiscovery/wappalyzergo v0.2.56/go.mod h1:lwuDLdAqWDZ1IL8OQnoNQ0t17UP9AQSvVuFcDAm4FpQ= 
github.com/projectdiscovery/yamldoc-go v1.0.6 h1:GCEdIRlQjDux28xTXKszM7n3jlMf152d5nqVpVoetas= github.com/projectdiscovery/yamldoc-go v1.0.6/go.mod h1:R5lWrNzP+7Oyn77NDVPnBsxx2/FyQZBBkIAaSaCQFxw= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= diff --git a/pkg/protocols/common/protocolstate/memguardian.go b/pkg/protocols/common/protocolstate/memguardian.go index 2f31f4ca7d..c9f2c51a2d 100644 --- a/pkg/protocols/common/protocolstate/memguardian.go +++ b/pkg/protocols/common/protocolstate/memguardian.go @@ -96,7 +96,7 @@ func GlobalGuardBytesBufferAlloc() error { defer muGlobalChange.Unlock() // if current capacity was not reduced decrease it - if MaxBytesBufferAllocOnLowMemory > 0 && httputil.DefaultBytesBufferAlloc == httputil.GetPoolSize() { + if MaxBytesBufferAllocOnLowMemory > 0 && httputil.DefaultBufferSize == httputil.GetPoolSize() { gologger.Debug().Msgf("reducing bytes.buffer pool size to: %d", MaxBytesBufferAllocOnLowMemory) delta := httputil.GetPoolSize() - int64(MaxBytesBufferAllocOnLowMemory) return httputil.ChangePoolSize(-delta) @@ -112,9 +112,9 @@ func GlobalRestoreBytesBufferAlloc() { } defer muGlobalChange.Unlock() - if httputil.DefaultBytesBufferAlloc != httputil.GetPoolSize() { - delta := httputil.DefaultBytesBufferAlloc - httputil.GetPoolSize() - gologger.Debug().Msgf("restoring bytes.buffer pool size to: %d", httputil.DefaultBytesBufferAlloc) + if httputil.DefaultBufferSize != httputil.GetPoolSize() { + delta := httputil.DefaultBufferSize - httputil.GetPoolSize() + gologger.Debug().Msgf("restoring bytes.buffer pool size to: %d", httputil.DefaultBufferSize) _ = httputil.ChangePoolSize(delta) } } diff --git a/pkg/protocols/http/request.go b/pkg/protocols/http/request.go index 4a97db7ddd..3137a552dd 100644 --- a/pkg/protocols/http/request.go +++ b/pkg/protocols/http/request.go @@ -948,7 +948,7 @@ func (request *Request) executeRequest(input *contextargs.Context, generatedRequ onceFunc := 
sync.OnceFunc(func() { // if nuclei-project is enabled store the response if not previously done if request.options.ProjectFile != nil && !fromCache { - if err := request.options.ProjectFile.Set(dumpedRequest, resp, respChain.Body().Bytes()); err != nil { + if err := request.options.ProjectFile.Set(dumpedRequest, resp, respChain.BodyBytes()); err != nil { errx = errors.Wrap(err, "could not store in project file") } } @@ -962,7 +962,7 @@ func (request *Request) executeRequest(input *contextargs.Context, generatedRequ } // log request stats - request.options.Output.RequestStatsLog(strconv.Itoa(respChain.Response().StatusCode), respChain.FullResponse().String()) + request.options.Output.RequestStatsLog(strconv.Itoa(respChain.Response().StatusCode), respChain.FullResponseString()) // save response to projectfile onceFunc() @@ -1003,7 +1003,7 @@ func (request *Request) executeRequest(input *contextargs.Context, generatedRequ } } - outputEvent := request.responseToDSLMap(respChain.Response(), input.MetaInput.Input, matchedURL, convUtil.String(dumpedRequest), respChain.FullResponse().String(), respChain.Body().String(), respChain.Headers().String(), duration, generatedRequest.meta) + outputEvent := request.responseToDSLMap(respChain.Response(), input.MetaInput.Input, matchedURL, convUtil.String(dumpedRequest), respChain.FullResponseString(), respChain.BodyString(), respChain.HeadersString(), duration, generatedRequest.meta) // add response fields to template context and merge templatectx variables to output event request.options.AddTemplateVars(input.MetaInput, request.Type(), request.ID, outputEvent) if request.options.HasTemplateCtx(input.MetaInput) { @@ -1066,7 +1066,7 @@ func (request *Request) executeRequest(input *contextargs.Context, generatedRequ responseContentType := respChain.Response().Header.Get("Content-Type") isResponseTruncated := request.MaxSize > 0 && respChain.Body().Len() >= request.MaxSize - dumpResponse(event, request, 
respChain.FullResponse().Bytes(), formedURL, responseContentType, isResponseTruncated, input.MetaInput.Input) + dumpResponse(event, request, respChain.FullResponseBytes(), formedURL, responseContentType, isResponseTruncated, input.MetaInput.Input) callback(event) @@ -1080,7 +1080,7 @@ func (request *Request) executeRequest(input *contextargs.Context, generatedRequ StatusCode: respChain.Response().StatusCode, Matched: event.HasResults(), RawRequest: string(dumpedRequest), - RawResponse: respChain.FullResponse().String(), + RawResponse: respChain.FullResponseString(), Severity: request.options.TemplateInfo.SeverityHolder.Severity.String(), }) } From 3dab87bf773d5e89d9101307b0e14f40d38ab87c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Dec 2025 05:35:17 +0000 Subject: [PATCH 53/55] chore(deps): bump the modules group with 11 updates Bumps the modules group with 11 updates: | Package | From | To | | --- | --- | --- | | [github.com/projectdiscovery/fastdialer](https://github.com/projectdiscovery/fastdialer) | `0.4.17` | `0.4.18` | | [github.com/projectdiscovery/hmap](https://github.com/projectdiscovery/hmap) | `0.0.96` | `0.0.97` | | [github.com/projectdiscovery/retryabledns](https://github.com/projectdiscovery/retryabledns) | `1.0.109` | `1.0.110` | | [github.com/projectdiscovery/retryablehttp-go](https://github.com/projectdiscovery/retryablehttp-go) | `1.0.132` | `1.0.133` | | [github.com/projectdiscovery/dsl](https://github.com/projectdiscovery/dsl) | `0.8.5` | `0.8.6` | | [github.com/projectdiscovery/gologger](https://github.com/projectdiscovery/gologger) | `1.1.61` | `1.1.62` | | [github.com/projectdiscovery/networkpolicy](https://github.com/projectdiscovery/networkpolicy) | `0.1.29` | `0.1.30` | | [github.com/projectdiscovery/uncover](https://github.com/projectdiscovery/uncover) | `1.1.0` | `1.2.0` | | [github.com/projectdiscovery/useragent](https://github.com/projectdiscovery/useragent) | 
`0.0.103` | `0.0.104` | | [github.com/projectdiscovery/wappalyzergo](https://github.com/projectdiscovery/wappalyzergo) | `0.2.56` | `0.2.57` | | [github.com/projectdiscovery/cdncheck](https://github.com/projectdiscovery/cdncheck) | `1.2.11` | `1.2.12` | Updates `github.com/projectdiscovery/fastdialer` from 0.4.17 to 0.4.18 - [Release notes](https://github.com/projectdiscovery/fastdialer/releases) - [Commits](https://github.com/projectdiscovery/fastdialer/compare/v0.4.17...v0.4.18) Updates `github.com/projectdiscovery/hmap` from 0.0.96 to 0.0.97 - [Release notes](https://github.com/projectdiscovery/hmap/releases) - [Commits](https://github.com/projectdiscovery/hmap/compare/v0.0.96...v0.0.97) Updates `github.com/projectdiscovery/retryabledns` from 1.0.109 to 1.0.110 - [Release notes](https://github.com/projectdiscovery/retryabledns/releases) - [Commits](https://github.com/projectdiscovery/retryabledns/compare/v1.0.109...v1.0.110) Updates `github.com/projectdiscovery/retryablehttp-go` from 1.0.132 to 1.0.133 - [Release notes](https://github.com/projectdiscovery/retryablehttp-go/releases) - [Commits](https://github.com/projectdiscovery/retryablehttp-go/compare/v1.0.132...v1.0.133) Updates `github.com/projectdiscovery/dsl` from 0.8.5 to 0.8.6 - [Release notes](https://github.com/projectdiscovery/dsl/releases) - [Commits](https://github.com/projectdiscovery/dsl/compare/v0.8.5...v0.8.6) Updates `github.com/projectdiscovery/gologger` from 1.1.61 to 1.1.62 - [Release notes](https://github.com/projectdiscovery/gologger/releases) - [Commits](https://github.com/projectdiscovery/gologger/compare/v1.1.61...v1.1.62) Updates `github.com/projectdiscovery/networkpolicy` from 0.1.29 to 0.1.30 - [Release notes](https://github.com/projectdiscovery/networkpolicy/releases) - [Commits](https://github.com/projectdiscovery/networkpolicy/compare/v0.1.29...v0.1.30) Updates `github.com/projectdiscovery/uncover` from 1.1.0 to 1.2.0 - [Release 
notes](https://github.com/projectdiscovery/uncover/releases) - [Commits](https://github.com/projectdiscovery/uncover/compare/v1.1.0...v1.2.0) Updates `github.com/projectdiscovery/useragent` from 0.0.103 to 0.0.104 - [Release notes](https://github.com/projectdiscovery/useragent/releases) - [Commits](https://github.com/projectdiscovery/useragent/compare/v0.0.103...v0.0.104) Updates `github.com/projectdiscovery/wappalyzergo` from 0.2.56 to 0.2.57 - [Release notes](https://github.com/projectdiscovery/wappalyzergo/releases) - [Commits](https://github.com/projectdiscovery/wappalyzergo/compare/v0.2.56...v0.2.57) Updates `github.com/projectdiscovery/cdncheck` from 1.2.11 to 1.2.12 - [Release notes](https://github.com/projectdiscovery/cdncheck/releases) - [Commits](https://github.com/projectdiscovery/cdncheck/compare/v1.2.11...v1.2.12) --- updated-dependencies: - dependency-name: github.com/projectdiscovery/fastdialer dependency-version: 0.4.18 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/hmap dependency-version: 0.0.97 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/retryabledns dependency-version: 1.0.110 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/retryablehttp-go dependency-version: 1.0.133 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/dsl dependency-version: 0.8.6 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/gologger dependency-version: 1.1.62 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: 
github.com/projectdiscovery/networkpolicy dependency-version: 0.1.30 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/uncover dependency-version: 1.2.0 dependency-type: direct:production update-type: version-update:semver-minor dependency-group: modules - dependency-name: github.com/projectdiscovery/useragent dependency-version: 0.0.104 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/wappalyzergo dependency-version: 0.2.57 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: modules - dependency-name: github.com/projectdiscovery/cdncheck dependency-version: 1.2.12 dependency-type: indirect update-type: version-update:semver-patch dependency-group: modules ... Signed-off-by: dependabot[bot] --- go.mod | 24 +++++++++++++----------- go.sum | 50 ++++++++++++++++++++++++++------------------------ 2 files changed, 39 insertions(+), 35 deletions(-) diff --git a/go.mod b/go.mod index 6043a9b47c..2317c60175 100644 --- a/go.mod +++ b/go.mod @@ -22,12 +22,12 @@ require ( github.com/olekukonko/tablewriter v1.0.8 github.com/pkg/errors v0.9.1 github.com/projectdiscovery/clistats v0.1.1 - github.com/projectdiscovery/fastdialer v0.4.17 - github.com/projectdiscovery/hmap v0.0.96 + github.com/projectdiscovery/fastdialer v0.4.18 + github.com/projectdiscovery/hmap v0.0.97 github.com/projectdiscovery/interactsh v1.2.4 github.com/projectdiscovery/rawhttp v0.1.90 - github.com/projectdiscovery/retryabledns v1.0.109 - github.com/projectdiscovery/retryablehttp-go v1.0.132 + github.com/projectdiscovery/retryabledns v1.0.110 + github.com/projectdiscovery/retryablehttp-go v1.0.133 github.com/projectdiscovery/yamldoc-go v1.0.6 github.com/remeh/sizedwaitgroup v1.0.0 github.com/rs/xid v1.6.0 @@ -91,26 +91,26 @@ require ( github.com/microsoft/go-mssqldb v1.9.2 
github.com/ory/dockertest/v3 v3.12.0 github.com/praetorian-inc/fingerprintx v1.1.15 - github.com/projectdiscovery/dsl v0.8.5 + github.com/projectdiscovery/dsl v0.8.6 github.com/projectdiscovery/fasttemplate v0.0.2 github.com/projectdiscovery/gcache v0.0.0-20241015120333-12546c6e3f4c github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb github.com/projectdiscovery/goflags v0.1.74 - github.com/projectdiscovery/gologger v1.1.61 + github.com/projectdiscovery/gologger v1.1.62 github.com/projectdiscovery/gostruct v0.0.2 github.com/projectdiscovery/gozero v0.1.1-0.20251027191944-a4ea43320b81 github.com/projectdiscovery/httpx v1.7.2 github.com/projectdiscovery/mapcidr v1.1.97 github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5 - github.com/projectdiscovery/networkpolicy v0.1.29 + github.com/projectdiscovery/networkpolicy v0.1.30 github.com/projectdiscovery/ratelimit v0.0.82 github.com/projectdiscovery/rdap v0.9.0 github.com/projectdiscovery/sarif v0.0.1 github.com/projectdiscovery/tlsx v1.2.2 - github.com/projectdiscovery/uncover v1.1.0 - github.com/projectdiscovery/useragent v0.0.103 + github.com/projectdiscovery/uncover v1.2.0 + github.com/projectdiscovery/useragent v0.0.104 github.com/projectdiscovery/utils v0.7.3 - github.com/projectdiscovery/wappalyzergo v0.2.56 + github.com/projectdiscovery/wappalyzergo v0.2.57 github.com/redis/go-redis/v9 v9.11.0 github.com/seh-msft/burpxml v1.0.1 github.com/shurcooL/graphql v0.0.0-20230722043721-ed46e5a46466 @@ -181,6 +181,7 @@ require ( github.com/caddyserver/certmagic v0.19.2 // indirect github.com/cenkalti/backoff/v4 v4.3.0 // indirect github.com/cenkalti/backoff/v5 v5.0.3 // indirect + github.com/censys/censys-sdk-go v0.19.1 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect github.com/charmbracelet/colorprofile v0.2.3-0.20250311203215-f60798e515dc // indirect github.com/charmbracelet/lipgloss v1.1.1-0.20250404203927-76690c660834 // indirect @@ -211,6 +212,7 @@ require ( 
github.com/dsnet/compress v0.0.2-0.20230904184137-39efe44ab707 // indirect github.com/ebitengine/purego v0.8.4 // indirect github.com/emirpasic/gods v1.18.1 // indirect + github.com/ericlagergren/decimal v0.0.0-20221120152707-495c53812d05 // indirect github.com/fatih/color v1.18.0 // indirect github.com/felixge/fgprof v0.9.5 // indirect github.com/felixge/httpsnoop v1.0.4 // indirect @@ -322,7 +324,7 @@ require ( github.com/power-devops/perfstat v0.0.0-20240221224432-82ca36839d55 // indirect github.com/projectdiscovery/asnmap v1.1.1 // indirect github.com/projectdiscovery/blackrock v0.0.1 // indirect - github.com/projectdiscovery/cdncheck v1.2.11 // indirect + github.com/projectdiscovery/cdncheck v1.2.12 // indirect github.com/projectdiscovery/freeport v0.0.7 // indirect github.com/projectdiscovery/ldapserver v1.0.2-0.20240219154113-dcc758ebc0cb // indirect github.com/projectdiscovery/machineid v0.0.0-20240226150047-2e2c51e35983 // indirect diff --git a/go.sum b/go.sum index 798cbfc736..3ad7a67837 100644 --- a/go.sum +++ b/go.sum @@ -234,6 +234,8 @@ github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyY github.com/cenkalti/backoff/v5 v5.0.3 h1:ZN+IMa753KfX5hd8vVaMixjnqRZ3y8CuJKRKj1xcsSM= github.com/cenkalti/backoff/v5 v5.0.3/go.mod h1:rkhZdG3JZukswDf7f0cwqPNk4K0sa+F97BxZthm/crw= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/censys/censys-sdk-go v0.19.1 h1:CG8rQKgwrKuoICd3oU0uddALMfJnboeMkDg/e74HYyc= +github.com/censys/censys-sdk-go v0.19.1/go.mod h1:DgPz5NgL+EfoueXLPG9UG1e7hS0OhtlywgpkIuu3ZRE= github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= @@ -337,6 +339,8 @@ github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymF 
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/ericlagergren/decimal v0.0.0-20221120152707-495c53812d05 h1:S92OBrGuLLZsyM5ybUzgc/mPjIYk2AZqufieooe98uw= +github.com/ericlagergren/decimal v0.0.0-20221120152707-495c53812d05/go.mod h1:M9R1FoZ3y//hwwnJtO51ypFGwm8ZfpxPT/ZLtO1mcgQ= github.com/fatih/color v1.18.0 h1:S8gINlzdQ840/4pfAwic/ZE0djQEH3wM94VfqLTZcOM= github.com/fatih/color v1.18.0/go.mod h1:4FelSpRwEGDpQ12mAdzqdOukCy4u8WUtOY6lkT/6HfU= github.com/fatih/structs v1.1.0 h1:Q7juDM0QtcnhCpeyLGQKyg4TOIghuNXrkL32pHAUMxo= @@ -819,14 +823,14 @@ github.com/projectdiscovery/asnmap v1.1.1 h1:ImJiKIaACOT7HPx4Pabb5dksolzaFYsD1kI github.com/projectdiscovery/asnmap v1.1.1/go.mod h1:QT7jt9nQanj+Ucjr9BqGr1Q2veCCKSAVyUzLXfEcQ60= github.com/projectdiscovery/blackrock v0.0.1 h1:lHQqhaaEFjgf5WkuItbpeCZv2DUIE45k0VbGJyft6LQ= github.com/projectdiscovery/blackrock v0.0.1/go.mod h1:ANUtjDfaVrqB453bzToU+YB4cUbvBRpLvEwoWIwlTss= -github.com/projectdiscovery/cdncheck v1.2.11 h1:ctqccij5PeobUjcTABZyGvbULxa3WT/hlL+yN1mSbyo= -github.com/projectdiscovery/cdncheck v1.2.11/go.mod h1:jyWMON16zbfCgi1Wp7nsGjZNFdRstYdK3IqjtnHB82Q= +github.com/projectdiscovery/cdncheck v1.2.12 h1:hS+CW0S2xOBIq7inKLp8u0/714S/t+X52S5iamVkp+U= +github.com/projectdiscovery/cdncheck v1.2.12/go.mod h1:Rc1G0QQdv3F+n003fksivUmTGitvb5+Gf36FpWkPQZw= github.com/projectdiscovery/clistats v0.1.1 h1:8mwbdbwTU4aT88TJvwIzTpiNeow3XnAB72JIg66c8wE= github.com/projectdiscovery/clistats v0.1.1/go.mod h1:4LtTC9Oy//RiuT1+76MfTg8Hqs7FQp1JIGBM3nHK6a0= -github.com/projectdiscovery/dsl v0.8.5 h1:f3opg8Jzikwx6VXC+CbgseUmSUqdfCnfGT08Syhp0sw= -github.com/projectdiscovery/dsl v0.8.5/go.mod h1:AuUq18cpLJJ0uAjJZKaLrdyAgDHrnQAjLMZtPEyMoJw= 
-github.com/projectdiscovery/fastdialer v0.4.17 h1:Cx3HdZaWJC8+qUBDvEarP7qmeLLJ9SqiXbQJD01oXxY= -github.com/projectdiscovery/fastdialer v0.4.17/go.mod h1:O47aw/ML7boXKTVKpfjBrTeBH8F4LFuu8okvOzOOBsg= +github.com/projectdiscovery/dsl v0.8.6 h1:kwtJn53UtDVX5vzmSmD/vDS8f1sR5yFyQchFd8Y2Oh8= +github.com/projectdiscovery/dsl v0.8.6/go.mod h1:bKSpMqLfeSllWPHlRuw/L0afAUYL2omA7sT6fGj8Nhc= +github.com/projectdiscovery/fastdialer v0.4.18 h1:jM3DlFjpy+NdmEpMpVdPF6J6hT0tDQcaKX0K2MX8xNU= +github.com/projectdiscovery/fastdialer v0.4.18/go.mod h1:zfpqgPL0LmQMxHC9t0np8B7gHHgYwcQ55f8/NJyUPKM= github.com/projectdiscovery/fasttemplate v0.0.2 h1:h2cISk5xDhlJEinlBQS6RRx0vOlOirB2y3Yu4PJzpiA= github.com/projectdiscovery/fasttemplate v0.0.2/go.mod h1:XYWWVMxnItd+r0GbjA1GCsUopMw1/XusuQxdyAIHMCw= github.com/projectdiscovery/freeport v0.0.7 h1:Q6uXo/j8SaV/GlAHkEYQi8WQoPXyJWxyspx+aFmz9Qk= @@ -837,14 +841,14 @@ github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb h1:rutG90 github.com/projectdiscovery/go-smb2 v0.0.0-20240129202741-052cc450c6cb/go.mod h1:FLjF1DmZ+POoGEiIQdWuYVwS++C/GwpX8YaCsTSm1RY= github.com/projectdiscovery/goflags v0.1.74 h1:n85uTRj5qMosm0PFBfsvOL24I7TdWRcWq/1GynhXS7c= github.com/projectdiscovery/goflags v0.1.74/go.mod h1:UMc9/7dFz2oln+10tv6cy+7WZKTHf9UGhaNkF95emh4= -github.com/projectdiscovery/gologger v1.1.61 h1:+jJ0Z0x6X9s69IRjbtsnOfMD8YTFTVADHMKFNu6dUGg= -github.com/projectdiscovery/gologger v1.1.61/go.mod h1:EfuwZ1lQX7kH4rgNo0nzk5XPh2j2gpYEQUi9tkoJDJw= +github.com/projectdiscovery/gologger v1.1.62 h1:wzKqvL6HQRzf0/PpBEhInZqqL1q4mKe2gFGJeDG3FqE= +github.com/projectdiscovery/gologger v1.1.62/go.mod h1:YWvMSxlHybU3SkFCcWn+driSJ8yY+3CR3g/textnp+Y= github.com/projectdiscovery/gostruct v0.0.2 h1:s8gP8ApugGM4go1pA+sVlPDXaWqNP5BBDDSv7VEdG1M= github.com/projectdiscovery/gostruct v0.0.2/go.mod h1:H86peL4HKwMXcQQtEa6lmC8FuD9XFt6gkNR0B/Mu5PE= github.com/projectdiscovery/gozero v0.1.1-0.20251027191944-a4ea43320b81 h1:yHh46pJovYbyiaHCV7oIDinFmy+Fyq36H1BowJgb0M0= 
github.com/projectdiscovery/gozero v0.1.1-0.20251027191944-a4ea43320b81/go.mod h1:9lmGPBDGZVANzCGjQg+V32n8Y3Cgjo/4kT0E88lsVTI= -github.com/projectdiscovery/hmap v0.0.96 h1:gxpKGZc802/QCROuaj2l3HnfV3vzZ7AMZrK4tncKXTQ= -github.com/projectdiscovery/hmap v0.0.96/go.mod h1:x7lxwipd1roSXnpjnRqlZUMw8OhUwA96YWQieyWVHJk= +github.com/projectdiscovery/hmap v0.0.97 h1:rfJt44HOaK5/zkeQXXoDl5tCuiUpc0chuYN43QPnm+E= +github.com/projectdiscovery/hmap v0.0.97/go.mod h1:x7K+2xTDMYLilpZjnnaXufesVVUSfxttXgqsuYvQJVk= github.com/projectdiscovery/httpx v1.7.2 h1:AfJ5wjhKOlywX+x+gPO4iPqgFEyoIJwvXsLpQQgs4+c= github.com/projectdiscovery/httpx v1.7.2/go.mod h1:hm0uTQGUTU1K0AQ1NQVfFrKfiS4u9Ynh8wArdXUXBS4= github.com/projectdiscovery/interactsh v1.2.4 h1:WUSj+fxbcV53J64oIAhbYzCKD1w/IyenyRBhkI5jiqI= @@ -857,34 +861,32 @@ github.com/projectdiscovery/mapcidr v1.1.97 h1:7FkxNNVXp+m1rIu5Nv/2SrF9k4+LwP8Qu github.com/projectdiscovery/mapcidr v1.1.97/go.mod h1:9dgTJh1SP02gYZdpzMjm6vtYFkEHQHoTyaVNvaeJ7lA= github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5 h1:L/e8z8yw1pfT6bg35NiN7yd1XKtJap5Nk6lMwQ0RNi8= github.com/projectdiscovery/n3iwf v0.0.0-20230523120440-b8cd232ff1f5/go.mod h1:pGW2ncnTxTxHtP9wzcIJAB+3/NMp6IiuQWd2NK7K+oc= -github.com/projectdiscovery/networkpolicy v0.1.29 h1:wZoCTPJl1ry98Cgwh4vwMSKrirHDd1Yfw7U77kIDkK4= -github.com/projectdiscovery/networkpolicy v0.1.29/go.mod h1:VDp7uQk31gvVG/y9E0liCRlqr/in+vQarODoElXGLOU= +github.com/projectdiscovery/networkpolicy v0.1.30 h1:XvzvRxldndEk7eRwSvuJ4xtGSzSgwZsrZB9VuUEfR1A= +github.com/projectdiscovery/networkpolicy v0.1.30/go.mod h1:/aJZmi2/d41O67wBcTkee/LWhyJnlKxCuWe6cUN2SNU= github.com/projectdiscovery/ratelimit v0.0.82 h1:rtO5SQf5uQFu5zTahTaTcO06OxmG8EIF1qhdFPIyTak= github.com/projectdiscovery/ratelimit v0.0.82/go.mod h1:z076BrLkBb5yS7uhHNoCTf8X/BvFSGRxwQ8EzEL9afM= github.com/projectdiscovery/rawhttp v0.1.90 h1:LOSZ6PUH08tnKmWsIwvwv1Z/4zkiYKYOSZ6n+8RFKtw= github.com/projectdiscovery/rawhttp v0.1.90/go.mod 
h1:VZYAM25UI/wVB3URZ95ZaftgOnsbphxyAw/XnQRRz4Y= github.com/projectdiscovery/rdap v0.9.0 h1:wPhHx5pQ2QI+WGhyNb2PjhTl0NtB39Nk7YFZ9cp8ZGA= github.com/projectdiscovery/rdap v0.9.0/go.mod h1:zk4yrJFQ2Hy36Aqk+DvotYQxYAeALaCJ5ORySkff36Q= -github.com/projectdiscovery/retryabledns v1.0.109 h1:2tDpoTRFlWmOsiH5Z/qVGrOzlB/hUO9PF2s6ol9BWcQ= -github.com/projectdiscovery/retryabledns v1.0.109/go.mod h1:yGKECPB69MWT1EZwWj5zejZflgP+KUDd5dBYZm3igxs= -github.com/projectdiscovery/retryablehttp-go v1.0.132 h1:h4sVcJE9GsLnxfzyXy7pa1PXEf4QwscbO19EPE1mNDo= -github.com/projectdiscovery/retryablehttp-go v1.0.132/go.mod h1:vf8+meeaGFjglVSDQvNISQtAmDKpi4FDjyb4+eFUED4= +github.com/projectdiscovery/retryabledns v1.0.110 h1:24p1PzWBdfsRnGsBf6ZxXPzvK0sYaL4q/ju4+2OhJzU= +github.com/projectdiscovery/retryabledns v1.0.110/go.mod h1:GFj5HjxfaGrZeoYf79zI/R99XljBNjmOqNvwOqPepRU= +github.com/projectdiscovery/retryablehttp-go v1.0.133 h1:uAIGwsRelrS1Ulelyp9qLtZRDTFHixw4O0cUQWLhTJQ= +github.com/projectdiscovery/retryablehttp-go v1.0.133/go.mod h1:9DU57ezv5cfZSWw/m5XFDTMjy1yKeMyn1kj35lPlcfM= github.com/projectdiscovery/sarif v0.0.1 h1:C2Tyj0SGOKbCLgHrx83vaE6YkzXEVrMXYRGLkKCr/us= github.com/projectdiscovery/sarif v0.0.1/go.mod h1:cEYlDu8amcPf6b9dSakcz2nNnJsoz4aR6peERwV+wuQ= github.com/projectdiscovery/stringsutil v0.0.2 h1:uzmw3IVLJSMW1kEg8eCStG/cGbYYZAja8BH3LqqJXMA= github.com/projectdiscovery/stringsutil v0.0.2/go.mod h1:EJ3w6bC5fBYjVou6ryzodQq37D5c6qbAYQpGmAy+DC0= github.com/projectdiscovery/tlsx v1.2.2 h1:Y96QBqeD2anpzEtBl4kqNbwzXh2TrzJuXfgiBLvK+SE= github.com/projectdiscovery/tlsx v1.2.2/go.mod h1:ZJl9F1sSl0sdwE+lR0yuNHVX4Zx6tCSTqnNxnHCFZB4= -github.com/projectdiscovery/uncover v1.1.0 h1:UDp/qLZn78YZb6VPoOrfyP1vz+ojEx8VrTTyjjRt9UU= -github.com/projectdiscovery/uncover v1.1.0/go.mod h1:2rXINmMe/lmVAt2jn9CpAOs9An57/JEeLZobY3Z9kUs= -github.com/projectdiscovery/useragent v0.0.103 h1:mtHA4PK50ZH89bMWVCdRNac7f6kpfMet+sva6pSzzEo= -github.com/projectdiscovery/useragent v0.0.103/go.mod 
h1:NW+eQMkBYT5QdRgJ6ckKLhQ23D5I/Sc51Y43ys80+ZU= -github.com/projectdiscovery/utils v0.7.1 h1:3zzDzA0T6SxLripNGoTbO/znDCDWE2xlk9nazRCLBV4= -github.com/projectdiscovery/utils v0.7.1/go.mod h1:WrTQBt9Hfg5sH7q92qXUToUOhD7rWyHiWk2Gh/l1Mv8= +github.com/projectdiscovery/uncover v1.2.0 h1:31tjYa0v8FB8Ch8hJTxb+2t63vsljdOo0OSFylJcX4M= +github.com/projectdiscovery/uncover v1.2.0/go.mod h1:ozqKb++p39Kmh1SmwIpbQ9p0aVGPXuwsb4/X2Kvx6ms= +github.com/projectdiscovery/useragent v0.0.104 h1:Gxy5UrZ494Ce8CWicFECqc8LGRVknK5duUfBU39VhG0= +github.com/projectdiscovery/useragent v0.0.104/go.mod h1:dq6N53FgzL5xEQkFBSpAtK70TbEOHZPLeG7LOorSS14= github.com/projectdiscovery/utils v0.7.3 h1:kX+77AA58yK6EZgkTRJEnK9V/7AZYzlXdcu/o/kJhFs= github.com/projectdiscovery/utils v0.7.3/go.mod h1:uDdQ3/VWomai98l+a3Ye/srDXdJ4xUIar/mSXlQ9gBM= -github.com/projectdiscovery/wappalyzergo v0.2.56 h1:KyZSQ58NjqU5k0//JZUUuVPsruDFXz55ezHpjLKCy68= -github.com/projectdiscovery/wappalyzergo v0.2.56/go.mod h1:lwuDLdAqWDZ1IL8OQnoNQ0t17UP9AQSvVuFcDAm4FpQ= +github.com/projectdiscovery/wappalyzergo v0.2.57 h1:g3M/lEX0epjY2pL5diqVtn3J0meLg8cG6qzX0pCAW+4= +github.com/projectdiscovery/wappalyzergo v0.2.57/go.mod h1:lwuDLdAqWDZ1IL8OQnoNQ0t17UP9AQSvVuFcDAm4FpQ= github.com/projectdiscovery/yamldoc-go v1.0.6 h1:GCEdIRlQjDux28xTXKszM7n3jlMf152d5nqVpVoetas= github.com/projectdiscovery/yamldoc-go v1.0.6/go.mod h1:R5lWrNzP+7Oyn77NDVPnBsxx2/FyQZBBkIAaSaCQFxw= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= From 9102f333a973333ed79d5b8c417abda97e4e00e2 Mon Sep 17 00:00:00 2001 From: Dwi Siswanto <25837540+dwisiswant0@users.noreply.github.com> Date: Thu, 4 Dec 2025 21:35:51 +0700 Subject: [PATCH 54/55] feat(loader): implement persistent metadata cache (#6630) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat(loader): implement persistent metadata cache for template filtering optimization. 
Introduce a new template metadata indexing system with persistent caching to dramatically improve template loading perf when filters are applied. The implementation adds a new index pkg that caches lightweight template metadata (ID, tags, authors, severity, etc.) and enables filtering templates before expensive YAML parsing occurs. The index uses an in-memory LRU cache backed by `otter` pkg for efficient memory management with adaptive sizing based on entry weight, defaulting to approx. 40MB for 50K templates. Metadata is persisted to disk using gob encoding at "~/.cache/nuclei/index.gob" with atomic writes to prevent corruption. The cache automatically invalidates stale entries using `ModTime` to detect file modifications, ensuring metadata freshness w/o manual intervention. Filtering has been refactored from the previous `TagFilter` and `PathFilter` approach into a unified `index.Filter` type that handles all basic filtering ops including severity, authors, tags, template IDs with wildcard support, protocol types, and path-based inclusion and exclusion. The filter implements OR logic within each field type and AND logic across different field types, with exclusion filters taking precedence over inclusion filters and forced inclusion via `IncludeTemplates` and `IncludeTags` overriding exclusions. The `loader` integration creates an index filter from store configuration via `buildIndexFilter` and manages the cache lifecycle through `loadTemplatesIndex` and `saveTemplatesIndex` methods. When `LoadTemplatesOnlyMetadata` or `LoadTemplatesWithTags` is called, the system first checks the metadata cache for each template path. If cached metadata exists and passes validation, the filter is applied directly against the metadata without parsing. Only templates matching the filter criteria proceed to full YAML parsing, resulting in significant performance gains. 
Advanced filtering via "-tc" flag (`IncludeConditions`) still requires template parsing as these are expression-based filters that cannot be evaluated from metadata alone. The `TagFilter` has been simplified to handle only `IncludeConditions` while all other filtering ops are delegated to the index-based filtering system. Cache management is fully automatic with no user configuration required. The cache gracefully handles errors by logging warnings & falling back to normal op w/o caching. Cache files use schema versioning to invalidate incompatible cache formats across nuclei updates (well, specifically `Index` and `Metadata` changes). This optimization particularly benefits repeated scans with the same filters, CI/CD pipelines running nuclei regularly, development and testing workflows with frequent template loading, and any scenario with large template collections where filtering would exclude most templates. * test(loader): adds `BenchmarkLoadTemplates{,OnlyMetadata}` benchs Signed-off-by: Dwi Siswanto * ci: cache nuclei-templates index Signed-off-by: Dwi Siswanto * chore(index): satisfy lints Signed-off-by: Dwi Siswanto * fix(index): correct metadata filter logic for proper template matching. The `filter.matchesIncludes()` was using OR logic across different filter types, causing incorrect template matching. Additionally, ID matching was case-sensitive, failing to match patterns like 'CVE-2021-*'. The filter now correctly implements: (author1 OR author2) AND (tag1 OR tag2) AND (severity1 OR severity2) - using OR within each filter type and AND across different types. Signed-off-by: Dwi Siswanto * test(index): resolve test timing issue in CI environments. Some test was failing in CI due to filesystem timestamp resolution limitations. On filesystems with 1s ModTime granularity (common in CI), modifying a file immediately after capturing its timestamp resulted in identical ModTime values, causing IsValid() to incorrectly return true. 
Signed-off-by: Dwi Siswanto * ci: cache nuclei with composite action Signed-off-by: Dwi Siswanto * fix(index): file locking issue on Windows during cache save/load. Explicitly close file handles before performing rename/remove ops in `Save` and `Load` methods. * In `Save`, close temp file before rename. * In `Load`, close file before remove during error handling/version mismatch. Signed-off-by: Dwi Siswanto * test(index): flaky index tests on Windows Fix path separator mismatch in `TestCacheSize` and `TestCachePersistenceWithLargeDataset` by using `filepath.Join` consistently instead of hardcoded forward slashes. Signed-off-by: Dwi Siswanto * test(cmd): init logger to prevent nil pointer deref The integration tests were panicking with a nil pointer dereference in `pkg/catalog/loader` because the logger was not init'ed. When `store.saveMetadataIndexOnce` attempted to log the result of the metadata cache op, it dereferenced the nil logger, causing a crash. Signed-off-by: Dwi Siswanto * fix(loader): resolve include/exclude paths for metadata cache filter. The `indexFilter` was previously init'ed using raw relative paths from the config for `IncludeTemplates` and `ExcludeTemplates`. But the persistent metadata cache stores templates using their absolute paths. This mismatch caused the `matchesPath` check to fail, leading to templates being incorrectly excluded even when explicitly included via flags (e.g., "-include-templates loader/excluded-template.yaml"). This commit updates `buildIndexFilter` to resolve these paths to their absolute versions using `store.config.Catalog.GetTemplatesPath` before creating the filter, ensuring consistent path matching against the metadata cache. 
Signed-off-by: Dwi Siswanto * feat(index): adds `NewMetadataFromTemplate` func Signed-off-by: Dwi Siswanto * refactor(index): return metadata when `(*Index).cache` is nil Signed-off-by: Dwi Siswanto * refactor(loader): restore pre‑index behavior semantics Signed-off-by: Dwi Siswanto --------- Signed-off-by: Dwi Siswanto --- .github/workflows/generate-pgo.yaml | 1 + .github/workflows/perf-regression.yaml | 1 + .github/workflows/tests.yaml | 4 + cmd/integration-test/library.go | 2 + go.mod | 1 + go.sum | 2 + pkg/catalog/index/filter.go | 344 +++++++++++ pkg/catalog/index/filter_test.go | 407 +++++++++++++ pkg/catalog/index/index.go | 352 +++++++++++ pkg/catalog/index/index_test.go | 737 ++++++++++++++++++++++++ pkg/catalog/index/metadata.go | 104 ++++ pkg/catalog/loader/loader.go | 205 ++++++- pkg/catalog/loader/loader_bench_test.go | 200 +++++++ 13 files changed, 2328 insertions(+), 32 deletions(-) create mode 100644 pkg/catalog/index/filter.go create mode 100644 pkg/catalog/index/filter_test.go create mode 100644 pkg/catalog/index/index.go create mode 100644 pkg/catalog/index/index_test.go create mode 100644 pkg/catalog/index/metadata.go diff --git a/.github/workflows/generate-pgo.yaml b/.github/workflows/generate-pgo.yaml index b0c47d56ec..39fc7e6a10 100644 --- a/.github/workflows/generate-pgo.yaml +++ b/.github/workflows/generate-pgo.yaml @@ -31,6 +31,7 @@ jobs: - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/git@v1 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/nuclei@v1 - name: Generate list run: for i in {1..${{ matrix.targets }}}; do echo "https://honey.scanme.sh/?_=${i}" >> "${LIST_FILE}"; done # NOTE(dwisiswant0): use `-no-mhe` flag to get better samples. 
diff --git a/.github/workflows/perf-regression.yaml b/.github/workflows/perf-regression.yaml index 85650a09f4..7856067243 100644 --- a/.github/workflows/perf-regression.yaml +++ b/.github/workflows/perf-regression.yaml @@ -14,6 +14,7 @@ jobs: - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/cache/go-rod-browser@v1 + - uses: projectdiscovery/actions/cache/nuclei@v1 - run: make build-test - run: ./bin/nuclei.test -test.run - -test.bench=. -test.benchmem ./cmd/nuclei/ | tee $BENCH_OUT env: diff --git a/.github/workflows/tests.yaml b/.github/workflows/tests.yaml index e244b2001d..42a46a67db 100644 --- a/.github/workflows/tests.yaml +++ b/.github/workflows/tests.yaml @@ -39,6 +39,7 @@ jobs: - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/cache/go-rod-browser@v1 + - uses: projectdiscovery/actions/cache/nuclei@v1 - uses: projectdiscovery/actions/free-disk-space@v1 with: llvm: 'false' @@ -66,6 +67,7 @@ jobs: - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 - uses: projectdiscovery/actions/cache/go-rod-browser@v1 + - uses: projectdiscovery/actions/cache/nuclei@v1 - name: "Simple" run: go run . 
working-directory: examples/simple/ @@ -88,6 +90,7 @@ jobs: steps: - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/nuclei@v1 - uses: projectdiscovery/actions/setup/python@v1 - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - run: bash run.sh "${{ matrix.os }}" @@ -108,6 +111,7 @@ jobs: steps: - uses: actions/checkout@v6 - uses: projectdiscovery/actions/setup/go@v1 + - uses: projectdiscovery/actions/cache/nuclei@v1 - uses: projectdiscovery/actions/setup/python@v1 - uses: projectdiscovery/actions/cache/go-rod-browser@v1 - run: bash run.sh diff --git a/cmd/integration-test/library.go b/cmd/integration-test/library.go index 3513b1d043..2c4cda5764 100644 --- a/cmd/integration-test/library.go +++ b/cmd/integration-test/library.go @@ -15,6 +15,7 @@ import ( "github.com/logrusorgru/aurora" "github.com/pkg/errors" "github.com/projectdiscovery/goflags" + "github.com/projectdiscovery/gologger" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/config" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/disk" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader" @@ -70,6 +71,7 @@ func executeNucleiAsLibrary(templatePath, templateURL string) ([]string, error) defaultOpts := types.DefaultOptions() defaultOpts.ExecutionId = "test" + defaultOpts.Logger = gologger.DefaultLogger mockProgress := &testutils.MockProgressClient{} reportingClient, err := reporting.New(&reporting.Options{ExecutionId: defaultOpts.ExecutionId}, "", false) diff --git a/go.mod b/go.mod index 2317c60175..8bd2a75809 100644 --- a/go.mod +++ b/go.mod @@ -87,6 +87,7 @@ require ( github.com/leslie-qiwa/flat v0.0.0-20230424180412-f9d1cf014baa github.com/lib/pq v1.10.9 github.com/mattn/go-sqlite3 v1.14.28 + github.com/maypok86/otter/v2 v2.2.1 github.com/mholt/archives v0.1.5 github.com/microsoft/go-mssqldb v1.9.2 github.com/ory/dockertest/v3 v3.12.0 diff --git a/go.sum b/go.sum index 3ad7a67837..aa5d566cdd 100644 --- a/go.sum +++ b/go.sum 
@@ -707,6 +707,8 @@ github.com/mattn/go-sqlite3 v1.14.28 h1:ThEiQrnbtumT+QMknw63Befp/ce/nUPgBPMlRFEu github.com/mattn/go-sqlite3 v1.14.28/go.mod h1:Uh1q+B4BYcTPb+yiD3kU8Ct7aC0hY9fxUwlHK0RXw+Y= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= +github.com/maypok86/otter/v2 v2.2.1 h1:hnGssisMFkdisYcvQ8L019zpYQcdtPse+g0ps2i7cfI= +github.com/maypok86/otter/v2 v2.2.1/go.mod h1:1NKY9bY+kB5jwCXBJfE59u+zAwOt6C7ni1FTlFFMqVs= github.com/mholt/acmez v1.2.0 h1:1hhLxSgY5FvH5HCnGUuwbKY2VQVo8IU7rxXKSnZ7F30= github.com/mholt/acmez v1.2.0/go.mod h1:VT9YwH1xgNX1kmYY89gY8xPJC84BFAisjo8Egigt4kE= github.com/mholt/archives v0.1.5 h1:Fh2hl1j7VEhc6DZs2DLMgiBNChUux154a1G+2esNvzQ= diff --git a/pkg/catalog/index/filter.go b/pkg/catalog/index/filter.go new file mode 100644 index 0000000000..ac4959a531 --- /dev/null +++ b/pkg/catalog/index/filter.go @@ -0,0 +1,344 @@ +package index + +import ( + "path/filepath" + "slices" + "strings" + + "github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity" + "github.com/projectdiscovery/nuclei/v3/pkg/templates/types" +) + +// Filter represents filtering criteria for template metadata. +// +// Inclusion fields (e.g., Authors, Tags, IDs, Severities, ProtocolTypes) use +// AND logic across different filter types and OR logic within each type. +// Exclusion fields (e.g., ExcludeTags, ExcludeIDs, ExcludeSeverities, +// ExcludeProtocolTypes) take precedence over inclusion fields. Additionally, +// IncludeTemplates and IncludeTags can force inclusion of templates even if +// they match exclusion criteria. +type Filter struct { + // Authors to include. + Authors []string + + // Tags to include. + Tags []string + + // ExcludeTags to exclude (takes precedence over Tags). + ExcludeTags []string + + // IncludeTags to force include even if excluded. 
+ IncludeTags []string + + // IDs to include (supports wildcards, OR logic). + IDs []string + + // ExcludeIDs to exclude (supports wildcards). + ExcludeIDs []string + + // IncludeTemplates paths to force include even if excluded. + IncludeTemplates []string + + // ExcludeTemplates paths to exclude. + ExcludeTemplates []string + + // Severities to include. + Severities []severity.Severity + + // ExcludeSeverities to exclude. + ExcludeSeverities []severity.Severity + + // ProtocolTypes to include. + ProtocolTypes []types.ProtocolType + + // ExcludeProtocolTypes to exclude. + ExcludeProtocolTypes []types.ProtocolType +} + +// Matches checks if metadata matches the filter criteria. +func (f *Filter) Matches(m *Metadata) bool { + if f.isForcedInclude(m) { + return true + } + + if f.isExcluded(m) { + return false + } + + if !f.matchesIncludes(m) { + return false + } + + return true +} + +// isForcedInclude checks if template is forced to be included. +func (f *Filter) isForcedInclude(m *Metadata) bool { + if len(f.IncludeTemplates) > 0 { + for _, includePath := range f.IncludeTemplates { + if matchesPath(m.FilePath, includePath) { + return true + } + } + } + + if len(f.IncludeTags) > 0 { + if slices.ContainsFunc(f.IncludeTags, m.HasTag) { + return true + } + } + + return false +} + +// isExcluded checks if template should be excluded. 
+func (f *Filter) isExcluded(m *Metadata) bool { + if len(f.ExcludeTemplates) > 0 { + for _, excludePath := range f.ExcludeTemplates { + if matchesPath(m.FilePath, excludePath) { + return true + } + } + } + + if len(f.ExcludeTags) > 0 { + if slices.ContainsFunc(f.ExcludeTags, m.HasTag) { + return true + } + } + + if len(f.ExcludeIDs) > 0 { + for _, excludeID := range f.ExcludeIDs { + if matchesID(m.ID, excludeID) { + return true + } + } + } + + if len(f.ExcludeSeverities) > 0 { + if slices.ContainsFunc(f.ExcludeSeverities, m.MatchesSeverity) { + return true + } + } + + if len(f.ExcludeProtocolTypes) > 0 { + if slices.ContainsFunc(f.ExcludeProtocolTypes, m.MatchesProtocol) { + return true + } + } + + return false +} + +// matchesIncludes checks if metadata matches include filters. +// +// Returns true if no include filters are specified, or if all specified filter +// types match. +func (f *Filter) matchesIncludes(m *Metadata) bool { + if len(f.Authors) > 0 { + if !slices.ContainsFunc(f.Authors, m.HasAuthor) { + return false + } + } + + if len(f.Tags) > 0 { + if !slices.ContainsFunc(f.Tags, m.HasTag) { + return false + } + } + + if len(f.IDs) > 0 { + matched := false + for _, id := range f.IDs { + if matchesID(m.ID, id) { + matched = true + break + } + } + if !matched { + return false + } + } + + if len(f.Severities) > 0 { + if !slices.ContainsFunc(f.Severities, m.MatchesSeverity) { + return false + } + } + + if len(f.ProtocolTypes) > 0 { + if !slices.ContainsFunc(f.ProtocolTypes, m.MatchesProtocol) { + return false + } + } + + return true +} + +// matchesID checks if template ID matches pattern (supports wildcards). 
+func matchesID(templateID, pattern string) bool { + // Convert to lowercase for case-insensitive matching + templateID = strings.ToLower(templateID) + pattern = strings.ToLower(pattern) + + if templateID == pattern { + return true + } + + matched, _ := filepath.Match(pattern, templateID) + + return matched +} + +// matchesPath checks if template path matches pattern. +func matchesPath(templatePath, pattern string) bool { + templatePath = filepath.Clean(templatePath) + pattern = filepath.Clean(pattern) + + if templatePath == pattern { + return true + } + + if strings.HasPrefix(templatePath, pattern+string(filepath.Separator)) { + return true + } + + matched, _ := filepath.Match(pattern, templatePath) + + return matched +} + +// FilterFunc is a function that filters metadata. +type FilterFunc func(*Metadata) bool + +// UnmarshalFilter creates a Filter from nuclei options. +func UnmarshalFilter( + authors, tags, excludeTags, includeTags []string, + ids, excludeIDs []string, + includeTemplates, excludeTemplates []string, + severities, excludeSeverities []string, + protocolTypes, excludeProtocolTypes []string, +) (*Filter, error) { + filter := &Filter{ + Authors: authors, + Tags: tags, + ExcludeTags: excludeTags, + IncludeTags: includeTags, + IDs: ids, + ExcludeIDs: excludeIDs, + IncludeTemplates: includeTemplates, + ExcludeTemplates: excludeTemplates, + } + + for _, sev := range severities { + holder := &severity.Holder{} + if err := holder.UnmarshalYAML(func(v interface{}) error { + *v.(*string) = sev + return nil + }); err == nil { + filter.Severities = append(filter.Severities, holder.Severity) + } + } + + for _, sev := range excludeSeverities { + holder := &severity.Holder{} + if err := holder.UnmarshalYAML(func(v interface{}) error { + *v.(*string) = sev + return nil + }); err == nil { + filter.ExcludeSeverities = append(filter.ExcludeSeverities, holder.Severity) + } + } + + for _, pt := range protocolTypes { + holder := &types.TypeHolder{} + if err := 
holder.UnmarshalYAML(func(v interface{}) error { + *v.(*string) = pt + return nil + }); err == nil && holder.ProtocolType != types.InvalidProtocol { + filter.ProtocolTypes = append(filter.ProtocolTypes, holder.ProtocolType) + } + } + + for _, pt := range excludeProtocolTypes { + holder := &types.TypeHolder{} + if err := holder.UnmarshalYAML(func(v interface{}) error { + *v.(*string) = pt + return nil + }); err == nil && holder.ProtocolType != types.InvalidProtocol { + filter.ExcludeProtocolTypes = append(filter.ExcludeProtocolTypes, holder.ProtocolType) + } + } + + return filter, nil +} + +// UnmarshalFilterFunc creates a FilterFunc from filter criteria. +func UnmarshalFilterFunc(filter *Filter) FilterFunc { + if filter == nil { + return func(*Metadata) bool { return true } + } + + return filter.Matches +} + +// IsEmpty returns true if filter has no criteria set. +func (f *Filter) IsEmpty() bool { + return len(f.Authors) == 0 && + len(f.Tags) == 0 && + len(f.ExcludeTags) == 0 && + len(f.IncludeTags) == 0 && + len(f.IDs) == 0 && + len(f.ExcludeIDs) == 0 && + len(f.IncludeTemplates) == 0 && + len(f.ExcludeTemplates) == 0 && + len(f.Severities) == 0 && + len(f.ExcludeSeverities) == 0 && + len(f.ProtocolTypes) == 0 && + len(f.ExcludeProtocolTypes) == 0 +} + +// String returns a human-readable representation of the filter. 
+func (f *Filter) String() string { + var parts []string + + if len(f.Authors) > 0 { + parts = append(parts, "authors="+strings.Join(f.Authors, ",")) + } + + if len(f.Tags) > 0 { + parts = append(parts, "tags="+strings.Join(f.Tags, ",")) + } + + if len(f.ExcludeTags) > 0 { + parts = append(parts, "exclude-tags="+strings.Join(f.ExcludeTags, ",")) + } + + if len(f.IDs) > 0 { + parts = append(parts, "ids="+strings.Join(f.IDs, ",")) + } + + if len(f.Severities) > 0 { + sevs := make([]string, len(f.Severities)) + for i, s := range f.Severities { + sevs[i] = s.String() + } + + parts = append(parts, "severities="+strings.Join(sevs, ",")) + } + + if len(f.ProtocolTypes) > 0 { + pts := make([]string, len(f.ProtocolTypes)) + for i, p := range f.ProtocolTypes { + pts[i] = p.String() + } + + parts = append(parts, "types="+strings.Join(pts, ",")) + } + + if len(parts) == 0 { + return "filter=" + } + + return "filter(" + strings.Join(parts, ", ") + ")" +} diff --git a/pkg/catalog/index/filter_test.go b/pkg/catalog/index/filter_test.go new file mode 100644 index 0000000000..2bc4735e6c --- /dev/null +++ b/pkg/catalog/index/filter_test.go @@ -0,0 +1,407 @@ +package index + +import ( + "os" + "path/filepath" + "testing" + + "github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity" + "github.com/projectdiscovery/nuclei/v3/pkg/templates/types" + "github.com/stretchr/testify/require" +) + +func TestFilterMatches(t *testing.T) { + metadata := &Metadata{ + ID: "test-template-1", + FilePath: "/templates/cves/2021/CVE-2021-1234.yaml", + Name: "Test CVE Template", + Authors: []string{"pdteam", "geeknik"}, + Tags: []string{"cve", "rce", "apache"}, + Severity: "critical", + ProtocolType: "http", + } + + t.Run("Empty filter matches all", func(t *testing.T) { + filter := &Filter{} + require.True(t, filter.Matches(metadata)) + require.True(t, filter.IsEmpty()) + }) + + t.Run("Author filter - match", func(t *testing.T) { + filter := &Filter{Authors: []string{"pdteam"}} + require.True(t, 
filter.Matches(metadata)) + }) + + t.Run("Author filter - no match", func(t *testing.T) { + filter := &Filter{Authors: []string{"unknown"}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Multiple authors - OR logic", func(t *testing.T) { + filter := &Filter{Authors: []string{"unknown", "geeknik"}} + require.True(t, filter.Matches(metadata)) + }) + + t.Run("Tag filter - match", func(t *testing.T) { + filter := &Filter{Tags: []string{"cve"}} + require.True(t, filter.Matches(metadata)) + }) + + t.Run("Tag filter - no match", func(t *testing.T) { + filter := &Filter{Tags: []string{"xss"}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Exclude tags - match", func(t *testing.T) { + filter := &Filter{ExcludeTags: []string{"rce"}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Include tags overrides exclude", func(t *testing.T) { + filter := &Filter{ + ExcludeTags: []string{"rce"}, + IncludeTags: []string{"cve"}, + } + require.True(t, filter.Matches(metadata)) + }) + + t.Run("ID filter - exact match", func(t *testing.T) { + filter := &Filter{IDs: []string{"test-template-1"}} + require.True(t, filter.Matches(metadata)) + }) + + t.Run("ID filter - wildcard match", func(t *testing.T) { + filter := &Filter{IDs: []string{"test-*"}} + require.True(t, filter.Matches(metadata)) + }) + + t.Run("ID filter - no match", func(t *testing.T) { + filter := &Filter{IDs: []string{"other-*"}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Exclude ID - exact match", func(t *testing.T) { + filter := &Filter{ExcludeIDs: []string{"test-template-1"}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Exclude ID - wildcard match", func(t *testing.T) { + filter := &Filter{ExcludeIDs: []string{"test-*"}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Severity filter - match", func(t *testing.T) { + filter := &Filter{Severities: []severity.Severity{severity.Critical}} + require.True(t, filter.Matches(metadata)) + }) + + 
t.Run("Severity filter - no match", func(t *testing.T) { + filter := &Filter{Severities: []severity.Severity{severity.High, severity.Medium}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Exclude severity - match", func(t *testing.T) { + filter := &Filter{ExcludeSeverities: []severity.Severity{severity.Critical}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Protocol type filter - match", func(t *testing.T) { + filter := &Filter{ProtocolTypes: []types.ProtocolType{types.HTTPProtocol}} + require.True(t, filter.Matches(metadata)) + }) + + t.Run("Protocol type filter - no match", func(t *testing.T) { + filter := &Filter{ProtocolTypes: []types.ProtocolType{types.DNSProtocol}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Exclude protocol type - match", func(t *testing.T) { + filter := &Filter{ExcludeProtocolTypes: []types.ProtocolType{types.HTTPProtocol}} + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Include templates - path match", func(t *testing.T) { + filter := &Filter{ + ExcludeTags: []string{"cve"}, + IncludeTemplates: []string{"/templates/cves/"}, + } + require.True(t, filter.Matches(metadata)) + }) + + t.Run("Exclude templates - path match", func(t *testing.T) { + filter := &Filter{ + ExcludeTemplates: []string{"/templates/cves/"}, + } + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Complex filter - all match", func(t *testing.T) { + filter := &Filter{ + Authors: []string{"pdteam"}, + Tags: []string{"cve"}, + Severities: []severity.Severity{severity.Critical}, + ProtocolTypes: []types.ProtocolType{types.HTTPProtocol}, + } + require.True(t, filter.Matches(metadata)) + }) + + t.Run("Complex filter - AND logic across types", func(t *testing.T) { + filter := &Filter{ + Authors: []string{"pdteam"}, // matches + Tags: []string{"xss"}, // doesn't match + Severities: []severity.Severity{severity.Critical}, // matches + } + // With AND logic across filter types, doesn't match because tags don't 
match + // even though author and severity match + require.False(t, filter.Matches(metadata)) + }) + + t.Run("Complex filter - no match at all", func(t *testing.T) { + filter := &Filter{ + Authors: []string{"unknown"}, // doesn't match + Tags: []string{"xss"}, // doesn't match + Severities: []severity.Severity{severity.Low}, // doesn't match + } + require.False(t, filter.Matches(metadata)) + }) +} + +func TestMatchesPath(t *testing.T) { + tests := []struct { + name string + path string + pattern string + expected bool + }{ + {"exact match", "/templates/cves/2021/test.yaml", "/templates/cves/2021/test.yaml", true}, + {"directory prefix", "/templates/cves/2021/test.yaml", "/templates/cves", true}, + {"directory with slash", "/templates/cves/2021/test.yaml", "/templates/cves/", true}, + {"no match", "/templates/cves/2021/test.yaml", "/templates/exploits", false}, + {"wildcard match", "/templates/cves/2021/test.yaml", "/templates/*/2021/*.yaml", true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := matchesPath(tt.path, tt.pattern) + require.Equal(t, tt.expected, result) + }) + } +} + +func TestMatchesID(t *testing.T) { + tests := []struct { + name string + id string + pattern string + expected bool + }{ + {"exact match", "CVE-2021-1234", "CVE-2021-1234", true}, + {"wildcard prefix", "CVE-2021-1234", "CVE-*", true}, + {"wildcard suffix", "CVE-2021-1234", "*-1234", true}, + {"wildcard middle", "CVE-2021-1234", "CVE-*-1234", true}, + {"no match", "CVE-2021-1234", "CVE-2022-*", false}, + {"partial no match", "CVE-2021-1234", "CVE-2021-12", false}, + {"case insensitive exact", "cve-2021-1234", "CVE-2021-1234", true}, + {"case insensitive wildcard", "CVE-2021-1234", "cve-*", true}, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + result := matchesID(tt.id, tt.pattern) + require.Equal(t, tt.expected, result) + }) + } +} + +func TestUnmarshalFilter(t *testing.T) { + filter, err := UnmarshalFilter( + 
[]string{"author1", "author2"}, + []string{"tag1", "tag2"}, + []string{"exclude-tag"}, + []string{"include-tag"}, + []string{"id1", "id2*"}, + []string{"exclude-id*"}, + []string{"/include/path"}, + []string{"/exclude/path"}, + []string{"critical", "high"}, + []string{"info"}, + []string{"http", "dns"}, + []string{"file"}, + ) + + require.NoError(t, err) + require.NotNil(t, filter) + + require.Equal(t, []string{"author1", "author2"}, filter.Authors) + require.Equal(t, []string{"tag1", "tag2"}, filter.Tags) + require.Equal(t, []string{"exclude-tag"}, filter.ExcludeTags) + require.Equal(t, []string{"include-tag"}, filter.IncludeTags) + require.Equal(t, []string{"id1", "id2*"}, filter.IDs) + require.Equal(t, []string{"exclude-id*"}, filter.ExcludeIDs) + require.Equal(t, []string{"/include/path"}, filter.IncludeTemplates) + require.Equal(t, []string{"/exclude/path"}, filter.ExcludeTemplates) + + require.Len(t, filter.Severities, 2) + require.Contains(t, filter.Severities, severity.Critical) + require.Contains(t, filter.Severities, severity.High) + + require.Len(t, filter.ExcludeSeverities, 1) + require.Contains(t, filter.ExcludeSeverities, severity.Info) + + require.Len(t, filter.ProtocolTypes, 2) + require.Contains(t, filter.ProtocolTypes, types.HTTPProtocol) + require.Contains(t, filter.ProtocolTypes, types.DNSProtocol) + + require.Len(t, filter.ExcludeProtocolTypes, 1) + require.Contains(t, filter.ExcludeProtocolTypes, types.FileProtocol) +} + +func TestIndexFilter(t *testing.T) { + tmpDir := t.TempDir() + idx, err := NewIndex(tmpDir) + require.NoError(t, err) + + // Create test templates and metadata + templates := []struct { + id string + path string + authors []string + tags []string + severity string + protocol string + }{ + {"cve-2021-1", "/templates/cves/CVE-2021-1.yaml", []string{"pdteam"}, []string{"cve", "rce"}, "critical", "http"}, + {"cve-2021-2", "/templates/cves/CVE-2021-2.yaml", []string{"pdteam"}, []string{"cve", "xss"}, "high", "http"}, + 
{"exploit-1", "/templates/exploits/exploit-1.yaml", []string{"geeknik"}, []string{"exploit"}, "medium", "dns"}, + {"info-1", "/templates/info/info-1.yaml", []string{"author1"}, []string{"info"}, "info", "http"}, + } + + for _, tmpl := range templates { + tmpFile := filepath.Join(tmpDir, filepath.Base(tmpl.path)) + err := os.WriteFile(tmpFile, []byte("id: "+tmpl.id), 0644) + require.NoError(t, err) + + metadata := &Metadata{ + ID: tmpl.id, + FilePath: tmpFile, + Authors: tmpl.authors, + Tags: tmpl.tags, + Severity: tmpl.severity, + ProtocolType: tmpl.protocol, + } + idx.Set(tmpl.path, metadata) + } + + t.Run("No filter returns all", func(t *testing.T) { + results := idx.Filter(nil) + require.Len(t, results, 4) + }) + + t.Run("Filter by author", func(t *testing.T) { + filter := &Filter{Authors: []string{"pdteam"}} + results := idx.Filter(filter) + require.Len(t, results, 2) + }) + + t.Run("Filter by tag", func(t *testing.T) { + filter := &Filter{Tags: []string{"cve"}} + results := idx.Filter(filter) + require.Len(t, results, 2) + }) + + t.Run("Filter by severity", func(t *testing.T) { + filter := &Filter{Severities: []severity.Severity{severity.Critical}} + results := idx.Filter(filter) + require.Len(t, results, 1) + }) + + t.Run("Filter by protocol type", func(t *testing.T) { + filter := &Filter{ProtocolTypes: []types.ProtocolType{types.HTTPProtocol}} + results := idx.Filter(filter) + require.Len(t, results, 3) + }) + + t.Run("Exclude by severity", func(t *testing.T) { + filter := &Filter{ExcludeSeverities: []severity.Severity{severity.Info}} + results := idx.Filter(filter) + require.Len(t, results, 3) + }) + + t.Run("Exclude by tag", func(t *testing.T) { + filter := &Filter{ExcludeTags: []string{"info"}} + results := idx.Filter(filter) + require.Len(t, results, 3) + }) + + t.Run("Complex filter", func(t *testing.T) { + filter := &Filter{ + Tags: []string{"cve"}, + Severities: []severity.Severity{severity.Critical, severity.High}, + ExcludeSeverities: 
[]severity.Severity{severity.Info}, + } + results := idx.Filter(filter) + require.Len(t, results, 2) + }) + + t.Run("Count with filter", func(t *testing.T) { + filter := &Filter{Tags: []string{"cve"}} + count := idx.Count(filter) + require.Equal(t, 2, count) + }) + + t.Run("Count without filter", func(t *testing.T) { + count := idx.Count(nil) + require.Equal(t, 4, count) + }) +} + +func TestIndexFilterFunc(t *testing.T) { + tmpDir := t.TempDir() + idx, err := NewIndex(tmpDir) + require.NoError(t, err) + + // Add test metadata + for i := 0; i < 5; i++ { + metadata := &Metadata{ + ID: "test-" + string(rune('a'+i)), + FilePath: "/tmp/test.yaml", + Severity: "high", + } + if i%2 == 0 { + metadata.Tags = []string{"even"} + } else { + metadata.Tags = []string{"odd"} + } + idx.Set("/tmp/test-"+string(rune('a'+i))+".yaml", metadata) + } + + t.Run("Custom filter function", func(t *testing.T) { + results := idx.FilterFunc(func(m *Metadata) bool { + return m.HasTag("even") + }) + require.Len(t, results, 3) // 0, 2, 4 + }) + + t.Run("Nil filter function returns all", func(t *testing.T) { + results := idx.FilterFunc(nil) + require.Len(t, results, 5) + }) +} + +func TestFilterString(t *testing.T) { + filter := &Filter{ + Authors: []string{"author1", "author2"}, + Tags: []string{"tag1"}, + Severities: []severity.Severity{severity.Critical, severity.High}, + ProtocolTypes: []types.ProtocolType{types.HTTPProtocol}, + } + + str := filter.String() + require.Contains(t, str, "authors=") + require.Contains(t, str, "tags=") + require.Contains(t, str, "severities=") + require.Contains(t, str, "types=") + + emptyFilter := &Filter{} + require.Equal(t, "filter=", emptyFilter.String()) +} diff --git a/pkg/catalog/index/index.go b/pkg/catalog/index/index.go new file mode 100644 index 0000000000..3024851b72 --- /dev/null +++ b/pkg/catalog/index/index.go @@ -0,0 +1,352 @@ +package index + +import ( + "encoding/gob" + "maps" + "os" + "path/filepath" + "sync" + + "github.com/maypok86/otter/v2" + 
"github.com/projectdiscovery/nuclei/v3/pkg/catalog/config" + "github.com/projectdiscovery/nuclei/v3/pkg/templates" + folderutil "github.com/projectdiscovery/utils/folder" +) + +const ( + // IndexFileName is the name of the persistent cache file. + IndexFileName = "index.gob" + + // IndexVersion is the schema version for cache invalidation on breaking + // changes. + IndexVersion = 1 + + // DefaultMaxSize is the default maximum number of templates to cache. + DefaultMaxSize = 50000 + + // DefaultMaxWeight is the default maximum weight of the cache. + DefaultMaxWeight = DefaultMaxSize * 800 // ~40MB assuming ~800B/entry +) + +// Index represents a cache for template metadata. +type Index struct { + cache *otter.Cache[string, *Metadata] + cacheFile string + mu sync.RWMutex + version int +} + +// cacheSnapshot represents the serialized cache structure. +type cacheSnapshot struct { + Version int `gob:"version"` + Data map[string]*Metadata `gob:"data"` +} + +// NewIndex creates a new template metadata cache with the given options. +func NewIndex(cacheDir string) (*Index, error) { + if cacheDir == "" { + cacheDir = folderutil.AppCacheDirOrDefault(".nuclei-cache", config.BinaryName) + } + + if err := os.MkdirAll(cacheDir, 0755); err != nil { + return nil, err + } + + cacheFile := filepath.Join(cacheDir, IndexFileName) + + // NOTE(dwisiswant0): Build cache with adaptive sizing based on memory cost. 
+ opts := &otter.Options[string, *Metadata]{ + MaximumWeight: uint64(DefaultMaxWeight), + Weigher: func(key string, value *Metadata) uint32 { + if value == nil { + return uint32(len(key)) + } + + weight := len(key) + weight += len(value.ID) + weight += len(value.FilePath) + weight += 24 // ModTime is time.Time (24B) + weight += len(value.Name) + weight += len(value.Severity) + weight += len(value.ProtocolType) + weight += len(value.TemplateVerifier) + + for _, author := range value.Authors { + weight += len(author) + } + for _, tag := range value.Tags { + weight += len(tag) + } + + return uint32(weight) + }, + } + + cache, err := otter.New(opts) + if err != nil { + return nil, err + } + + c := &Index{ + cache: cache, + cacheFile: cacheFile, + version: IndexVersion, + } + + return c, nil +} + +// NewDefaultIndex creates a index with default settings in the default cache +// directory. +func NewDefaultIndex() (*Index, error) { + return NewIndex("") +} + +// Get retrieves metadata for a template path, validating freshness via mtime. +func (i *Index) Get(path string) (*Metadata, bool) { + i.mu.RLock() + defer i.mu.RUnlock() + + metadata, found := i.cache.GetIfPresent(path) + if !found { + return nil, false + } + + if !metadata.IsValid() { + go i.Delete(path) + + return nil, false + } + + return metadata, true +} + +// Set stores metadata for a template path. +// +// The caller is responsible for ensuring the metadata is valid and contains +// the correct checksum before calling this method. +// Use [SetFromTemplate] for automatic extraction and checksum computation. +// +// Returns the metadata and whether it was successfully cached (false if evicted). +func (i *Index) Set(path string, metadata *Metadata) (*Metadata, bool) { + i.mu.Lock() + defer i.mu.Unlock() + + return i.cache.Set(path, metadata) +} + +// SetFromTemplate extracts metadata from a parsed template and stores it. +// +// Returns the metadata and whether it was successfully cached. 
The metadata is +// always returned (even on checksum failure) for immediate filtering use. +// Returns false if the metadata was not cached (e.g., set, evicted). +func (i *Index) SetFromTemplate(path string, tpl *templates.Template) (*Metadata, bool) { + metadata := NewMetadataFromTemplate(path, tpl) + + info, err := os.Stat(path) + if err != nil { + return metadata, false + } + metadata.ModTime = info.ModTime() + + if i.cache == nil { + return metadata, false + } + + return i.Set(path, metadata) +} + +// Has checks if metadata exists for a path without validation. +func (i *Index) Has(path string) bool { + i.mu.RLock() + defer i.mu.RUnlock() + + _, found := i.cache.GetIfPresent(path) + + return found +} + +// Delete removes metadata for a path. +func (i *Index) Delete(path string) { + i.mu.Lock() + defer i.mu.Unlock() + + i.cache.Invalidate(path) +} + +// Size returns the number of cached entries. +func (i *Index) Size() int { + i.mu.RLock() + defer i.mu.RUnlock() + + return i.cache.EstimatedSize() +} + +// Clear removes all cached entries. +func (i *Index) Clear() { + i.mu.Lock() + defer i.mu.Unlock() + + i.cache.InvalidateAll() +} + +// Save persists the cache to disk using gob encoding. +func (i *Index) Save() error { + i.mu.RLock() + defer i.mu.RUnlock() + + snapshot := &cacheSnapshot{ + Version: i.version, + Data: make(map[string]*Metadata), + } + + maps.Insert(snapshot.Data, i.cache.All()) + + // NOTE(dwisiswant0): write to temp for atomic op. + tmpFile := i.cacheFile + ".tmp" + file, err := os.Create(tmpFile) + if err != nil { + return err + } + + encoder := gob.NewEncoder(file) + if err := encoder.Encode(snapshot); err != nil { + _ = file.Close() + _ = os.Remove(tmpFile) + + return err + } + + if err := file.Close(); err != nil { + _ = os.Remove(tmpFile) + + return err + } + + if err := os.Rename(tmpFile, i.cacheFile); err != nil { + _ = os.Remove(tmpFile) + + return err + } + + return nil +} + +// Load loads the cache from disk using gob decoding. 
+func (i *Index) Load() error { + file, err := os.Open(i.cacheFile) + if err != nil { + if os.IsNotExist(err) { + return nil + } + + return err + } + defer func() { _ = file.Close() }() + + var snapshot cacheSnapshot + + decoder := gob.NewDecoder(file) + if err := decoder.Decode(&snapshot); err != nil { + _ = file.Close() + _ = os.Remove(i.cacheFile) + + return nil + } + + if snapshot.Version != i.version { + _ = file.Close() + _ = os.Remove(i.cacheFile) + + return nil + } + + i.mu.Lock() + defer i.mu.Unlock() + + for key, value := range snapshot.Data { + i.cache.Set(key, value) + } + + return nil +} + +// Filter returns all template paths that match the given filter criteria. +func (i *Index) Filter(filter *Filter) []string { + if filter == nil || filter.IsEmpty() { + return i.All() + } + + i.mu.RLock() + defer i.mu.RUnlock() + + var matched []string + for path, metadata := range i.cache.All() { + if filter.Matches(metadata) { + matched = append(matched, path) + } + } + + return matched +} + +// FilterFunc returns all template paths that match the given filter function. +func (i *Index) FilterFunc(fn FilterFunc) []string { + if fn == nil { + return i.All() + } + + i.mu.RLock() + defer i.mu.RUnlock() + + var matched []string + for path, metadata := range i.cache.All() { + if fn(metadata) { + matched = append(matched, path) + } + } + + return matched +} + +// All returns all template paths in the index. +func (i *Index) All() []string { + i.mu.RLock() + defer i.mu.RUnlock() + + paths := make([]string, 0, i.cache.EstimatedSize()) + for path := range i.cache.All() { + paths = append(paths, path) + } + + return paths +} + +// GetAll returns all metadata entries in the index. +func (i *Index) GetAll() map[string]*Metadata { + i.mu.RLock() + defer i.mu.RUnlock() + + result := maps.Collect(i.cache.All()) + + return result +} + +// Count returns the number of templates matching the filter. 
+func (i *Index) Count(filter *Filter) int { + if filter == nil || filter.IsEmpty() { + return i.Size() + } + + i.mu.RLock() + defer i.mu.RUnlock() + + count := 0 + for _, metadata := range i.cache.All() { + if filter.Matches(metadata) { + count++ + } + } + + return count +} diff --git a/pkg/catalog/index/index_test.go b/pkg/catalog/index/index_test.go new file mode 100644 index 0000000000..de76dd7592 --- /dev/null +++ b/pkg/catalog/index/index_test.go @@ -0,0 +1,737 @@ +package index + +import ( + "fmt" + "os" + "path/filepath" + "testing" + "time" + + "github.com/projectdiscovery/nuclei/v3/pkg/model" + "github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity" + "github.com/projectdiscovery/nuclei/v3/pkg/model/types/stringslice" + "github.com/projectdiscovery/nuclei/v3/pkg/protocols/code" + "github.com/projectdiscovery/nuclei/v3/pkg/protocols/headless" + "github.com/projectdiscovery/nuclei/v3/pkg/protocols/http" + "github.com/projectdiscovery/nuclei/v3/pkg/templates" + "github.com/projectdiscovery/nuclei/v3/pkg/templates/types" + "github.com/stretchr/testify/require" +) + +func TestNewIndex(t *testing.T) { + t.Run("with custom directory", func(t *testing.T) { + tmpDir := t.TempDir() + cache, err := NewIndex(tmpDir) + require.NoError(t, err, "Failed to create cache with custom directory") + require.NotNil(t, cache, "Cache should not be nil") + require.Equal(t, filepath.Join(tmpDir, IndexFileName), cache.cacheFile) + require.Equal(t, IndexVersion, cache.version) + }) + + t.Run("with default directory", func(t *testing.T) { + cache, err := NewDefaultIndex() + require.NoError(t, err, "Failed to create cache with default directory") + require.NotNil(t, cache, "Cache should not be nil") + }) +} + +func TestCacheBasicOperations(t *testing.T) { + tmpDir := t.TempDir() + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + metadata := &Metadata{ + ID: "concurrent-test", + FilePath: "/tmp/concurrent.yaml", + } + + t.Run("Set and Has", func(t *testing.T) { + 
cache.Set(metadata.FilePath, metadata) + require.Equal(t, 1, cache.Size(), "Cache size should be 1 after Set") + require.True(t, cache.Has(metadata.FilePath), "Cache should contain the path after Set") + require.False(t, cache.Has("/nonexistent"), "Cache should not contain nonexistent path") + }) + + t.Run("Get with validation", func(t *testing.T) { + // Get should fail validation for nonexistent file + retrieved, found := cache.Get(metadata.FilePath) + require.False(t, found, "Get should fail validation for nonexistent file") + require.Nil(t, retrieved, "Retrieved metadata should be nil for invalid entry") + }) + + t.Run("Delete", func(t *testing.T) { + cache.Set(metadata.FilePath, metadata) + require.True(t, cache.Has(metadata.FilePath), "Cache should contain path before Delete") + + cache.Delete(metadata.FilePath) + require.False(t, cache.Has(metadata.FilePath), "Cache should not contain path after Delete") + }) + + t.Run("Clear", func(t *testing.T) { + cache.Set(metadata.FilePath, metadata) + cache.Set("/tmp/test2.yaml", &Metadata{ID: "test2", FilePath: "/tmp/test2.yaml"}) + require.True(t, cache.Size() > 0, "Cache should have entries before Clear") + + cache.Clear() + require.Equal(t, 0, cache.Size(), "Cache should be empty after Clear") + }) +} + +func TestCachePersistence(t *testing.T) { + tmpDir := t.TempDir() + + metadata1 := &Metadata{ + ID: "persist-test-1", + FilePath: "/tmp/persist1.yaml", + Name: "Persistence Test 1", + Authors: []string{"tester"}, + Tags: []string{"test"}, + Severity: "medium", + ProtocolType: "dns", + } + + metadata2 := &Metadata{ + ID: "persist-test-2", + FilePath: "/tmp/persist2.yaml", + Name: "Persistence Test 2", + Authors: []string{"tester2"}, + Tags: []string{"cve"}, + Severity: "critical", + ProtocolType: "http", + } + + t.Run("Save and Load", func(t *testing.T) { + // Create cache and add entries + cache1, err := NewIndex(tmpDir) + require.NoError(t, err) + + cache1.Set(metadata1.FilePath, metadata1) + 
cache1.Set(metadata2.FilePath, metadata2) + require.Equal(t, 2, cache1.Size()) + + // Save to disk + err = cache1.Save() + require.NoError(t, err, "Failed to save cache") + + // Verify cache file exists + cacheFile := filepath.Join(tmpDir, IndexFileName) + stat, err := os.Stat(cacheFile) + require.NoError(t, err, "Cache file should exist") + require.Greater(t, stat.Size(), int64(0), "Cache file should not be empty") + + // Create new cache and load + cache2, err := NewIndex(tmpDir) + require.NoError(t, err) + require.Equal(t, 0, cache2.Size(), "New cache should be empty before Load") + + err = cache2.Load() + require.NoError(t, err, "Failed to load cache") + + // Verify data was loaded + require.Equal(t, 2, cache2.Size(), "Loaded cache should have 2 entries") + require.True(t, cache2.Has(metadata1.FilePath), "Loaded cache should contain first entry") + require.True(t, cache2.Has(metadata2.FilePath), "Loaded cache should contain second entry") + }) + + t.Run("Load non-existent cache", func(t *testing.T) { + emptyDir := t.TempDir() + cache, err := NewIndex(emptyDir) + require.NoError(t, err) + + // Loading non-existent cache should not error + err = cache.Load() + require.NoError(t, err, "Loading non-existent cache should not error") + require.Equal(t, 0, cache.Size(), "Cache should be empty after loading non-existent file") + }) + + t.Run("Atomic save", func(t *testing.T) { + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + cache.Set(metadata1.FilePath, metadata1) + err = cache.Save() + require.NoError(t, err) + + // Verify no .tmp file left behind + tmpFile := filepath.Join(tmpDir, IndexFileName+".tmp") + _, err = os.Stat(tmpFile) + require.True(t, os.IsNotExist(err), "Temporary file should not exist after save") + + // Verify actual cache file exists + cacheFile := filepath.Join(tmpDir, IndexFileName) + _, err = os.Stat(cacheFile) + require.NoError(t, err, "Cache file should exist") + }) +} + +func TestIndexVersionMismatch(t *testing.T) { + tmpDir := 
t.TempDir() + + // Create cache with current version + cache1, err := NewIndex(tmpDir) + require.NoError(t, err) + + metadata := &Metadata{ + ID: "version-test", + FilePath: "/tmp/version.yaml", + } + cache1.Set(metadata.FilePath, metadata) + + // Save with current version + err = cache1.Save() + require.NoError(t, err) + + // Manually modify version and save again + cache1.version = 999 + err = cache1.Save() + require.NoError(t, err) + + // Try to load with different version + cache2, err := NewIndex(tmpDir) + require.NoError(t, err) + + // Load should succeed but cache should be empty (version mismatch) + err = cache2.Load() + require.NoError(t, err, "Load should not error on version mismatch") + require.Equal(t, 0, cache2.Size(), "Cache should be empty after version mismatch") +} + +func TestCacheCorruptedFile(t *testing.T) { + tmpDir := t.TempDir() + cacheFile := filepath.Join(tmpDir, IndexFileName) + + // Create corrupted cache file + err := os.WriteFile(cacheFile, []byte("corrupted data that is not valid gob"), 0644) + require.NoError(t, err) + + // Try to load corrupted cache + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + err = cache.Load() + require.NoError(t, err, "Load should not error on corrupted cache") + require.Equal(t, 0, cache.Size(), "Cache should be empty after loading corrupted file") + + // Corrupted file should be removed + _, err = os.Stat(cacheFile) + require.True(t, os.IsNotExist(err), "Corrupted cache file should be removed") +} + +func TestMetadataValidation(t *testing.T) { + tmpDir := t.TempDir() + tmpFile := filepath.Join(tmpDir, "test.yaml") + + t.Run("Valid metadata", func(t *testing.T) { + // Create a test file + err := os.WriteFile(tmpFile, []byte("id: test\ninfo:\n name: Test"), 0644) + require.NoError(t, err) + + info, err := os.Stat(tmpFile) + require.NoError(t, err) + + // Create metadata with correct checksum + metadata := &Metadata{ + ID: "test", + FilePath: tmpFile, + ModTime: info.ModTime(), + } + + // Should 
be valid + require.True(t, metadata.IsValid(), "Metadata should be valid for unchanged file") + }) + + t.Run("Invalid metadata after file modification", func(t *testing.T) { + // Create the test file first to ensure it exists in this subtest + err := os.WriteFile(tmpFile, []byte("id: test\ninfo:\n name: Test"), 0644) + require.NoError(t, err) + + // Set file ModTime to past to ensure modification is detectable + oldTime := time.Now().Add(-2 * time.Second) + err = os.Chtimes(tmpFile, oldTime, oldTime) + require.NoError(t, err) + + info, err := os.Stat(tmpFile) + require.NoError(t, err) + + metadata := &Metadata{ + ID: "test", + FilePath: tmpFile, + ModTime: info.ModTime(), + } + + // Modify file + err = os.WriteFile(tmpFile, []byte("id: test\ninfo:\n name: Modified"), 0644) + require.NoError(t, err) + + // Should now be invalid + require.False(t, metadata.IsValid(), "Metadata should be invalid after file modification") + }) + + t.Run("Invalid metadata for deleted file", func(t *testing.T) { + // Create the test file first to ensure it exists in this subtest + err := os.WriteFile(tmpFile, []byte("id: test\ninfo:\n name: Test"), 0644) + require.NoError(t, err) + + info, err := os.Stat(tmpFile) + require.NoError(t, err) + + metadata := &Metadata{ + ID: "test", + FilePath: tmpFile, + ModTime: info.ModTime(), + } + + // Delete file + err = os.Remove(tmpFile) + require.NoError(t, err) + + // Should be invalid + require.False(t, metadata.IsValid(), "Metadata should be invalid for deleted file") + }) +} + +func TestSetFromTemplate(t *testing.T) { + tmpDir := t.TempDir() + tmpFile := filepath.Join(tmpDir, "extract.yaml") + + // Create a test file + err := os.WriteFile(tmpFile, []byte("id: extract-test"), 0644) + require.NoError(t, err) + + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + t.Run("Basic metadata extraction", func(t *testing.T) { + template := &templates.Template{ + ID: "extract-test", + Info: model.Info{ + Name: "Extract Test Template", + Authors: 
stringslice.StringSlice{Value: "author1,author2"}, + Tags: stringslice.StringSlice{Value: "tag1,tag2"}, + Description: "Test description", + SeverityHolder: severity.Holder{ + Severity: severity.High, + }, + }, + SelfContained: true, + Verified: true, + TemplateVerifier: "test-verifier", + } + + metadata, ok := cache.SetFromTemplate(tmpFile, template) + require.True(t, ok, "Failed to set metadata from template") + require.NotNil(t, metadata, "Metadata should not be nil") + + // Verify core fields + require.Equal(t, "extract-test", metadata.ID) + require.Equal(t, tmpFile, metadata.FilePath) + + // Verify Info fields + require.Equal(t, "Extract Test Template", metadata.Name) + require.Equal(t, []string{"author1,author2"}, metadata.Authors) + require.Equal(t, []string{"tag1,tag2"}, metadata.Tags) + require.Equal(t, "high", metadata.Severity) + + // Verify flags + require.True(t, metadata.Verified) + require.Equal(t, "test-verifier", metadata.TemplateVerifier) + }) + + t.Run("HTTP protocol detection", func(t *testing.T) { + // Create a separate test file for this test + httpFile := filepath.Join(tmpDir, "http-test.yaml") + err := os.WriteFile(httpFile, []byte("id: http-test"), 0644) + require.NoError(t, err) + + template := &templates.Template{ + ID: "http-test", + Info: model.Info{ + Name: "HTTP Test", + Authors: stringslice.StringSlice{Value: "tester"}, + SeverityHolder: severity.Holder{ + Severity: severity.Medium, + }, + }, + RequestsHTTP: []*http.Request{{Method: http.HTTPMethodTypeHolder{MethodType: http.HTTPGet}}}, + } + + metadata, ok := cache.SetFromTemplate(httpFile, template) + require.True(t, ok) + require.NotNil(t, metadata) + require.Equal(t, "http", metadata.ProtocolType) + }) + + t.Run("Extract with missing file", func(t *testing.T) { + template := &templates.Template{ + ID: "missing-test", + Info: model.Info{ + Name: "Missing File Test", + Authors: stringslice.StringSlice{Value: "tester"}, + SeverityHolder: severity.Holder{ + Severity: severity.Low, + 
}, + }, + } + + metadata, ok := cache.SetFromTemplate("/nonexistent/file.yaml", template) + require.False(t, ok, "Should return false for nonexistent file") + require.NotNil(t, metadata, "Metadata should still be returned") + }) +} + +func TestMetadataMatchingHelpers(t *testing.T) { + metadata := &Metadata{ + Tags: []string{"cve", "rce", "apache"}, + Authors: []string{"pdteam", "geeknik"}, + Severity: "critical", + ProtocolType: "http", + } + + t.Run("HasTag", func(t *testing.T) { + require.True(t, metadata.HasTag("cve")) + require.True(t, metadata.HasTag("rce")) + require.True(t, metadata.HasTag("apache")) + require.False(t, metadata.HasTag("xxe")) + require.False(t, metadata.HasTag("")) + }) + + t.Run("HasAuthor", func(t *testing.T) { + require.True(t, metadata.HasAuthor("pdteam")) + require.True(t, metadata.HasAuthor("geeknik")) + require.False(t, metadata.HasAuthor("unknown")) + require.False(t, metadata.HasAuthor("")) + }) + + t.Run("MatchesSeverity", func(t *testing.T) { + require.True(t, metadata.MatchesSeverity(severity.Critical)) + require.False(t, metadata.MatchesSeverity(severity.High)) + require.False(t, metadata.MatchesSeverity(severity.Medium)) + require.False(t, metadata.MatchesSeverity(severity.Low)) + require.False(t, metadata.MatchesSeverity(severity.Info)) + }) + + t.Run("MatchesProtocol", func(t *testing.T) { + require.True(t, metadata.MatchesProtocol(types.HTTPProtocol)) + require.False(t, metadata.MatchesProtocol(types.DNSProtocol)) + require.False(t, metadata.MatchesProtocol(types.FileProtocol)) + require.False(t, metadata.MatchesProtocol(types.NetworkProtocol)) + }) + + t.Run("Empty metadata", func(t *testing.T) { + emptyMetadata := &Metadata{} + require.False(t, emptyMetadata.HasTag("any")) + require.False(t, emptyMetadata.HasAuthor("any")) + }) +} + +func TestCacheConcurrency(t *testing.T) { + tmpDir := t.TempDir() + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + // Test concurrent writes + t.Run("Concurrent Set", func(t 
*testing.T) { + done := make(chan bool) + for i := 0; i < 10; i++ { + go func(id int) { + metadata := &Metadata{ + ID: string(rune('a' + id)), + FilePath: filepath.Join("/tmp", string(rune('a'+id))+".yaml"), + } + cache.Set(metadata.FilePath, metadata) + done <- true + }(i) + } + + // Wait for all goroutines + for i := 0; i < 10; i++ { + <-done + } + + require.Equal(t, 10, cache.Size(), "All concurrent writes should succeed") + }) + + // Test concurrent reads + t.Run("Concurrent Has", func(t *testing.T) { + metadata := &Metadata{ + ID: "concurrent-test", + FilePath: "/tmp/concurrent.yaml", + } + cache.Set(metadata.FilePath, metadata) + + done := make(chan bool) + for i := 0; i < 20; i++ { + go func() { + _ = cache.Has(metadata.FilePath) + done <- true + }() + } + + // Wait for all goroutines + for i := 0; i < 20; i++ { + <-done + } + }) +} + +func TestCacheSize(t *testing.T) { + tmpDir := t.TempDir() + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + require.Equal(t, 0, cache.Size(), "New cache should have size 0") + + // Add entries + for i := 0; i < 5; i++ { + metadata := &Metadata{ + ID: string(rune('a' + i)), + FilePath: filepath.Join("/tmp", string(rune('a'+i))+".yaml"), + } + cache.Set(metadata.FilePath, metadata) + } + + require.Equal(t, 5, cache.Size(), "Cache should have size 5 after adding 5 entries") + + // Delete entries + cache.Delete(filepath.Join("/tmp", "a.yaml")) + cache.Delete(filepath.Join("/tmp", "b.yaml")) + + require.Equal(t, 3, cache.Size(), "Cache should have size 3 after deleting 2 entries") + + // Clear cache + cache.Clear() + require.Equal(t, 0, cache.Size(), "Cache should have size 0 after Clear") +} + +func TestCacheGetWithValidFile(t *testing.T) { + tmpDir := t.TempDir() + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + // Create a real file for testing validation + tmpFile := filepath.Join(tmpDir, "test.yaml") + err = os.WriteFile(tmpFile, []byte("id: test"), 0644) + require.NoError(t, err) + + info, err := 
os.Stat(tmpFile) + require.NoError(t, err) + + metadata := &Metadata{ + ID: "test", + FilePath: tmpFile, + ModTime: info.ModTime(), + Name: "Test Template", + } + + // Set and get should work with valid file + cache.Set(metadata.FilePath, metadata) + retrieved, found := cache.Get(metadata.FilePath) + require.True(t, found, "Should find entry with valid file") + require.NotNil(t, retrieved, "Retrieved metadata should not be nil") + require.Equal(t, metadata.ID, retrieved.ID) +} + +func TestCacheSaveErrorHandling(t *testing.T) { + tmpDir := t.TempDir() + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + metadata := &Metadata{ + ID: "test", + FilePath: filepath.Join("/tmp", "test.yaml"), + } + cache.Set(metadata.FilePath, metadata) + + // Create a directory where the temp file would be created to force an error + // The Save method creates a file at cacheFile + ".tmp" + conflictPath := filepath.Join(tmpDir, IndexFileName+".tmp") + err = os.Mkdir(conflictPath, 0755) + require.NoError(t, err) + + err = cache.Save() + require.Error(t, err, "Save should fail when temp file cannot be created") +} + +func TestNewCacheWithInvalidDirectory(t *testing.T) { + // Try to create cache in a file path (should fail) + tmpFile := filepath.Join(t.TempDir(), "file.txt") + err := os.WriteFile(tmpFile, []byte("test"), 0644) + require.NoError(t, err) + + cache, err := NewIndex(tmpFile) + require.Error(t, err, "NewCache should fail when path is a file") + require.Nil(t, cache, "Cache should be nil on error") +} + +func TestCacheLoadCorruptedRemoval(t *testing.T) { + tmpDir := t.TempDir() + cacheFile := filepath.Join(tmpDir, IndexFileName) + + // Create corrupted cache file with invalid gob data + err := os.WriteFile(cacheFile, []byte("this is not valid gob encoding at all!"), 0644) + require.NoError(t, err) + + // Verify file exists before Load + _, err = os.Stat(cacheFile) + require.NoError(t, err, "Corrupted file should exist") + + // Load should not error but should remove 
corrupted file + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + err = cache.Load() + require.NoError(t, err, "Load should not return error for corrupted file") + + // Verify corrupted file was removed + _, err = os.Stat(cacheFile) + require.True(t, os.IsNotExist(err), "Corrupted file should be removed") + require.Equal(t, 0, cache.Size(), "Cache should be empty after loading corrupted file") +} + +func TestMetadataExtractionWithNilClassification(t *testing.T) { + tmpDir := t.TempDir() + tmpFile := filepath.Join(tmpDir, "test.yaml") + err := os.WriteFile(tmpFile, []byte("id: test"), 0644) + require.NoError(t, err) + + template := &templates.Template{ + ID: "nil-classification", + Info: model.Info{ + Name: "Template without classification", + Authors: stringslice.StringSlice{Value: "tester"}, + SeverityHolder: severity.Holder{ + Severity: severity.Medium, + }, + Classification: nil, // Explicitly nil + }, + } + + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + metadata, ok := cache.SetFromTemplate(tmpFile, template) + require.True(t, ok) + require.NotNil(t, metadata) +} + +func TestCachePersistenceWithLargeDataset(t *testing.T) { + tmpDir := t.TempDir() + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + // Add 100 entries to test bulk operations + for i := 0; i < 100; i++ { + metadata := &Metadata{ + ID: fmt.Sprintf("template-%d", i), + FilePath: filepath.Join("/tmp", fmt.Sprintf("template-%d.yaml", i)), + Name: fmt.Sprintf("Template %d", i), + Authors: []string{fmt.Sprintf("author%d", i)}, + Tags: []string{"tag1", "tag2", "tag3"}, + Severity: "high", + } + cache.Set(metadata.FilePath, metadata) + } + + require.Equal(t, 100, cache.Size(), "Cache should contain 100 entries") + + // Save to disk + err = cache.Save() + require.NoError(t, err) + + // Load into new cache + cache2, err := NewIndex(tmpDir) + require.NoError(t, err) + err = cache2.Load() + require.NoError(t, err) + + require.Equal(t, 100, cache2.Size(), "Loaded cache 
should contain 100 entries") + + // Verify a sample entry + found := cache2.Has(filepath.Join("/tmp", "template-50.yaml")) + require.True(t, found, "Should find sample entry") +} + +func TestMetadataHelperMethods(t *testing.T) { + metadata := &Metadata{ + ID: "helper-test", + Tags: []string{}, + Authors: []string{}, + Severity: "", + ProtocolType: "", + } + + t.Run("Empty tags", func(t *testing.T) { + require.False(t, metadata.HasTag("anytag")) + }) + + t.Run("Empty authors", func(t *testing.T) { + require.False(t, metadata.HasAuthor("anyauthor")) + }) + + t.Run("Empty severity", func(t *testing.T) { + require.False(t, metadata.MatchesSeverity(severity.Critical)) + }) + + t.Run("Empty protocol", func(t *testing.T) { + require.False(t, metadata.MatchesProtocol(types.HTTPProtocol)) + }) +} + +func TestMultipleProtocolsDetection(t *testing.T) { + tmpDir := t.TempDir() + tmpFile := filepath.Join(tmpDir, "multi.yaml") + err := os.WriteFile(tmpFile, []byte("id: multi"), 0644) + require.NoError(t, err) + + // Template with multiple protocol types + template := &templates.Template{ + ID: "multi-protocol", + Info: model.Info{ + Name: "Multi Protocol Template", + Authors: stringslice.StringSlice{Value: "tester"}, + SeverityHolder: severity.Holder{ + Severity: severity.High, + }, + }, + RequestsHTTP: []*http.Request{{Method: http.HTTPMethodTypeHolder{MethodType: http.HTTPGet}}}, + RequestsHeadless: []*headless.Request{{}}, + RequestsCode: []*code.Request{{}}, + } + + cache, err := NewIndex(tmpDir) + require.NoError(t, err) + + metadata, ok := cache.SetFromTemplate(tmpFile, template) + require.True(t, ok) + require.NotNil(t, metadata) + require.Equal(t, "http", metadata.ProtocolType, "Primary protocol should be http") +} + +func TestNewMetadataFromTemplate(t *testing.T) { + tmpl := &templates.Template{ + ID: "test-template", + Info: model.Info{ + Name: "Test Template", + Authors: stringslice.StringSlice{Value: []string{"author"}}, + Tags: stringslice.StringSlice{Value: 
[]string{"tag"}}, + SeverityHolder: severity.Holder{ + Severity: severity.Low, + }, + }, + Verified: true, + TemplateVerifier: "verifier", + } + + path := "/tmp/test.yaml" + metadata := NewMetadataFromTemplate(path, tmpl) + + require.Equal(t, tmpl.ID, metadata.ID) + require.Equal(t, path, metadata.FilePath) + require.Equal(t, tmpl.Info.Name, metadata.Name) + require.Equal(t, tmpl.Info.Authors.ToSlice(), metadata.Authors) + require.Equal(t, tmpl.Info.Tags.ToSlice(), metadata.Tags) + require.Equal(t, tmpl.Info.SeverityHolder.Severity.String(), metadata.Severity) + require.Equal(t, tmpl.Type().String(), metadata.ProtocolType) + require.Equal(t, tmpl.Verified, metadata.Verified) + require.Equal(t, tmpl.TemplateVerifier, metadata.TemplateVerifier) +} diff --git a/pkg/catalog/index/metadata.go b/pkg/catalog/index/metadata.go new file mode 100644 index 0000000000..013ab439c1 --- /dev/null +++ b/pkg/catalog/index/metadata.go @@ -0,0 +1,104 @@ +package index + +import ( + "os" + "slices" + "time" + + "github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity" + "github.com/projectdiscovery/nuclei/v3/pkg/templates" + "github.com/projectdiscovery/nuclei/v3/pkg/templates/types" +) + +// Metadata contains lightweight metadata extracted from a template. +type Metadata struct { + // ID is the unique identifier of the template. + ID string `gob:"id"` + + // FilePath is the path to the template file. + FilePath string `gob:"file_path"` + + // ModTime is the modification time of the template file. + ModTime time.Time `gob:"mod_time"` + + // Name is the name of the template. + Name string `gob:"name"` + + // Authors are the authors of the template. + Authors []string `gob:"authors"` + + // Tags are the tags associated with the template. + Tags []string `gob:"tags"` + + // Severity is the severity level of the template. + Severity string `gob:"severity"` + + // ProtocolType is the primary protocol type of the template. 
+ ProtocolType string `gob:"protocol_type"` + + // Verified indicates whether the template is verified. + Verified bool `gob:"verified"` + + // TemplateVerifier is the verifier used for the template. + TemplateVerifier string `gob:"verifier,omitempty"` + + // NOTE(dwisiswant0): Consider adding more fields here in the future to + // enhance filtering caps w/o loading full templates, such as: + // `has_{code,headless,file}` to indicate presence of protocol-based + // requests, and/or classification fields (CVE, CWE, CVSS, EPSS), if needed. + // + // For maintainers: when adding new fields, don't forget to update the + // Weigher logic in [NewIndex] to account for the new fields in cache weight + // calculation, because it affects cache eviction behavior. Also, consider + // the impact on existing cached data and whether a [IndexVersion] bump is + // needed. +} + +// NewMetadataFromTemplate creates a new metadata object from a template. +func NewMetadataFromTemplate(path string, tpl *templates.Template) *Metadata { + return &Metadata{ + ID: tpl.ID, + FilePath: path, + + Name: tpl.Info.Name, + Authors: tpl.Info.Authors.ToSlice(), + Tags: tpl.Info.Tags.ToSlice(), + Severity: tpl.Info.SeverityHolder.Severity.String(), + + ProtocolType: tpl.Type().String(), + + Verified: tpl.Verified, + TemplateVerifier: tpl.TemplateVerifier, + } +} + +// IsValid checks if the cached metadata is still valid by comparing the file +// modification time. +func (m *Metadata) IsValid() bool { + info, err := os.Stat(m.FilePath) + if err != nil { + return false + } + + return m.ModTime.Equal(info.ModTime()) +} + +// MatchesSeverity checks if the metadata matches the given severity. +func (m *Metadata) MatchesSeverity(sev severity.Severity) bool { + return m.Severity == sev.String() +} + +// MatchesProtocol checks if the metadata matches the given protocol type. 
+func (m *Metadata) MatchesProtocol(protocolType types.ProtocolType) bool { + return m.ProtocolType == protocolType.String() +} + +// HasTag checks if the metadata contains the given tag. +func (m *Metadata) HasTag(tag string) bool { + return slices.Contains(m.Tags, tag) +} + +// HasAuthor checks if the metadata contains the given author. +func (m *Metadata) HasAuthor(author string) bool { + return slices.Contains(m.Authors, author) +} diff --git a/pkg/catalog/loader/loader.go b/pkg/catalog/loader/loader.go index a68153d875..00bc970b9b 100644 --- a/pkg/catalog/loader/loader.go +++ b/pkg/catalog/loader/loader.go @@ -14,7 +14,7 @@ import ( "github.com/projectdiscovery/gologger" "github.com/projectdiscovery/nuclei/v3/pkg/catalog" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/config" - "github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader/filter" + "github.com/projectdiscovery/nuclei/v3/pkg/catalog/index" "github.com/projectdiscovery/nuclei/v3/pkg/keys" "github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity" "github.com/projectdiscovery/nuclei/v3/pkg/protocols" @@ -77,7 +77,6 @@ type Config struct { type Store struct { id string // id of the store (optional) tagFilter *templates.TagFilter - pathFilter *filter.PathFilter config *Config finalTemplates []string finalWorkflows []string @@ -92,6 +91,16 @@ type Store struct { // parserCacheOnce is used to cache the parser cache result parserCacheOnce func() *templates.Cache + // metadataIndex is the template metadata cache + metadataIndex *index.Index + + // indexFilter is the cached filter for metadata matching + indexFilter *index.Filter + + // saveTemplatesIndexOnce is used to ensure we only save the metadata index + // once + saveMetadataIndexOnce func() + // NotFoundCallback is called for each not found template // This overrides error handling for not found templates NotFoundCallback func(template string) bool @@ -129,17 +138,10 @@ func NewConfig(options *types.Options, catalog catalog.Catalog, 
executerOpts *pr // New creates a new template store based on provided configuration func New(cfg *Config) (*Store, error) { + // tagFilter only for IncludeConditions (advanced filtering). + // All other filtering (tags, authors, severities, IDs, protocols, paths) is + // handled by [index.Filter]. tagFilter, err := templates.NewTagFilter(&templates.TagFilterConfig{ - Tags: cfg.Tags, - ExcludeTags: cfg.ExcludeTags, - Authors: cfg.Authors, - Severities: cfg.Severities, - ExcludeSeverities: cfg.ExcludeSeverities, - IncludeTags: cfg.IncludeTags, - IncludeIds: cfg.IncludeIds, - ExcludeIds: cfg.ExcludeIds, - Protocols: cfg.Protocols, - ExcludeProtocols: cfg.ExcludeProtocols, IncludeConditions: cfg.IncludeConditions, }) if err != nil { @@ -147,13 +149,9 @@ func New(cfg *Config) (*Store, error) { } store := &Store{ - id: cfg.StoreId, - config: cfg, - tagFilter: tagFilter, - pathFilter: filter.NewPathFilter(&filter.PathFilterConfig{ - IncludedTemplates: cfg.IncludeTemplates, - ExcludedTemplates: cfg.ExcludeTemplates, - }, cfg.Catalog), + id: cfg.StoreId, + config: cfg, + tagFilter: tagFilter, finalTemplates: cfg.Templates, finalWorkflows: cfg.Workflows, logger: cfg.Logger, @@ -171,6 +169,21 @@ func New(cfg *Config) (*Store, error) { return nil }) + // Initialize metadata index and filter (load from disk & cache for reuse) + store.metadataIndex = store.loadTemplatesIndex() + store.indexFilter = store.buildIndexFilter() + store.saveMetadataIndexOnce = sync.OnceFunc(func() { + if store.metadataIndex == nil { + return + } + + if err := store.metadataIndex.Save(); err != nil { + store.logger.Warning().Msgf("Could not save metadata cache: %v", err) + } else { + store.logger.Verbose().Msgf("Saved %d templates to metadata cache", store.metadataIndex.Size()) + } + }) + // Do a check to see if we have URLs in templates flag, if so // we need to processs them separately and remove them from the initial list var templatesFinal []string @@ -302,17 +315,102 @@ func init() { 
templateIDPathMap = make(map[string]string) } +// buildIndexFilter creates an [index.Filter] from the store configuration. +// This filter handles all basic filtering (paths, tags, authors, severities, +// IDs, protocols). Advanced IncludeConditions filtering is handled separately +// by tagFilter. +func (store *Store) buildIndexFilter() *index.Filter { + includeTemplates, _ := store.config.Catalog.GetTemplatesPath(store.config.IncludeTemplates) + excludeTemplates, _ := store.config.Catalog.GetTemplatesPath(store.config.ExcludeTemplates) + + return &index.Filter{ + Authors: store.config.Authors, + Tags: store.config.Tags, + ExcludeTags: store.config.ExcludeTags, + IncludeTags: store.config.IncludeTags, + IDs: store.config.IncludeIds, + ExcludeIDs: store.config.ExcludeIds, + IncludeTemplates: includeTemplates, + ExcludeTemplates: excludeTemplates, + Severities: []severity.Severity(store.config.Severities), + ExcludeSeverities: []severity.Severity(store.config.ExcludeSeverities), + ProtocolTypes: []templateTypes.ProtocolType(store.config.Protocols), + ExcludeProtocolTypes: []templateTypes.ProtocolType(store.config.ExcludeProtocols), + } +} + +func (store *Store) loadTemplatesIndex() *index.Index { + var metadataIdx *index.Index + + idx, err := index.NewDefaultIndex() + if err != nil { + store.logger.Warning().Msgf("Could not create metadata cache: %v", err) + } else { + metadataIdx = idx + if err := metadataIdx.Load(); err != nil { + store.logger.Warning().Msgf("Could not load metadata cache: %v", err) + } + } + + return metadataIdx +} + // LoadTemplatesOnlyMetadata loads only the metadata of the templates func (store *Store) LoadTemplatesOnlyMetadata() error { + defer store.saveMetadataIndexOnce() + templatePaths, errs := store.config.Catalog.GetTemplatesPath(store.finalTemplates) store.logErroredTemplates(errs) - filteredTemplatePaths := store.pathFilter.Match(templatePaths) - + indexFilter := store.indexFilter validPaths := make(map[string]struct{}) - for 
templatePath := range filteredTemplatePaths { + + for _, templatePath := range templatePaths { + if store.metadataIndex != nil { + if metadata, found := store.metadataIndex.Get(templatePath); found { + if !indexFilter.Matches(metadata) { + continue + } + + if store.tagFilter != nil { + loaded, err := store.config.ExecutorOptions.Parser.LoadTemplate(templatePath, store.tagFilter, nil, store.config.Catalog) + if !loaded { + if err != nil && strings.Contains(err.Error(), templates.ErrExcluded.Error()) { + stats.Increment(templates.TemplatesExcludedStats) + if config.DefaultConfig.LogAllEvents { + store.logger.Print().Msgf("[%v] %v\n", aurora.Yellow("WRN").String(), err.Error()) + } + } + continue + } + } + + validPaths[templatePath] = struct{}{} + continue + } + } + loaded, err := store.config.ExecutorOptions.Parser.LoadTemplate(templatePath, store.tagFilter, nil, store.config.Catalog) - if loaded || store.pathFilter.MatchIncluded(templatePath) { + if loaded { + templatesCache := store.parserCacheOnce() + if templatesCache != nil { + if template, _, _ := templatesCache.Has(templatePath); template != nil { + var metadata *index.Metadata + if store.metadataIndex != nil { + metadata, _ = store.metadataIndex.SetFromTemplate(templatePath, template) + } else { + metadata = index.NewMetadataFromTemplate(templatePath, template) + } + + if !indexFilter.Matches(metadata) { + continue + } + + validPaths[templatePath] = struct{}{} + continue + } + } + validPaths[templatePath] = struct{}{} } if err != nil { @@ -376,15 +474,24 @@ func (store *Store) LoadTemplatesOnlyMetadata() error { func (store *Store) ValidateTemplates() error { templatePaths, errs := store.config.Catalog.GetTemplatesPath(store.finalTemplates) store.logErroredTemplates(errs) + workflowPaths, errs := store.config.Catalog.GetTemplatesPath(store.finalWorkflows) store.logErroredTemplates(errs) - filteredTemplatePaths := store.pathFilter.Match(templatePaths) - filteredWorkflowPaths := 
store.pathFilter.Match(workflowPaths) + templatePathsMap := make(map[string]struct{}, len(templatePaths)) + for _, path := range templatePaths { + templatePathsMap[path] = struct{}{} + } + + workflowPathsMap := make(map[string]struct{}, len(workflowPaths)) + for _, path := range workflowPaths { + workflowPathsMap[path] = struct{}{} + } - if store.areTemplatesValid(filteredTemplatePaths) && store.areWorkflowsValid(filteredWorkflowPaths) { + if store.areTemplatesValid(templatePathsMap) && store.areWorkflowsValid(workflowPathsMap) { return nil } + return errors.New("errors occurred during template validation") } @@ -503,10 +610,9 @@ func (store *Store) LoadTemplates(templatesList []string) []*templates.Template func (store *Store) LoadWorkflows(workflowsList []string) []*templates.Template { includedWorkflows, errs := store.config.Catalog.GetTemplatesPath(workflowsList) store.logErroredTemplates(errs) - workflowPathMap := store.pathFilter.Match(includedWorkflows) - loadedWorkflows := make([]*templates.Template, 0, len(workflowPathMap)) - for workflowPath := range workflowPathMap { + loadedWorkflows := make([]*templates.Template, 0, len(includedWorkflows)) + for _, workflowPath := range includedWorkflows { loaded, err := store.config.ExecutorOptions.Parser.LoadWorkflow(workflowPath, store.config.Catalog) if err != nil { store.logger.Warning().Msgf("Could not load workflow %s: %s\n", workflowPath, err) @@ -526,9 +632,12 @@ func (store *Store) LoadWorkflows(workflowsList []string) []*templates.Template // LoadTemplatesWithTags takes a list of templates and extra tags // returning templates that match. 
func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templates.Template { + defer store.saveMetadataIndexOnce() + + indexFilter := store.indexFilter + includedTemplates, errs := store.config.Catalog.GetTemplatesPath(templatesList) store.logErroredTemplates(errs) - templatePathMap := store.pathFilter.Match(includedTemplates) loadedTemplates := sliceutil.NewSyncSlice[*templates.Template]() loadedTemplateIDs := mapsutil.NewSyncLockMap[string, struct{}]() @@ -572,14 +681,46 @@ func (store *Store) LoadTemplatesWithTags(templatesList, tags []string) []*templ panic("dialers with executionId " + store.config.ExecutorOptions.Options.ExecutionId + " not found") } - for templatePath := range templatePathMap { + for _, templatePath := range includedTemplates { wgLoadTemplates.Add() go func(templatePath string) { defer wgLoadTemplates.Done() + var ( + metadata *index.Metadata + metadataCached bool + ) + + if store.metadataIndex != nil { + if cachedMetadata, found := store.metadataIndex.Get(templatePath); found { + metadata = cachedMetadata + if !indexFilter.Matches(metadata) { + return + } + // NOTE(dwisiswant0): else, tagFilter probably exists (for + // IncludeConditions), which still need to check via + // LoadTemplate. 
+ + metadataCached = true + } + } + loaded, err := store.config.ExecutorOptions.Parser.LoadTemplate(templatePath, store.tagFilter, tags, store.config.Catalog) - if loaded || store.pathFilter.MatchIncluded(templatePath) { + if loaded { parsed, err := templates.Parse(templatePath, store.preprocessor, store.config.ExecutorOptions) + + if parsed != nil && !metadataCached { + if store.metadataIndex != nil { + metadata, _ = store.metadataIndex.SetFromTemplate(templatePath, parsed) + } else { + metadata = index.NewMetadataFromTemplate(templatePath, parsed) + } + + if metadata != nil && !indexFilter.Matches(metadata) { + return + } + } + if err != nil { // exclude templates not compatible with offline matching from total runtime warning stats if !errors.Is(err, templates.ErrIncompatibleWithOfflineMatching) { diff --git a/pkg/catalog/loader/loader_bench_test.go b/pkg/catalog/loader/loader_bench_test.go index 079e928ad5..32ed506e8b 100644 --- a/pkg/catalog/loader/loader_bench_test.go +++ b/pkg/catalog/loader/loader_bench_test.go @@ -8,7 +8,9 @@ import ( "github.com/projectdiscovery/nuclei/v3/pkg/catalog/disk" "github.com/projectdiscovery/nuclei/v3/pkg/catalog/loader" "github.com/projectdiscovery/nuclei/v3/pkg/loader/workflow" + "github.com/projectdiscovery/nuclei/v3/pkg/model/types/severity" "github.com/projectdiscovery/nuclei/v3/pkg/templates" + templateTypes "github.com/projectdiscovery/nuclei/v3/pkg/templates/types" "github.com/projectdiscovery/nuclei/v3/pkg/testutils" ) @@ -41,3 +43,201 @@ func BenchmarkStoreValidateTemplates(b *testing.B) { _ = store.ValidateTemplates() } } + +func BenchmarkLoadTemplates(b *testing.B) { + options := testutils.DefaultOptions.Copy() + options.Logger = &gologger.Logger{} + options.ExecutionId = "bench-load-templates" + testutils.Init(options) + + catalog := disk.NewCatalog(config.DefaultConfig.TemplatesDirectory) + executerOpts := testutils.NewMockExecuterOptions(options, nil) + executerOpts.Parser = templates.NewParser() + + 
workflowLoader, err := workflow.NewLoader(executerOpts) + if err != nil { + b.Fatalf("could not create workflow loader: %s", err) + } + executerOpts.WorkflowLoader = workflowLoader + + b.Run("NoFilter", func(b *testing.B) { + loaderCfg := loader.NewConfig(options, catalog, executerOpts) + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplates([]string{config.DefaultConfig.TemplatesDirectory}) + } + }) + + b.Run("FilterBySeverityCritical", func(b *testing.B) { + opts := options.Copy() + opts.Severities = severity.Severities{severity.Critical} + loaderCfg := loader.NewConfig(opts, catalog, executerOpts) + + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplates([]string{config.DefaultConfig.TemplatesDirectory}) + } + }) + + b.Run("FilterBySeverityHighCritical", func(b *testing.B) { + opts := options.Copy() + opts.Severities = severity.Severities{severity.High, severity.Critical} + loaderCfg := loader.NewConfig(opts, catalog, executerOpts) + + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplates([]string{config.DefaultConfig.TemplatesDirectory}) + } + }) + + b.Run("FilterByAuthor", func(b *testing.B) { + opts := options.Copy() + opts.Authors = []string{"pdteam"} + loaderCfg := loader.NewConfig(opts, catalog, executerOpts) + + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplates([]string{config.DefaultConfig.TemplatesDirectory}) + } + }) + + b.Run("FilterByTags", func(b *testing.B) { + opts := options.Copy() + opts.Tags = []string{"cve", "rce"} + 
loaderCfg := loader.NewConfig(opts, catalog, executerOpts) + + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplates([]string{config.DefaultConfig.TemplatesDirectory}) + } + }) + + b.Run("FilterByProtocol", func(b *testing.B) { + opts := options.Copy() + opts.Protocols = templateTypes.ProtocolTypes{templateTypes.HTTPProtocol} + loaderCfg := loader.NewConfig(opts, catalog, executerOpts) + + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplates([]string{config.DefaultConfig.TemplatesDirectory}) + } + }) + + b.Run("ComplexFilter", func(b *testing.B) { + opts := options.Copy() + opts.Severities = severity.Severities{severity.High, severity.Critical} + opts.Authors = []string{"pdteam"} + opts.Tags = []string{"cve"} + loaderCfg := loader.NewConfig(opts, catalog, executerOpts) + + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplates([]string{config.DefaultConfig.TemplatesDirectory}) + } + }) +} + +func BenchmarkLoadTemplatesOnlyMetadata(b *testing.B) { + options := testutils.DefaultOptions.Copy() + options.Logger = &gologger.Logger{} + options.ExecutionId = "bench-metadata" + testutils.Init(options) + + catalog := disk.NewCatalog(config.DefaultConfig.TemplatesDirectory) + executerOpts := testutils.NewMockExecuterOptions(options, nil) + executerOpts.Parser = templates.NewParser() + + workflowLoader, err := workflow.NewLoader(executerOpts) + if err != nil { + b.Fatalf("could not create workflow loader: %s", err) + } + executerOpts.WorkflowLoader = workflowLoader + + b.Run("WithoutFilter", func(b *testing.B) { + loaderCfg := loader.NewConfig(options, catalog, executerOpts) + store, 
err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + // Pre-warm the cache + _ = store.LoadTemplatesOnlyMetadata() + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplatesOnlyMetadata() + } + }) + + b.Run("WithSeverityFilter", func(b *testing.B) { + opts := options.Copy() + opts.Severities = severity.Severities{severity.Critical} + loaderCfg := loader.NewConfig(opts, catalog, executerOpts) + + store, err := loader.New(loaderCfg) + if err != nil { + b.Fatalf("could not create store: %s", err) + } + + // Pre-warm the cache + _ = store.LoadTemplatesOnlyMetadata() + + b.ResetTimer() + b.ReportAllocs() + + for b.Loop() { + _ = store.LoadTemplatesOnlyMetadata() + } + }) +} From f181a691b2998fe9ff287db7fe30e7f58ac1d6e1 Mon Sep 17 00:00:00 2001 From: Dwi Siswanto Date: Thu, 4 Dec 2025 22:20:38 +0700 Subject: [PATCH 55/55] chore: bump version Signed-off-by: Dwi Siswanto --- pkg/catalog/config/constants.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pkg/catalog/config/constants.go b/pkg/catalog/config/constants.go index ddc05d8638..38b636304a 100644 --- a/pkg/catalog/config/constants.go +++ b/pkg/catalog/config/constants.go @@ -31,7 +31,7 @@ const ( CLIConfigFileName = "config.yaml" ReportingConfigFilename = "reporting-config.yaml" // Version is the current version of nuclei - Version = `v3.5.1` + Version = `v3.6.0` // Directory Names of custom templates CustomS3TemplatesDirName = "s3" CustomGitHubTemplatesDirName = "github"