Commit: add method option
liamg committed Dec 17, 2020
1 parent b98c500 commit acb4219
Showing 4 changed files with 17 additions and 1 deletion.
README.md (4 additions, 0 deletions)

```diff
@@ -53,6 +53,10 @@ Extra header to send with requests e.g. `-H "Cookie: PHPSESSID=blah"`
 
 HTTP status codes which indicate a positive find. (default `200,400,403,500,405,204,401,301,302`)
 
+##### `-m, --method`
+
+HTTP method to use.
+
 ##### `-s, --spider`
 
 Scan page content for links and confirm their existence.
```
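With the flag wired up (see the diffs below), a non-default method can be requested with something like `scout url -m POST http://example.com/`. The `url` subcommand and `-m` flag come from this commit; the binary name and target URL are illustrative.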
cmd/scout/url.go (3 additions, 0 deletions)

```diff
@@ -22,6 +22,7 @@ var headers []string
 var extensions = []string{"php", "htm", "html", "txt"}
 var enableSpidering bool
 var proxy string
+var method = "GET"
 
 var urlCmd = &cobra.Command{
 	Use: "url [url]",
@@ -65,6 +66,7 @@ var urlCmd = &cobra.Command{
 	}
 
 	options := []scan.URLOption{
+		scan.WithMethod(method),
 		scan.WithPositiveStatusCodes(intStatusCodes),
 		scan.WithTargetURL(*parsedURL),
 		scan.WithResultChan(resultChan),
@@ -192,6 +194,7 @@ func init() {
 	urlCmd.Flags().StringSliceVarP(&headers, "header", "H", headers, "Extra header to send with requests (can be specified multiple times).")
 	urlCmd.Flags().BoolVarP(&enableSpidering, "spider", "s", enableSpidering, "Spider links within page content")
 	urlCmd.Flags().StringVarP(&proxy, "proxy", "p", proxy, "HTTP Proxy to use")
+	urlCmd.Flags().StringVarP(&method, "method", "m", method, "HTTP method (default: GET)")
 
 	rootCmd.AddCommand(urlCmd)
 }
```
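Because `StringVarP` receives the current value of `method` as its default, the variable keeps its initial `"GET"` unless the user passes `-m`/`--method`. Below is a minimal sketch of that wiring, assuming only the flag and command names taken from the diff; everything else is illustrative scaffolding:

```go
package main

import (
	"fmt"

	"github.com/spf13/cobra"
)

// method mirrors the package-level variable added in cmd/scout/url.go;
// its initial value doubles as the flag's default.
var method = "GET"

func main() {
	cmd := &cobra.Command{
		Use: "url [url]",
		RunE: func(cmd *cobra.Command, args []string) error {
			// By the time RunE executes, cobra has parsed the flags,
			// so method holds either "GET" or the user's value.
			fmt.Println("scanning with method:", method)
			return nil
		},
	}
	cmd.Flags().StringVarP(&method, "method", "m", method, "HTTP method (default: GET)")
	_ = cmd.Execute()
}
```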
pkg/scan/url_options.go (7 additions, 0 deletions)

```diff
@@ -2,6 +2,7 @@ package scan
 
 import (
 	"net/url"
+	"strings"
 	"time"
 
 	"github.com/liamg/scout/pkg/wordlist"
@@ -94,6 +95,12 @@ func WithExtraHeaders(headers []string) URLOption {
 	}
 }
 
+func WithMethod(method string) URLOption {
+	return func(s *URLScanner) {
+		s.method = strings.ToUpper(method)
+	}
+}
+
 type URLResult struct {
 	URL        url.URL
 	StatusCode int
```
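`WithMethod` follows the functional-options pattern used by the other `With...` helpers in this file: each returns a closure over a `*URLScanner`, and (as the scanner diff below shows) the constructor applies the closures after setting defaults. A stripped-down sketch of the pattern, with the struct reduced to the one field this commit adds:

```go
package main

import (
	"fmt"
	"strings"
)

// URLScanner is trimmed to the single field this commit touches.
type URLScanner struct {
	method string
}

// URLOption matches the shape used in pkg/scan: a function that
// mutates the scanner it is given.
type URLOption func(s *URLScanner)

// WithMethod uppercases its argument so "post" and "POST" behave
// identically, as in the diff above.
func WithMethod(method string) URLOption {
	return func(s *URLScanner) {
		s.method = strings.ToUpper(method)
	}
}

// NewURLScanner sets defaults first, then lets each option override them.
func NewURLScanner(options ...URLOption) *URLScanner {
	scanner := &URLScanner{method: "GET"}
	for _, option := range options {
		option(scanner)
	}
	return scanner
}

func main() {
	fmt.Println(NewURLScanner().method)                   // GET (default)
	fmt.Println(NewURLScanner(WithMethod("post")).method) // POST
}
```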
pkg/scan/url_scanner.go (3 additions, 1 deletion)

```diff
@@ -42,6 +42,7 @@ type URLScanner struct {
 	queueChan  chan URLJob
 	jobsLoaded int32
 	proxy      *url.URL
+	method     string
 }
 
 type URLJob struct {
@@ -71,6 +72,7 @@ func NewURLScanner(options ...URLOption) *URLScanner {
 		extensions:       []string{"php", "htm", "html", "txt"},
 		backupExtensions: []string{"~", ".bak", ".BAK", ".old", ".backup", ".txt", ".OLD", ".BACKUP", "1", "2", "_", ".1", ".2"},
 		enableSpidering:  false,
+		method:           "GET",
 	}
 
 	for _, option := range options {
@@ -269,7 +271,7 @@ func (scanner *URLScanner) checkURL(job URLJob) *URLResult {
 
 	if err := retry.Do(func() error {
 
-		req, err := http.NewRequest(http.MethodGet, job.URL, nil)
+		req, err := http.NewRequest(scanner.method, job.URL, nil)
 		if err != nil {
 			return err
 		}
```
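The only behavioural change in `checkURL` is that the method string now comes from the scanner rather than being hard-coded to `http.MethodGet`. Go's `net/http` accepts an arbitrary method string in `http.NewRequest`, which is why uppercasing in `WithMethod` is all the normalisation needed. A small self-contained sketch (the echo server is invented for illustration):

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

func main() {
	// Throwaway server that echoes back the method it receives.
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		fmt.Fprintf(w, "saw %s", r.Method)
	}))
	defer srv.Close()

	method := "POST" // in scout this would be scanner.method

	// Same call shape as the diff: the method is a plain string.
	req, err := http.NewRequest(method, srv.URL, nil)
	if err != nil {
		panic(err)
	}
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // 200 OK; the handler saw a POST
}
```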
