Skip to content

Commit d1c5e72

Browse files
authored
Merge pull request #73 from lc/lc/add-timeout
feat(gau): add timeout flag for http client
2 parents d102582 + 8627404 commit d1c5e72

File tree

8 files changed

+18
-12
lines changed

8 files changed

+18
-12
lines changed

README.md

+1
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,7 @@ $ gau -h
3939
|`--providers`| list of providers to use (wayback,commoncrawl,otx,urlscan) | gau --providers wayback|
4040
|`--proxy`| http proxy to use (socks5:// or http://) | gau --proxy http://proxy.example.com:8080 |
4141
|`--retries`| retries for HTTP client | gau --retries 10 |
42+
|`--timeout`| timeout (in seconds) for HTTP client | gau --timeout 60 |
4243
|`--subs`| include subdomains of target domain | gau example.com --subs |
4344
|`--threads`| number of workers to spawn | gau example.com --threads 5 |
4445
|`--to`| fetch urls to date (format: YYYYMM) | gau example.com --to 202101 |

pkg/httpclient/client.go

+3-3
Original file line numberDiff line numberDiff line change
@@ -15,12 +15,12 @@ type Header struct {
1515
Value string
1616
}
1717

18-
func MakeRequest(c *fasthttp.Client, url string, maxRetries int, headers ...Header) ([]byte, error) {
18+
func MakeRequest(c *fasthttp.Client, url string, maxRetries uint, timeout uint, headers ...Header) ([]byte, error) {
1919
var (
2020
req *fasthttp.Request
2121
resp *fasthttp.Response
2222
)
23-
retries := maxRetries
23+
retries := int(maxRetries)
2424
for i := retries; i >= 0; i-- {
2525
req = fasthttp.AcquireRequest()
2626
defer fasthttp.ReleaseRequest(req)
@@ -35,7 +35,7 @@ func MakeRequest(c *fasthttp.Client, url string, maxRetries int, headers ...Head
3535
resp = fasthttp.AcquireResponse()
3636
defer fasthttp.ReleaseResponse(resp)
3737

38-
if err := c.DoTimeout(req, resp, time.Second*45); err != nil {
38+
if err := c.DoTimeout(req, resp, time.Second*time.Duration(timeout)); err != nil {
3939
fasthttp.ReleaseRequest(req)
4040
if retries == 0 {
4141
return nil, err

pkg/providers/commoncrawl/commoncrawl.go

+3-3
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ type Client struct {
3131
func New(c *providers.Config, filters providers.Filters) (*Client, error) {
3232
client := &Client{config: c, filters: filters}
3333
// Fetch the list of available CommonCrawl Api URLs.
34-
resp, err := httpclient.MakeRequest(c.Client, "http://index.commoncrawl.org/collinfo.json", int(c.MaxRetries))
34+
resp, err := httpclient.MakeRequest(c.Client, "http://index.commoncrawl.org/collinfo.json", c.MaxRetries, c.Timeout)
3535
if err != nil {
3636
return nil, err
3737
}
@@ -78,7 +78,7 @@ paginate:
7878
logrus.WithFields(logrus.Fields{"provider": Name, "page": page}).Infof("fetching %s", domain)
7979
}
8080
apiURL := c.formatURL(domain, page)
81-
resp, err := httpclient.MakeRequest(c.config.Client, apiURL, int(c.config.MaxRetries))
81+
resp, err := httpclient.MakeRequest(c.config.Client, apiURL, c.config.MaxRetries, c.config.Timeout)
8282
if err != nil {
8383
return fmt.Errorf("failed to fetch commoncrawl(%d): %s", page, err)
8484
}
@@ -114,7 +114,7 @@ func (c *Client) formatURL(domain string, page uint) string {
114114
func (c *Client) getPagination(domain string) (paginationResult, error) {
115115
url := fmt.Sprintf("%s&showNumPages=true", c.formatURL(domain, 0))
116116

117-
resp, err := httpclient.MakeRequest(c.config.Client, url, int(c.config.MaxRetries))
117+
resp, err := httpclient.MakeRequest(c.config.Client, url, c.config.MaxRetries, c.config.Timeout)
118118
if err != nil {
119119
return paginationResult{}, err
120120
}

pkg/providers/otx/otx.go

+1-1
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ paginate:
5656
logrus.WithFields(logrus.Fields{"provider": Name, "page": page - 1}).Infof("fetching %s", domain)
5757
}
5858
apiURL := c.formatURL(domain, page)
59-
resp, err := httpclient.MakeRequest(c.config.Client, apiURL, int(c.config.MaxRetries))
59+
resp, err := httpclient.MakeRequest(c.config.Client, apiURL, c.config.MaxRetries, c.config.Timeout)
6060
if err != nil {
6161
return fmt.Errorf("failed to fetch alienvault(%d): %s", page, err)
6262
}

pkg/providers/providers.go

+2-1
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ import (
55
"github.com/valyala/fasthttp"
66
)
77

8-
const Version = `2.0.9`
8+
const Version = `2.1.0`
99

1010
// Provider is a generic interface for all archive fetchers
1111
type Provider interface {
@@ -20,6 +20,7 @@ type URLScan struct {
2020

2121
type Config struct {
2222
Threads uint
23+
Timeout uint
2324
Verbose bool
2425
MaxRetries uint
2526
IncludeSubdomains bool

pkg/providers/urlscan/urlscan.go

+1-1
Original file line numberDiff line numberDiff line change
@@ -51,7 +51,7 @@ paginate:
5151
logrus.WithFields(logrus.Fields{"provider": Name, "page": page}).Infof("fetching %s", domain)
5252
}
5353
apiURL := c.formatURL(domain, searchAfter)
54-
resp, err := httpclient.MakeRequest(c.config.Client, apiURL, int(c.config.MaxRetries), header)
54+
resp, err := httpclient.MakeRequest(c.config.Client, apiURL, c.config.MaxRetries, c.config.Timeout, header)
5555
if err != nil {
5656
return fmt.Errorf("failed to fetch urlscan: %s", err)
5757
}

pkg/providers/wayback/wayback.go

+3-2
Original file line numberDiff line numberDiff line change
@@ -53,7 +53,7 @@ func (c *Client) Fetch(ctx context.Context, domain string, results chan string)
5353
}
5454
apiURL := c.formatURL(domain, page)
5555
// make HTTP request
56-
resp, err := httpclient.MakeRequest(c.config.Client, apiURL, int(c.config.MaxRetries))
56+
resp, err := httpclient.MakeRequest(c.config.Client, apiURL, c.config.MaxRetries, c.config.Timeout)
5757
if err != nil {
5858
return fmt.Errorf("failed to fetch wayback results page %d: %s", page, err)
5959
}
@@ -96,7 +96,8 @@ func (c *Client) formatURL(domain string, page uint) string {
9696
// getPagination returns the number of pages for Wayback
9797
func (c *Client) getPagination(domain string) (uint, error) {
9898
url := fmt.Sprintf("%s&showNumPages=true", c.formatURL(domain, 0))
99-
resp, err := httpclient.MakeRequest(c.config.Client, url, int(c.config.MaxRetries))
99+
resp, err := httpclient.MakeRequest(c.config.Client, url, c.config.MaxRetries, c.config.Timeout)
100+
100101
if err != nil {
101102
return 0, err
102103
}

runner/flags/flags.go

+4-1
Original file line numberDiff line numberDiff line change
@@ -26,6 +26,7 @@ type Config struct {
2626
Filters providers.Filters `mapstructure:"filters"`
2727
Proxy string `mapstructure:"proxy"`
2828
Threads uint `mapstructure:"threads"`
29+
Timeout uint `mapstructure:"timeout"`
2930
Verbose bool `mapstructure:"verbose"`
3031
MaxRetries uint `mapstructure:"retries"`
3132
IncludeSubdomains bool `mapstructure:"subdomains"`
@@ -58,6 +59,7 @@ func (c *Config) ProviderConfig() (*providers.Config, error) {
5859

5960
pc := &providers.Config{
6061
Threads: c.Threads,
62+
Timeout: c.Timeout,
6163
Verbose: c.Verbose,
6264
MaxRetries: c.MaxRetries,
6365
IncludeSubdomains: c.IncludeSubdomains,
@@ -94,7 +96,8 @@ func New() *Options {
9496
v := viper.New()
9597

9698
pflag.String("o", "", "filename to write results to")
97-
pflag.Uint("threads", 1, "number of workers to spawn, default: 1")
99+
pflag.Uint("threads", 1, "number of workers to spawn")
100+
pflag.Uint("timeout", 45, "timeout (in seconds) for HTTP client")
98101
pflag.Uint("retries", 0, "retries for HTTP client")
99102
pflag.String("proxy", "", "http proxy to use")
100103
pflag.StringSlice("blacklist", []string{}, "list of extensions to skip")

0 commit comments

Comments
 (0)