Skip to content

Commit

Permalink
Merge pull request #10 from liamg/liamg-add-supplementary-checks
Browse files Browse the repository at this point in the history
Add supplementary checks for backup extensions
  • Loading branch information
liamg authored Jan 12, 2020
2 parents 536a0e8 + b264b53 commit 96d3bd7
Show file tree
Hide file tree
Showing 3 changed files with 53 additions and 7 deletions.
11 changes: 8 additions & 3 deletions pkg/scan/url_options.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ type URLOptions struct {
Extensions []string
Filename string
SkipSSLVerification bool
BackupExtensions []string
}

type URLResult struct {
Expand All @@ -44,9 +45,10 @@ var DefaultURLOptions = URLOptions{
http.StatusMovedPermanently,
http.StatusFound,
},
Timeout: time.Second * 5,
Parallelism: 10,
Extensions: []string{"php", "htm", "html"},
Timeout: time.Second * 5,
Parallelism: 10,
Extensions: []string{"php", "htm", "html"},
BackupExtensions: []string{"~", ".bak", ".BAK", ".old", ".backup", ".txt", ".OLD", ".BACKUP", "1", "2", "_"},
}

func (opt *URLOptions) Inherit() {
Expand All @@ -69,4 +71,7 @@ func (opt *URLOptions) Inherit() {
if len(opt.Extensions) == 0 {
opt.Extensions = DefaultURLOptions.Extensions
}
if len(opt.BackupExtensions) == 0 {
opt.BackupExtensions = DefaultURLOptions.BackupExtensions
}
}
4 changes: 4 additions & 0 deletions pkg/scan/url_scanner.go
Original file line number Diff line number Diff line change
Expand Up @@ -209,6 +209,10 @@ func (scanner *URLScanner) checkURL(uri string) *URLResult {
return nil
}

for _, ext := range scanner.options.BackupExtensions {
extraWork = append(extraWork, uri+ext)
}

return &URLResult{
StatusCode: code,
URL: *parsedURL,
Expand Down
45 changes: 41 additions & 4 deletions pkg/scan/url_scanner_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -32,15 +32,16 @@ func TestURLScanner(t *testing.T) {

scanner := NewURLScanner(&URLOptions{
TargetURL: *parsed,
Parallelism: 100,
Parallelism: 2,
Wordlist: wordlist.FromReader(bytes.NewReader([]byte("login.php\nsomething.php"))),
})

results, err := scanner.Scan()
if err != nil {
t.Fatal(err)
}

require.Equal(t, len(results), 1)
require.Equal(t, 1, len(results))
assert.Equal(t, results[0].String(), server.URL+"/login.php")

}
Expand All @@ -66,7 +67,7 @@ func TestURLScannerWithRedirects(t *testing.T) {

scanner := NewURLScanner(&URLOptions{
TargetURL: *parsed,
Parallelism: 100,
Parallelism: 1,
PositiveStatusCodes: []int{http.StatusOK},
Wordlist: wordlist.FromReader(bytes.NewReader([]byte("login.php"))),
})
Expand All @@ -76,7 +77,43 @@ func TestURLScannerWithRedirects(t *testing.T) {
t.Fatal(err)
}

require.Equal(t, len(results), 1)
require.Equal(t, 1, len(results))
assert.Equal(t, results[0].String(), server.URL+"/very-secret-file.php")

}

// TestURLScannerWithBackupFile verifies that when the scanner discovers a
// file, it also probes that URL with each configured backup extension and
// reports any hits (here "login.php~") as additional results.
func TestURLScannerWithBackupFile(t *testing.T) {

	// Serve the original file and its "~" backup copy; everything else 404s.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch r.URL.Path {
		case "/login.php", "/login.php~":
			w.WriteHeader(http.StatusOK)
		default:
			w.WriteHeader(http.StatusNotFound)
		}
	}))
	defer server.Close()

	parsed, err := url.Parse(server.URL)
	if err != nil {
		t.Fatal(err)
	}

	scanner := NewURLScanner(&URLOptions{
		TargetURL: *parsed,
		// NOTE(review): Parallelism 1 presumably keeps result order
		// deterministic for the index-based assertions below — confirm.
		Parallelism: 1,
		Wordlist:    wordlist.FromReader(bytes.NewReader([]byte("login.php"))),
	})

	results, err := scanner.Scan()
	if err != nil {
		t.Fatal(err)
	}

	// testify convention: expected value first, actual second, so failure
	// messages label the values correctly (matches the require.Equal above).
	require.Equal(t, 2, len(results))
	assert.Equal(t, server.URL+"/login.php", results[0].String())
	assert.Equal(t, server.URL+"/login.php~", results[1].String())

}

0 comments on commit 96d3bd7

Please sign in to comment.