Skip to content
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 4 additions & 3 deletions lib/srv/db/snowflake/http.go
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ import (
"github.com/gravitational/trace"

"github.com/gravitational/teleport"
"github.com/gravitational/teleport/lib/utils"
)

func writeResponse(resp *http.Response, newResp []byte) (*bytes.Buffer, error) {
Expand Down Expand Up @@ -69,7 +70,7 @@ func copyRequest(ctx context.Context, req *http.Request, body io.Reader) (*http.
func readRequestBody(req *http.Request) ([]byte, error) {
defer req.Body.Close()

body, err := io.ReadAll(io.LimitReader(req.Body, teleport.MaxHTTPRequestSize))
body, err := utils.ReadAtMost(req.Body, teleport.MaxHTTPRequestSize)
if err != nil {
return nil, trace.Wrap(err)
}
Expand All @@ -80,7 +81,7 @@ func readRequestBody(req *http.Request) ([]byte, error) {
func readResponseBody(resp *http.Response) ([]byte, error) {
defer resp.Body.Close()

body, err := io.ReadAll(io.LimitReader(resp.Body, teleport.MaxHTTPRequestSize))
body, err := utils.ReadAtMost(resp.Body, teleport.MaxHTTPRequestSize)
if err != nil {
return nil, trace.Wrap(err)
}
Expand All @@ -107,7 +108,7 @@ func maybeReadGzip(headers *http.Header, body []byte) ([]byte, error) {
}
defer bodyGZ.Close()

body, err = io.ReadAll(bodyGZ)
body, err = utils.ReadAtMost(bodyGZ, teleport.MaxHTTPRequestSize)
Copy link
Copy Markdown
Collaborator

@zmb3 zmb3 Oct 20, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We've already read at most 10MB of (potentially compressed) data on line 84, and now we're requiring that the uncompressed data is also below the same threshold. Is this what we want?

You can probably just remove this particular change. Our gzip.NewReader already enforces a 10MB limit, so this code is functionally equivalent to the prior version. Edit: GitHub navigated me to the wrong NewReader impl. I take this back :-)

Copy link
Copy Markdown
Contributor Author

@jentfoo jentfoo Oct 20, 2023

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

That is the intention here. The issue is that 10MB of compressed data can decompress into gigabytes of data which could easily impact the service.

if err != nil {
return nil, trace.Wrap(err)
}
Expand Down