s2: Fix DecodeConcurrent deadlock on errors (#925)
When DecodeConcurrent encounters an error, it can lock up in some cases.

Fix the deadlock and add a fuzz test for stream decoding.

Fixes #920
klauspost authored Feb 5, 2024
1 parent e8251aa commit 255a132
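
For context, a rough sketch of the affected call (an illustration, not part of the commit): DecodeConcurrent decodes a whole stream to a writer using multiple goroutines, and a decode or CRC error partway through the stream is the kind of case that could previously leave the call hanging instead of returning the error.

package main

import (
	"bytes"
	"fmt"
	"io"

	"github.com/klauspost/compress/s2"
)

func main() {
	// Build a small s2 stream, then corrupt one byte to force a decode/CRC error.
	var buf bytes.Buffer
	w := s2.NewWriter(&buf)
	w.Write(bytes.Repeat([]byte("example data "), 1000))
	w.Close()
	data := buf.Bytes()
	data[len(data)/2] ^= 0xff

	r := s2.NewReader(bytes.NewReader(data))
	// With this fix the error is returned; previously a case like this could block.
	n, err := r.DecodeConcurrent(io.Discard, 2)
	fmt.Println(n, err)
}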
Showing 3 changed files with 104 additions and 3 deletions.
58 changes: 58 additions & 0 deletions internal/fuzz/helpers.go
@@ -89,6 +89,64 @@ func AddFromZip(f *testing.F, filename string, t InputType, short bool) {
	}
}

// ReturnFromZip will read the supplied zip and call fn with the contents of each file.
// Byte slices only.
func ReturnFromZip(tb testing.TB, filename string, t InputType, fn func([]byte)) {
	file, err := os.Open(filename)
	if err != nil {
		tb.Fatal(err)
	}
	fi, err := file.Stat()
	if err != nil {
		tb.Fatal(err)
	}
	zr, err := zip.NewReader(file, fi.Size())
	if err != nil {
		tb.Fatal(err)
	}
	for _, file := range zr.File {
		rc, err := file.Open()
		if err != nil {
			tb.Fatal(err)
		}

		b, err := io.ReadAll(rc)
		if err != nil {
			tb.Fatal(err)
		}
		rc.Close()
		t := t
		if t == TypeOSSFuzz {
			t = TypeRaw // Fallback
			// OSS-Fuzz corpus entries may carry a 4-byte big-endian length prefix.
			if len(b) >= 4 {
				sz := binary.BigEndian.Uint32(b)
				if sz <= uint32(len(b))-4 {
					fn(b[4 : 4+sz])
					continue
				}
			}
		}

		// Auto-detect Go fuzz corpus files by their text header.
		if bytes.HasPrefix(b, []byte("go test fuzz")) {
			t = TypeGoFuzz
		} else {
			t = TypeRaw
		}

		if t == TypeRaw {
			fn(b)
			continue
		}
		vals, err := unmarshalCorpusFile(b)
		if err != nil {
			tb.Fatal(err)
		}
		for _, v := range vals {
			fn(v)
		}
	}
}

// unmarshalCorpusFile decodes corpus bytes into their respective values.
func unmarshalCorpusFile(b []byte) ([][]byte, error) {
if len(b) == 0 {
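A side note on the TypeOSSFuzz branch above (my reading of the code, not part of the commit): such corpus entries are expected to start with a 4-byte big-endian length prefix giving the size of the payload that follows; anything that does not fit that framing falls back to raw bytes or the go test fuzz v1 text format. A minimal sketch of producing a file with that framing, using a hypothetical helper name:

package main

import (
	"encoding/binary"
	"os"
)

// writeOSSFuzzEntry (hypothetical) frames payload the way the TypeOSSFuzz
// path expects: a 4-byte big-endian length, then the payload itself.
func writeOSSFuzzEntry(path string, payload []byte) error {
	buf := make([]byte, 4, 4+len(payload))
	binary.BigEndian.PutUint32(buf, uint32(len(payload)))
	return os.WriteFile(path, append(buf, payload...), 0o644)
}

func main() {
	// The resulting file would still need to be zipped before ReturnFromZip can read it.
	_ = writeOSSFuzzEntry("corpus-entry", []byte("hello s2"))
}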
34 changes: 34 additions & 0 deletions s2/fuzz_test.go
@@ -6,6 +6,7 @@ package s2
import (
"bytes"
"fmt"
"io"
"testing"

"github.com/klauspost/compress/internal/fuzz"
@@ -148,3 +149,36 @@ func FuzzEncodingBlocks(f *testing.F) {
		}
	})
}

func FuzzStreamDecode(f *testing.F) {
	enc := NewWriter(nil, WriterBlockSize(8<<10))
	addCompressed := func(b []byte) {
		var buf bytes.Buffer
		enc.Reset(&buf)
		enc.Write(b)
		enc.Close()
		f.Add(buf.Bytes())
	}
	fuzz.ReturnFromZip(f, "testdata/enc_regressions.zip", fuzz.TypeRaw, addCompressed)
	fuzz.ReturnFromZip(f, "testdata/fuzz/block-corpus-raw.zip", fuzz.TypeRaw, addCompressed)
	fuzz.ReturnFromZip(f, "testdata/fuzz/block-corpus-enc.zip", fuzz.TypeGoFuzz, addCompressed)
	dec := NewReader(nil, ReaderIgnoreCRC())
	f.Fuzz(func(t *testing.T, data []byte) {
		// Using Read
		dec.Reset(bytes.NewReader(data))
		io.Copy(io.Discard, dec)

		// Using DecodeConcurrent
		dec.Reset(bytes.NewReader(data))
		dec.DecodeConcurrent(io.Discard, 2)

		// Use ByteReader.
		dec.Reset(bytes.NewReader(data))
		for {
			_, err := dec.ReadByte()
			if err != nil {
				break
			}
		}
	})
}
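
A brief usage note (mine, not from the commit): since this uses Go's built-in fuzzing, the seed inputs registered through f.Add above also run under a plain go test, and the target can be fuzzed locally with something like go test -fuzz=FuzzStreamDecode from the s2 directory.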
15 changes: 12 additions & 3 deletions s2/reader.go
@@ -452,6 +452,12 @@ func (r *Reader) DecodeConcurrent(w io.Writer, concurrent int) (written int64, err error) {
		for toWrite := range queue {
			entry := <-toWrite
			reUse <- toWrite
			if hasErr() || entry == nil {
				if entry != nil {
					writtenBlocks <- entry
				}
				continue
			}
			if hasErr() {
				writtenBlocks <- entry
				continue
@@ -471,20 +477,21 @@ func (r *Reader) DecodeConcurrent(w io.Writer, concurrent int) (written int64, err error) {
		}
	}()

	// Reader
	defer func() {
		close(queue)
		if r.err != nil {
			err = r.err
			setErr(r.err)
		} else if err != nil {
			setErr(err)
		}
		close(queue)
		wg.Wait()
		if err == nil {
			err = aErr
		}
		written = aWritten
	}()

	// Reader
	for !hasErr() {
		if !r.readFull(r.buf[:4], true) {
			if r.err == io.EOF {
@@ -553,11 +560,13 @@ func (r *Reader) DecodeConcurrent(w io.Writer, concurrent int) (written int64, err error) {
				if err != nil {
					writtenBlocks <- decoded
					setErr(err)
					entry <- nil
					return
				}
				if !r.ignoreCRC && crc(decoded) != checksum {
					writtenBlocks <- decoded
					setErr(ErrCRC)
					entry <- nil
					return
				}
				entry <- decoded
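To spell out one reading of the fix (an interpretation, not text from the commit): the writer goroutine receives exactly one value from each queued per-block channel (entry := <-toWrite above), so a decode goroutine that hit an error and returned without sending could leave the writer blocked forever — the lock-up the commit message describes. The error paths now send a nil entry, and the writer skips nil entries while recycling any buffer it did receive. A self-contained sketch of that producer/consumer pattern, with hypothetical names:

package main

import "fmt"

func main() {
	queue := make(chan chan []byte, 4)
	done := make(chan struct{})

	// Consumer: receives exactly one value per queued channel.
	go func() {
		defer close(done)
		for ch := range queue {
			block := <-ch // blocks forever if the producer never sends
			if block == nil {
				continue // nil sentinel: the producer failed, skip this block
			}
			fmt.Println("wrote block of", len(block), "bytes")
		}
	}()

	// Producers: must send on ch on every path, even on error.
	for i := 0; i < 3; i++ {
		ch := make(chan []byte, 1)
		queue <- ch
		go func(i int, ch chan []byte) {
			if i == 1 { // simulate a decode error
				ch <- nil // send the sentinel instead of returning silently
				return
			}
			ch <- make([]byte, 100*(i+1))
		}(i, ch)
	}
	close(queue)
	<-done
}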
