@@ -679,8 +679,15 @@ class ZstdSpec extends AnyFlatSpec with ScalaCheckPropertyChecks {
     val channel = FileChannel.open(file.toPath, StandardOpenOption.READ)
     // write some garbage bytes at the beginning of buffer containing compressed data to prove that
     // this buffer's position doesn't have to start from 0.
-    val garbageBytes = "garbage bytes".getBytes(Charset.defaultCharset());
-    val readBuffer = ByteBuffer.allocate(channel.size().toInt + garbageBytes.length)
+    val garbageBytes = "garbage bytes".getBytes(Charset.defaultCharset())
+    // add some extra bytes to the underlying array of the ByteBuffer. The ByteBuffer view does not include these
+    // extra bytes. These are added to the underlying array to test for scenarios where the ByteBuffer view is a slice
+    // of the underlying array.
+    val extraBytes = "extra bytes".getBytes(Charset.defaultCharset())
+    // Create a read buffer with extraBytes; we will later carve a slice out of it to store the compressed data.
+    val bigReadBuffer = ByteBuffer.allocate(channel.size().toInt + garbageBytes.length + extraBytes.length)
+    bigReadBuffer.put(extraBytes)
+    val readBuffer = bigReadBuffer.slice()
     readBuffer.put(garbageBytes)
     channel.read(readBuffer)
     // set pos to 0 and limit to containing bytes
@@ -694,7 +701,9 @@ class ZstdSpec extends AnyFlatSpec with ScalaCheckPropertyChecks {
     var pos = 0
     // write some garbage bytes at the beginning of buffer containing uncompressed data to prove that
     // this buffer's position doesn't have to start from 0.
-    val block = ByteBuffer.allocate(1 + garbageBytes.length)
+    val bigBlock = ByteBuffer.allocate(1 + garbageBytes.length + extraBytes.length)
+    bigBlock.put(extraBytes)
+    var block = bigBlock.slice()
     while (pos < length && zis.hasRemaining) {
       block.clear
       block.put(garbageBytes)
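
For context on why the patch carves a slice: a heap ByteBuffer obtained via `slice()` shares the original backing array but starts at a non-zero `arrayOffset()`, so any code that reads the data through `array()` must add that offset. The following is a minimal standalone sketch of that behavior (the object and variable names are illustrative, not part of the patch):

```scala
import java.nio.ByteBuffer
import java.nio.charset.Charset

object SliceOffsetDemo extends App {
  // Fill the front of a backing buffer with padding, mirroring the extraBytes trick in the patch.
  val extraBytes = "extra bytes".getBytes(Charset.defaultCharset())
  val backing = ByteBuffer.allocate(extraBytes.length + 16)
  backing.put(extraBytes)

  // slice() starts at backing's current position: index 0 of the view maps to a
  // non-zero index of the shared array, which is the scenario the test exercises.
  val view = backing.slice()
  println(view.arrayOffset())              // extraBytes.length, not 0
  println(view.capacity())                 // 16: the padding is outside the view
  println(backing.array() eq view.array()) // true: both buffers share one backing array
}
```

Presumably, code that copies from `array()` without adding `arrayOffset()` would read the padding instead of the real data, which is the kind of bug a sliced test buffer is able to surface.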