[SPARK-12084][Core]Fix codes that uses ByteBuffer.array incorrectly #10083
Changes from 2 commits
```diff
@@ -79,7 +79,10 @@ object AvroConversionUtil extends Serializable {
   def unpackBytes(obj: Any): Array[Byte] = {
     val bytes: Array[Byte] = obj match {
-      case buf: java.nio.ByteBuffer => buf.array()
+      case buf: java.nio.ByteBuffer =>
+        val arr = new Array[Byte](buf.remaining())
+        buf.get(arr)
+        arr
       case arr: Array[Byte] => arr
       case other => throw new SparkException(
         s"Unknown BYTES type ${other.getClass.getName}")
```

**Member:** You can't use […]

**Member (Author):** This is in […]
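For background on why the old `buf.array()` call was wrong: `array()` returns the entire backing array, ignoring `position()` and `remaining()`, and it throws for direct or read-only buffers. A minimal Scala illustration of both failure modes and of the copy idiom the fix uses (example values are mine, not from the PR):

```scala
import java.nio.ByteBuffer

// array() exposes the whole backing store, not just the readable window.
val buf = ByteBuffer.allocate(8)
buf.put(Array[Byte](1, 2, 3))
buf.flip()
assert(buf.array().length == 8)   // 8 bytes, although only 3 are valid
assert(buf.remaining() == 3)

// Direct buffers are not array-backed at all.
val direct = ByteBuffer.allocateDirect(8)
assert(!direct.hasArray)          // direct.array() would throw UnsupportedOperationException

// The safe idiom used by the fix: copy exactly the remaining bytes.
val arr = new Array[Byte](buf.remaining())
buf.get(arr)
assert(arr.sameElements(Array[Byte](1, 2, 3)))
```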
```diff
@@ -334,10 +334,11 @@ private void decodeBinaryBatch(int col, int num) throws IOException {
       ByteBuffer bytes = columnReaders[col].nextBinary().toByteBuffer();
       int len = bytes.limit() - bytes.position();
       if (originalTypes[col] == OriginalType.UTF8) {
-        UTF8String str = UTF8String.fromBytes(bytes.array(), bytes.position(), len);
+        UTF8String str =
+            UTF8String.fromBytes(bytes.array(), bytes.arrayOffset() + bytes.position(), len);
         rowWriters[n].write(col, str);
       } else {
-        rowWriters[n].write(col, bytes.array(), bytes.position(), len);
+        rowWriters[n].write(col, bytes.array(), bytes.arrayOffset() + bytes.position(), len);
       }
       rows[n].setNotNullAt(col);
     } else {
```

**Member:** Can this become a call to […]

**Member (Author):** Done
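The `arrayOffset()` term matters because an array-backed buffer may be a slice or duplicate of a larger buffer, in which case index 0 of the backing array is not where the buffer's data starts; this is presumably the situation for the buffers returned by `nextBinary().toByteBuffer()` here. A small Scala sketch with illustrative values:

```scala
import java.nio.ByteBuffer

val backing = ByteBuffer.allocate(16)
backing.position(4)
val slice = backing.slice()       // shares the backing array, slice.arrayOffset() == 4
slice.put("abcd".getBytes("UTF-8"))
slice.flip()

// Wrong: position() alone indexes from the start of the shared backing array.
// new String(slice.array(), slice.position(), slice.remaining(), "UTF-8")

// Right: the data begins at arrayOffset() + position() in the backing array.
val s = new String(slice.array(), slice.arrayOffset() + slice.position(),
  slice.remaining(), "UTF-8")
assert(s == "abcd")
```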
```diff
@@ -27,6 +27,7 @@ import scala.util.control.NonFatal
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.Path
 
+import org.apache.spark.network.util.JavaUtils
 import org.apache.spark.streaming.Time
 import org.apache.spark.streaming.util.{BatchedWriteAheadLog, WriteAheadLog, WriteAheadLogUtils}
 import org.apache.spark.util.{Clock, Utils}
@@ -212,7 +213,7 @@ private[streaming] class ReceivedBlockTracker(
     writeAheadLog.readAll().asScala.foreach { byteBuffer =>
       logTrace("Recovering record " + byteBuffer)
       Utils.deserialize[ReceivedBlockTrackerLogEvent](
-        byteBuffer.array, Thread.currentThread().getContextClassLoader) match {
+        JavaUtils.bufferToArray(byteBuffer), Thread.currentThread().getContextClassLoader) match {
         case BlockAdditionEvent(receivedBlockInfo) =>
           insertAddedBlock(receivedBlockInfo)
         case BatchAllocationEvent(time, allocatedBlocks) =>
```

**Contributor:** Might be worth it to have a version of […]
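`JavaUtils.bufferToArray` centralizes the safe pattern: return the backing array directly only when it is safe to share, and copy otherwise. A hedged Scala sketch of that logic (an illustration of the idea, not Spark's exact source):

```scala
import java.nio.ByteBuffer

def bufferToArray(buffer: ByteBuffer): Array[Byte] = {
  if (buffer.hasArray && buffer.arrayOffset() == 0 &&
      buffer.array().length == buffer.remaining()) {
    // The readable window spans the whole backing array; sharing it is safe.
    buffer.array()
  } else {
    // Direct buffer, slice, or partial window: copy the remaining bytes.
    val bytes = new Array[Byte](buffer.remaining())
    buffer.get(bytes)
    bytes
  }
}
```

The fast path avoids a copy in the common case where the buffer wraps exactly the bytes read back from the write-ahead log.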
```diff
@@ -24,6 +24,8 @@ import scala.util.Try
 import org.apache.hadoop.conf.Configuration
 import org.apache.hadoop.fs.FSDataOutputStream
 
+import org.apache.spark.util.Utils
+
 /**
  * A writer for writing byte-buffers to a write ahead log file.
  */
@@ -48,17 +50,7 @@ private[streaming] class FileBasedWriteAheadLogWriter(path: String, hadoopConf:
     val lengthToWrite = data.remaining()
     val segment = new FileBasedWriteAheadLogSegment(path, nextOffset, lengthToWrite)
     stream.writeInt(lengthToWrite)
-    if (data.hasArray) {
-      stream.write(data.array())
-    } else {
-      // If the buffer is not backed by an array, we transfer using temp array
-      // Note that despite the extra array copy, this should be faster than byte-by-byte copy
-      while (data.hasRemaining) {
-        val array = new Array[Byte](data.remaining)
-        data.get(array)
-        stream.write(array)
-      }
-    }
+    Utils.writeByteBuffer(data, stream: OutputStream)
     flush()
     nextOffset = stream.getPos()
     segment
```
**Contributor:** Weird, was the compiler complaining about something here?

**Member (Author):** Yes. Because `stream` is accepted by both overloads of `Utils.writeByteBuffer`, the explicit `stream: OutputStream` ascription is needed to resolve the ambiguity.
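For reference, the inlined branch this hunk deletes is the kind of logic a shared `Utils.writeByteBuffer(data, out: OutputStream)` helper can host. Note that the deleted `stream.write(data.array())` fast path had the very bug this PR targets, since it ignores `arrayOffset()` and `position()`. A corrected Scala sketch (my reconstruction, not Spark's exact source):

```scala
import java.io.OutputStream
import java.nio.ByteBuffer

def writeByteBuffer(data: ByteBuffer, out: OutputStream): Unit = {
  if (data.hasArray) {
    // Array-backed: write only the readable window of the backing array.
    out.write(data.array(), data.arrayOffset() + data.position(), data.remaining())
  } else {
    // Direct buffer: stage through a temporary array. Despite the extra copy,
    // this is still much faster than a byte-by-byte transfer.
    val tmp = new Array[Byte](data.remaining())
    data.get(tmp)
    out.write(tmp)
  }
}
```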
**Contributor:** Not necessary for this change, but at some point it might be worth it to change this to use Kryo's `ByteBufferInput`.

**Member (Author):** Kryo will use the array as an internal buffer. Why is it not necessary?

**Contributor:** I'm saying that the change I proposed is not necessary, not that your change is not necessary.

**Member (Author):** Got it. Sorry for my misunderstanding.
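For context on that suggestion: Kryo ships a `ByteBufferInput` in `com.esotericsoftware.kryo.io` that reads directly from a `ByteBuffer`, which could skip the `bufferToArray` copy entirely. A small hedged round-trip sketch of mine (not code from the PR):

```scala
import java.nio.ByteBuffer
import com.esotericsoftware.kryo.Kryo
import com.esotericsoftware.kryo.io.{ByteBufferInput, Output}

val kryo = new Kryo()

// Serialize to a byte array with a plain Output (4096-byte buffer).
val out = new Output(4096)
kryo.writeClassAndObject(out, "hello")
out.close()
val buffer: ByteBuffer = ByteBuffer.wrap(out.toBytes)

// The suggested direction: deserialize straight from the ByteBuffer
// instead of first copying it to an Array[Byte].
val in = new ByteBufferInput(buffer)
assert(kryo.readClassAndObject(in) == "hello")
```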