Skip to content

Commit

Permalink
Fix agglomerate mapping (#4904)
Browse files Browse the repository at this point in the history
* load additional element

* also load additional element for offset based handling

* remove unnecessary size check

* update changelog
  • Loading branch information
youri-k authored Nov 4, 2020
1 parent eaf63cc commit a623fa8
Show file tree
Hide file tree
Showing 2 changed files with 4 additions and 4 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released

### Fixed
- Fixed dataset settings disappearing after switching between view mode and annotation mode. [#4845](https://github.com/scalableminds/webknossos/pull/4845)
- Fixed a rare error in the agglomerate mapping for large datasets. [#4904](https://github.com/scalableminds/webknossos/pull/4904)

### Removed
-
Original file line number Diff line number Diff line change
Expand Up @@ -70,9 +70,8 @@ class AgglomerateIdCache(val maxEntries: Int, val standardBlockSize: Int) extend
def handleUncachedAgglomerate(): Long = {
val minId =
if (segmentId < ULong(standardBlockSize / 2)) ULong(0) else segmentId - ULong(standardBlockSize / 2)
val blockSize = spire.math.min(size - minId, ULong(standardBlockSize))

val agglomerateIds = readFromFile(reader, dataSet, minId.toLong, blockSize.toLong)
val agglomerateIds = readFromFile(reader, dataSet, minId.toLong, standardBlockSize)

agglomerateIds.zipWithIndex.foreach {
case (id, index) => put(index + minId.toLong, id)
Expand Down Expand Up @@ -151,15 +150,15 @@ class BoundingBoxCache(val cache: mutable.HashMap[(Long, Long, Long), BoundingBo
readHDF: (IHDF5Reader, Long, Long) => Array[Long]): Array[Long] = {
val readerRange = getReaderRange(request)
if (readerRange._2 - readerRange._1 < maxReaderRange) {
val agglomerateIds = readHDF(reader, readerRange._1.toLong, (readerRange._2 - readerRange._1).toLong)
val agglomerateIds = readHDF(reader, readerRange._1.toLong, (readerRange._2 - readerRange._1).toLong + 1)
input.map(i => if (i == ULong(0)) 0L else agglomerateIds((i - readerRange._1).toInt))
} else {
var offset = readerRange._1
val result = Array.ofDim[Long](input.length)
val isTransformed = Array.fill(input.length)(false)
while (offset <= readerRange._2) {
val agglomerateIds =
readHDF(reader, offset.toLong, spire.math.min(maxReaderRange, readerRange._2 - offset).toLong)
readHDF(reader, offset.toLong, spire.math.min(maxReaderRange, readerRange._2 - offset).toLong + 1)
for (i <- input.indices) {
val inputElement = input(i)
if (!isTransformed(i) && inputElement >= offset && inputElement < offset + maxReaderRange) {
Expand Down

0 comments on commit a623fa8

Please sign in to comment.