Skip to content

Commit

Permalink
Merge branch 'master' of github.com:scalableminds/webknossos into email-notification
Browse files Browse the repository at this point in the history

* 'master' of github.com:scalableminds/webknossos:
  Update screenshots (#6934)
  Support rendering negative floats (#6895)
  Fix loading of webworkers in dev mode (#6933)
  Restore cache buster for webworkers (#6932)
  Introduce data vault as storage backend abstraction (#6899)
  Fix download button for annotations when tiff export is disabled (#6931)
  • Loading branch information
hotzenklotz committed Mar 22, 2023
2 parents 7bf748e + 20f7e0c commit 0f4fc77
Show file tree
Hide file tree
Showing 63 changed files with 724 additions and 3,757 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.unreleased.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
### Fixed
- Fixed an issue with text hints not being visible on the logout page for dark mode users. [#6916](https://github.com/scalableminds/webknossos/pull/6916)
- Fixed creating task types with a selected preferred mode. [#6928](https://github.com/scalableminds/webknossos/pull/6928)
- Fixed support for rendering of negative floats. [#6895](https://github.com/scalableminds/webknossos/pull/6895)
- Fixed caching issues with webworkers. [#6932](https://github.com/scalableminds/webknossos/pull/6932)
- Fixed download button for annotations which was disabled in some cases. [#6931](https://github.com/scalableminds/webknossos/pull/6931)

### Removed

Expand Down
8 changes: 4 additions & 4 deletions app/models/binary/credential/CredentialService.scala
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ package models.binary.credential
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.storage.{
FileSystemCredential,
FileSystemsHolder,
DataVaultsHolder,
GoogleServiceAccountCredential,
HttpBasicAuthCredential,
S3AccessKeyCredential
Expand All @@ -24,21 +24,21 @@ class CredentialService @Inject()(credentialDAO: CredentialDAO) {
userId: ObjectId,
organizationId: ObjectId): Option[FileSystemCredential] =
uri.getScheme match {
case FileSystemsHolder.schemeHttps | FileSystemsHolder.schemeHttp =>
case DataVaultsHolder.schemeHttps | DataVaultsHolder.schemeHttp =>
credentialIdentifier.map(
username =>
HttpBasicAuthCredential(uri.toString,
username,
credentialSecret.getOrElse(""),
userId.toString,
organizationId.toString))
case FileSystemsHolder.schemeS3 =>
case DataVaultsHolder.schemeS3 =>
(credentialIdentifier, credentialSecret) match {
case (Some(keyId), Some(secretKey)) =>
Some(S3AccessKeyCredential(uri.toString, keyId, secretKey, userId.toString, organizationId.toString))
case _ => None
}
case FileSystemsHolder.schemeGS =>
case DataVaultsHolder.schemeGS =>
for {
secret <- credentialSecret
secretJson <- tryo(Json.parse(secret)).toOption
Expand Down
5 changes: 2 additions & 3 deletions app/models/binary/explore/ExploreRemoteLayerService.scala
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import com.scalableminds.webknossos.datastore.dataformats.zarr._
import com.scalableminds.webknossos.datastore.datareaders.n5.N5Header
import com.scalableminds.webknossos.datastore.datareaders.zarr._
import com.scalableminds.webknossos.datastore.models.datasource._
import com.scalableminds.webknossos.datastore.storage.{FileSystemsHolder, RemoteSourceDescriptor}
import com.scalableminds.webknossos.datastore.storage.{DataVaultsHolder, RemoteSourceDescriptor}
import com.typesafe.scalalogging.LazyLogging
import models.binary.credential.CredentialService
import models.user.User
Expand Down Expand Up @@ -163,8 +163,7 @@ class ExploreRemoteLayerService @Inject()(credentialService: CredentialService)
requestingUser._organization)
remoteSource = RemoteSourceDescriptor(uri, credentialOpt)
credentialId <- Fox.runOptional(credentialOpt)(c => credentialService.insertOne(c)) ?~> "remoteFileSystem.credential.insert.failed"
fileSystem <- FileSystemsHolder.getOrCreate(remoteSource) ?~> "remoteFileSystem.setup.failed"
remotePath <- tryo(fileSystem.getPath(FileSystemsHolder.pathFromUri(remoteSource.uri))) ?~> "remoteFileSystem.getPath.failed"
remotePath <- DataVaultsHolder.getVaultPath(remoteSource) ?~> "remoteFileSystem.setup.failed"
layersWithVoxelSizes <- exploreRemoteLayersForRemotePath(
remotePath,
credentialId.map(_.toString),
Expand Down
6 changes: 5 additions & 1 deletion app/models/binary/explore/RemoteLayerExplorer.scala
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import com.scalableminds.util.io.ZipIO
import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper}
import com.scalableminds.webknossos.datastore.dataformats.MagLocator
import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, ElementClass}
import com.scalableminds.webknossos.datastore.datavault.VaultPath
import net.liftweb.util.Helpers.tryo
import play.api.libs.json.Reads

Expand All @@ -25,7 +26,10 @@ trait RemoteLayerExplorer extends FoxImplicits {

protected def parseJsonFromPath[T: Reads](path: Path): Fox[T] =
for {
fileBytes <- tryo(ZipIO.tryGunzip(Files.readAllBytes(path))) ?~> "dataSet.explore.failed.readFile"
fileBytes <- path match {
case path: VaultPath => path.readBytes() ?~> "dataSet.explore.failed.readFile"
case _ => tryo(ZipIO.tryGunzip(Files.readAllBytes(path))) ?~> "dataSet.explore.failed.readFile"
}
fileAsString <- tryo(new String(fileBytes, StandardCharsets.UTF_8)).toFox ?~> "dataSet.explore.failed.readFile"
parsed <- JsonHelper.parseAndValidateJson[T](fileAsString)
} yield parsed
Expand Down
29 changes: 16 additions & 13 deletions frontend/javascripts/oxalis/shaders/texture_access.glsl.ts
Original file line number Diff line number Diff line change
Expand Up @@ -330,7 +330,10 @@ export const getColorForCoords: ShaderModule = {
}
if (packingDegree == 1.0) {
returnValue[1] = max(bucketColor, 0.0);
// Negative values in the alpha channel would result in this
// value being interpreted as missing. Therefore, we are clamping
// the alpha value.
returnValue[1] = vec4(bucketColor.xyz, max(bucketColor.a, 0.0));
return returnValue;
}
Expand All @@ -342,35 +345,35 @@ export const getColorForCoords: ShaderModule = {
// The same goes for the following code where the packingDegree is 4 and we only have 1 byte of information.
if (rgbaIndex == 0.0) {
returnValue[1] = vec4(
max(bucketColor.r, 0.0),
max(bucketColor.g, 0.0),
max(bucketColor.r, 0.0),
max(bucketColor.g, 0.0)
bucketColor.r,
bucketColor.g,
bucketColor.r,
1.0
);
return returnValue;
} else if (rgbaIndex == 1.0) {
returnValue[1] = vec4(
max(bucketColor.b, 0.0),
max(bucketColor.a, 0.0),
max(bucketColor.b, 0.0),
max(bucketColor.a, 0.0)
bucketColor.b,
bucketColor.a,
bucketColor.b,
1.0
);
return returnValue;
}
}
// The following code deals with packingDegree == 4.0
if (rgbaIndex == 0.0) {
returnValue[1] = vec4(max(bucketColor.r, 0.0));
returnValue[1] = vec4(vec3(bucketColor.r), 1.0);
return returnValue;
} else if (rgbaIndex == 1.0) {
returnValue[1] = vec4(max(bucketColor.g, 0.0));
returnValue[1] = vec4(vec3(bucketColor.g), 1.0);
return returnValue;
} else if (rgbaIndex == 2.0) {
returnValue[1] = vec4(max(bucketColor.b, 0.0));
returnValue[1] = vec4(vec3(bucketColor.b), 1.0);
return returnValue;
} else if (rgbaIndex == 3.0) {
returnValue[1] = vec4(max(bucketColor.a, 0.0));
returnValue[1] = vec4(vec3(bucketColor.a), 1.0);
return returnValue;
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -435,6 +435,10 @@ with wk.webknossos_context(
activeTabKey === "export" &&
runningExportJobs.some(([key]) => key === exportKey(selectedLayerInfos, mag));

const isOkButtonDisabled =
activeTabKey === "export" &&
(!isExportable || isCurrentlyRunningExportJob || isMergerModeEnabled);

return (
<Modal
title="Download this annotation"
Expand All @@ -445,7 +449,7 @@ with wk.webknossos_context(
<Button
key="ok"
type="primary"
disabled={!isExportable || isCurrentlyRunningExportJob || isMergerModeEnabled}
disabled={isOkButtonDisabled}
onClick={handleOk}
loading={isCurrentlyRunningExportJob}
>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,12 @@ if (!WK_AUTH_TOKEN) {
throw new Error("No WK_AUTH_TOKEN specified.");
}

if (process.env.BROWSERSTACK_USERNAME == null || process.env.BROWSERSTACK_ACCESS_KEY == null) {
throw new Error(
"BROWSERSTACK_USERNAME and BROWSERSTACK_ACCESS_KEY must be defined as env variables.",
);
}

process.on("unhandledRejection", (err, promise) => {
console.error("Unhandled rejection (promise: ", promise, ", reason: ", err, ").");
});
Expand Down Expand Up @@ -95,7 +101,8 @@ test.beforeEach(async (t) => {
// @ts-expect-error ts-migrate(7017) FIXME: Element implicitly has an 'any' type because type ... Remove this comment to see the full error message
global.FetchError = FetchError;
});
// These are the datasets that are available on our dev instance

// These datasets are available on our dev instance (e.g., master.webknossos.xyz)
const datasetNames = [
"ROI2017_wkw",
"2017-05-31_mSEM_aniso-test",
Expand All @@ -105,6 +112,7 @@ const datasetNames = [
"float_test_dataset",
"Multi-Channel-Test",
"connectome_file_test_dataset",
"kiwi", // This dataset is rotated and translated.
];

type DatasetName = string;
Expand All @@ -125,6 +133,7 @@ const viewOverrides: Record<string, string> = {
'{"position":[63,67,118],"mode":"orthogonal","zoomStep":0.826,"stateByLayer":{"segmentation":{"meshInfo":{"meshFileName":"meshfile-with-name","meshes":[{"segmentId":4,"seedPosition":[64,75,118],"isPrecomputed":true,"meshFileName":"meshfile-with-name"},{"segmentId":12,"seedPosition":[107,125,118],"isPrecomputed":false,"mappingName":"agglomerate_view_70","mappingType":"HDF5"},{"segmentId":79,"seedPosition":[110,78,118],"isPrecomputed":false,"mappingName":null,"mappingType":null}]}}}}',
connectome_file_test_dataset:
'{"position":[102,109,60],"mode":"orthogonal","zoomStep":0.734,"stateByLayer":{"segmentation":{"connectomeInfo":{"connectomeName":"connectome","agglomerateIdsToImport":[1]}}}}',
kiwi: "1191,1112,21,0,8.746",
};
const datasetConfigOverrides: Record<string, PartialDatasetConfiguration> = {
ROI2017_wkw_fallback: {
Expand Down
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added frontend/javascripts/test/screenshots/kiwi.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
122 changes: 122 additions & 0 deletions test/backend/DataVaultTestSuite.scala
Original file line number Diff line number Diff line change
@@ -0,0 +1,122 @@
package backend

import org.scalatestplus.play.PlaySpec

import java.net.URI
import com.scalableminds.webknossos.datastore.datavault.{DataVault, GoogleCloudDataVault, HttpsDataVault, VaultPath}
import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptor

import scala.collection.immutable.NumericRange

class DataVaultTestSuite extends PlaySpec {

  // First ten bytes of the JPEG payload stored at the test chunk key
  // (SOI + APP0 "JFIF" marker); shared by every remote-read assertion below.
  // Kept as Array[Int] to match the original literal's element comparison.
  private val expectedHeaderBytes: Array[Int] = Array(-1, -40, -1, -32, 0, 16, 74, 70, 73, 70)

  "Data vault" when {
    "using Range requests" when {
      val byteRange: NumericRange[Long] = Range.Long(0, 1024, 1)
      // When accessed via range request, the response body is 1024 bytes long, otherwise 124.8 KB.
      val chunkKey = "32_32_40/15360-15424_8384-8448_3520-3584"

      "with HTTP Vault" should {
        "return correct response" in {
          val bucketUri = new URI("http://storage.googleapis.com/")
          val path = new VaultPath(bucketUri, HttpsDataVault.create(RemoteSourceDescriptor(bucketUri, None)))
          val result =
            (path / s"neuroglancer-fafb-data/fafb_v14/fafb_v14_orig/$chunkKey").readBytes(Some(byteRange)).get

          assert(result.length == byteRange.length)
          assert(result.take(10).sameElements(expectedHeaderBytes))
        }
      }

      "with Google Cloud Storage Vault" should {
        "return correct response" in {
          val bucketUri = new URI("gs://neuroglancer-fafb-data/fafb_v14/fafb_v14_orig")
          val path = new VaultPath(bucketUri, GoogleCloudDataVault.create(RemoteSourceDescriptor(bucketUri, None)))
          val result = (path / chunkKey).readBytes(Some(byteRange)).get

          assert(result.length == byteRange.length)
          assert(result.take(10).sameElements(expectedHeaderBytes))
        }
      }
    }

    "using regular requests" when {
      val chunkKey = "32_32_40/15360-15424_8384-8448_3520-3584"
      // Full (un-ranged) size of the chunk above.
      val expectedLength = 127808

      "with HTTP Vault" should {
        "return correct response" in {
          val bucketUri = new URI("http://storage.googleapis.com/")
          val path = new VaultPath(bucketUri, HttpsDataVault.create(RemoteSourceDescriptor(bucketUri, None)))
          val result = (path / s"neuroglancer-fafb-data/fafb_v14/fafb_v14_orig/$chunkKey").readBytes().get

          assert(result.length == expectedLength)
          assert(result.take(10).sameElements(expectedHeaderBytes))
        }
      }

      "with Google Cloud Storage Vault" should {
        "return correct response" in {
          val bucketUri = new URI("gs://neuroglancer-fafb-data/fafb_v14/fafb_v14_orig")
          val path = new VaultPath(bucketUri, GoogleCloudDataVault.create(RemoteSourceDescriptor(bucketUri, None)))
          val result = (path / chunkKey).readBytes().get

          assert(result.length == expectedLength)
          assert(result.take(10).sameElements(expectedHeaderBytes))
        }
      }
    }

    "using vault path" when {
      // Minimal DataVault stub: the path-resolution tests below never read bytes,
      // so the implementation can stay unimplemented.
      class MockDataVault extends DataVault {
        override def readBytes(path: VaultPath, range: Option[NumericRange[Long]]): Array[Byte] = ???
      }

      "Uri has no trailing slash" should {
        val baseUri = new URI("protocol://host/a/b")
        val basePath = new VaultPath(baseUri, new MockDataVault)

        "resolve child" in {
          assert((basePath / "c").toUri.toString == s"${baseUri.toString}/c")
        }

        "get parent" in {
          assert((basePath / "..").toString == "protocol://host/a/")
        }

        "get directory" in {
          assert((basePath / ".").toString == s"${baseUri.toString}/")
        }

        "handle sequential parameters" in {
          assert((basePath / "c" / "d" / "e").toString == "protocol://host/a/b/c/d/e")
        }

        "resolve relative to host with starting slash in parameter" in {
          assert((basePath / "/x").toString == "protocol://host/x")
        }

        "resolving path respects trailing slash" in {
          assert((basePath / "x/").toString == "protocol://host/a/b/x/")
          assert((basePath / "x").toString == "protocol://host/a/b/x")
        }
      }

      "Uri has trailing slash" should {
        val baseUri = new URI("protocol://host/a/b/")
        val basePath = new VaultPath(baseUri, new MockDataVault)

        "resolve child" in {
          assert((basePath / "c").toUri.toString == s"${baseUri.toString}c")
        }

        "get parent" in {
          assert((basePath / "..").toString == "protocol://host/a/")
        }

        "get directory" in {
          assert((basePath / ".").toString == s"${baseUri.toString}")
        }
      }

    }
  }
}
47 changes: 0 additions & 47 deletions test/backend/RangeRequestTestSuite.scala

This file was deleted.

Loading

0 comments on commit 0f4fc77

Please sign in to comment.