diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 14cc6be7d82..b2197baf9aa 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -17,6 +17,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Webknossos can now open ND Zarr datasets with arbitrary axis orders (not limited to `**xyz` anymore). [#7592](https://github.com/scalableminds/webknossos/pull/7592) - Added a new "Split from all neighboring segments" feature for the proofreading mode. [#7611](https://github.com/scalableminds/webknossos/pull/7611) - If storage scan is enabled, the measured used storage is now displayed in the dashboard’s dataset detail view. [#7677](https://github.com/scalableminds/webknossos/pull/7677) +- Prepared support to download full STL meshes via the HTTP API. [#7587](https://github.com/scalableminds/webknossos/pull/7587) ### Changed - Datasets stored in WKW format are no longer loaded with memory mapping, reducing memory demands. [#7528](https://github.com/scalableminds/webknossos/pull/7528) diff --git a/DEV_INSTALL.md b/DEV_INSTALL.md index f8a6f8fc989..b576533e9f1 100644 --- a/DEV_INSTALL.md +++ b/DEV_INSTALL.md @@ -26,6 +26,7 @@ For non-localhost deployments, check out the [installation guide in the document * [Redis 5+](https://redis.io/) * [Blosc](https://github.com/Blosc/c-blosc) * [Brotli](https://github.com/google/brotli) +* [Draco](https://github.com/google/draco) * [node.js 18](http://nodejs.org/download/) * [yarn package manager](https://yarnpkg.com/) * [git](http://git-scm.com/downloads) @@ -41,13 +42,14 @@ arch -x86_64 /bin/zsh # Install Homebrew package manager /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" -# Install git, node.js, postgres, sbt, gfind, gsed -brew install openjdk@21 openssl git node postgresql sbt findutils coreutils gnu-sed redis yarn c-blosc brotli wget +# Install git, node.js, postgres, sbt, gfind, gsed, draco +brew install openjdk 
draco openssl git node postgresql sbt findutils coreutils gnu-sed redis yarn c-blosc brotli wget # Set env variables for openjdk and openssl # You probably want to add these lines manually to avoid conflicts in your zshrc echo 'if [ $(arch) = "i386" ]; then' >> ~/.zshrc -echo ' export PATH="/usr/local/opt/openjdk@14/bin:$PATH"' >> ~/.zshrc +echo ' export JAVA_HOME=/opt/homebrew/opt/openjdk/libexec/openjdk.jdk/Contents/Home' >> ~/.zshrc +echo ' export PATH="/usr/local/opt/openjdk/bin:$PATH"' >> ~/.zshrc echo ' export PATH="/usr/local/opt/openssl/bin:$PATH"' >> ~/.zshrc echo ' export LDFLAGS="-L/usr/local/opt/openssl/lib"' >> ~/.zshrc echo ' export CPPFLAGS="-I/usr/local/opt/openssl/include"' >> ~/.zshrc @@ -80,7 +82,7 @@ curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add - echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list sudo apt update -sudo apt install -y nodejs git postgresql postgresql-client unzip zip yarn redis-server build-essential libblosc1 libbrotli1 +sudo apt install -y nodejs git postgresql postgresql-client unzip zip yarn redis-server build-essential libblosc1 libbrotli1 libdraco-dev # Install sdkman, java, scala and sbt curl -s "https://get.sdkman.io" | bash diff --git a/Dockerfile b/Dockerfile index 9c1e4eaec3e..16652584ac0 100644 --- a/Dockerfile +++ b/Dockerfile @@ -2,7 +2,7 @@ FROM eclipse-temurin:21 ARG VERSION_NODE="18.x" RUN curl -sL "https://deb.nodesource.com/setup_${VERSION_NODE}" | bash - \ - && apt-get -y install libblosc1 libbrotli1 postgresql-client git nodejs \ + && apt-get -y install libblosc1 libbrotli1 postgresql-client libdraco4 git nodejs \ && rm -rf /var/lib/apt/lists/* RUN mkdir -p /webknossos diff --git a/Dockerfile.dev b/Dockerfile.dev index 1ec8818ee2e..b10a462e57c 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,4 +1,4 @@ -FROM scalableminds/sbt:master__7785473656 +FROM scalableminds/sbt:master__7830403826 ARG VERSION_NODE="18.x" ENV DEBIAN_FRONTEND 
noninteractive @@ -24,6 +24,7 @@ RUN apt-get update \ postgresql-client \ yarn \ git \ + libdraco-dev \ libblosc1 \ # The following packages are necessary to run headless-gl && apt-get install -y \ diff --git a/MIGRATIONS.unreleased.md b/MIGRATIONS.unreleased.md index b26c78c4dd1..9806a7b80a6 100644 --- a/MIGRATIONS.unreleased.md +++ b/MIGRATIONS.unreleased.md @@ -17,6 +17,7 @@ UPDATE webknossos.annotations_ SET state = 'Finished' WHERE _id IN (SELECT DIST - WEBKNOSSOS now uses Java 21 (up from Java 11). [#7599](https://github.com/scalableminds/webknossos/pull/7599) - NodeJS version 18+ is required for snapshot tests with ShadowDOM elements from Antd v5. [#7522](https://github.com/scalableminds/webknossos/pull/7522) - Email verification is disabled by default. To enable it, set `webKnossos.user.emailVerification.activated` to `true` in your `application.conf`. [#7620](https://github.com/scalableminds/webknossos/pull/7620) [#7621](https://github.com/scalableminds/webknossos/pull/7621) +- New dependency draco/libdraco-dev needs to be installed when deploying without Docker and for local development. [#7587](https://github.com/scalableminds/webknossos/pull/7587) ### Postgres Evolutions: diff --git a/app/controllers/UserTokenController.scala b/app/controllers/UserTokenController.scala index 907c713ecdf..410850dd38b 100644 --- a/app/controllers/UserTokenController.scala +++ b/app/controllers/UserTokenController.scala @@ -30,7 +30,7 @@ import scala.concurrent.ExecutionContext object RpcTokenHolder { /* * This token is used to tell the datastore or tracing store “I am WEBKNOSSOS”. - * The respective module asks the remote webKnossos to validate that. + * The respective module asks the remote WEBKNOSSOS to validate that. * The token is refreshed on every wK restart. * Keep it secret! 
*/ @@ -81,7 +81,7 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, } /* token may be - - the global webKnossosToken (allow everything) + - the global webknossosToken (allow everything) - a user token (allow what that user may do) - a dataset sharing token (allow seeing dataset / annotations that token belongs to) */ diff --git a/app/models/job/Job.scala b/app/models/job/Job.scala index d5798f165fc..a832bbb039b 100644 --- a/app/models/job/Job.scala +++ b/app/models/job/Job.scala @@ -80,16 +80,16 @@ case class Job( } } - def resultLinkPublic(organizationName: String, webKnossosPublicUrl: String): Option[String] = + def resultLinkPublic(organizationName: String, webknossosPublicUrl: String): Option[String] = for { resultLink <- resultLink(organizationName) - resultLinkPublic = if (resultLink.startsWith("/")) s"$webKnossosPublicUrl$resultLink" + resultLinkPublic = if (resultLink.startsWith("/")) s"$webknossosPublicUrl$resultLink" else s"$resultLink" } yield resultLinkPublic - def resultLinkSlackFormatted(organizationName: String, webKnossosPublicUrl: String): Option[String] = + def resultLinkSlackFormatted(organizationName: String, webknossosPublicUrl: String): Option[String] = for { - resultLink <- resultLinkPublic(organizationName, webKnossosPublicUrl) + resultLink <- resultLinkPublic(organizationName, webknossosPublicUrl) resultLinkFormatted = s" <$resultLink|Result>" } yield resultLinkFormatted } diff --git a/build.sbt b/build.sbt index 971dff89d18..850a31513e1 100644 --- a/build.sbt +++ b/build.sbt @@ -57,11 +57,17 @@ lazy val util = (project in file("util")).settings( dependencyOverrides ++= Dependencies.dependencyOverrides ) +lazy val webknossosJni = (project in file("webknossos-jni")) + .settings(nativeCompile / sourceDirectory := sourceDirectory.value) + .enablePlugins(JniNative) + lazy val webknossosDatastore = (project in file("webknossos-datastore")) .dependsOn(util) + .dependsOn(webknossosJni) .enablePlugins(play.sbt.PlayScala) 
.enablePlugins(BuildInfoPlugin) .enablePlugins(ProtocPlugin) + .settings(javah / target := (webknossosJni / nativeCompile / sourceDirectory).value / "include") .settings( name := "webknossos-datastore", commonSettings, diff --git a/clean b/clean index 11e9c6602be..27869a9f349 100755 --- a/clean +++ b/clean @@ -11,6 +11,7 @@ rm -rf webknossos-datastore/project/project/target rm -rf webknossos-tracingstore/target rm -rf webknossos-tracingstore/project/target rm -rf webknossos-tracingstore/project/project/target +rm -rf webknossos-jni/target rm -rf node_modules rm -rf .eslintcache mkdir target diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 3dc09a0551d..3202837e356 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -2191,7 +2191,6 @@ type MeshRequest = { additionalCoordinates: AdditionalCoordinate[] | undefined; mag: Vector3; segmentId: number; // Segment to build mesh for - subsamplingStrides: Vector3; // The cubeSize is in voxels in mag cubeSize: Vector3; scale: Vector3; @@ -2212,7 +2211,6 @@ export function computeAdHocMesh( additionalCoordinates, cubeSize, mappingName, - subsamplingStrides, ...rest } = meshRequest; @@ -2228,13 +2226,11 @@ export function computeAdHocMesh( // The back-end needs a small padding at the border of the // bounding box to calculate the mesh. This padding // is added here to the position and bbox size. 
- position: V3.toArray(V3.sub(position, subsamplingStrides)), + position: V3.toArray(V3.sub(position, [1, 1, 1])), additionalCoordinates, - cubeSize: V3.toArray(V3.add(cubeSize, subsamplingStrides)), + cubeSize: V3.toArray(V3.add(cubeSize, [1, 1, 1])), // Name and type of mapping to apply before building mesh (optional) mapping: mappingName, - // "size" of each voxel (i.e., only every nth voxel is considered in each dimension) - subsamplingStrides, ...rest, }, }, diff --git a/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts b/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts index 1a1bc18ed52..6617dbb6333 100644 --- a/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts @@ -451,11 +451,6 @@ function* maybeLoadMeshChunk( batchCounterPerSegment[segmentId]++; threeDMap.set(clippedPosition, true); - // In general, it is more performant to compute meshes in a more coarse resolution instead of using subsampling strides - // since in the coarse resolution less data needs to be loaded. Another possibility to increase performance is - // window.__marchingCubeSizeInMag1 which affects the cube size the marching cube algorithm will work on. If the cube is significantly larger than the - // segments, computations are wasted. 
- const subsamplingStrides = (window as any).__meshSubsamplingStrides || [1, 1, 1]; const scale = yield* select((state) => state.dataset.dataSource.scale); const dataStoreHost = yield* select((state) => state.dataset.dataStore.url); const owningOrganization = yield* select((state) => state.dataset.owningOrganization); @@ -493,7 +488,6 @@ function* maybeLoadMeshChunk( additionalCoordinates: additionalCoordinates || undefined, mag, segmentId, - subsamplingStrides, cubeSize, scale, findNeighbors, diff --git a/project/BuildInfoSettings.scala b/project/BuildInfoSettings.scala index ea3dd469d3c..a16d0223fe3 100644 --- a/project/BuildInfoSettings.scala +++ b/project/BuildInfoSettings.scala @@ -5,29 +5,32 @@ import scala.language.postfixOps object BuildInfoSettings { - def getStdoutFromCommand(command: String, failureMsg: String): String = { + def getStdoutFromCommand(command: String, failureMsg: String): String = try { (command !!).trim } catch { case _: Throwable => failureMsg } - } - val ciBuild: String = if (System.getenv().containsKey("CIRCLE_BUILD_NUM")) System.getenv().get("CIRCLE_BUILD_NUM") else "" + val ciBuild: String = + if (System.getenv().containsKey("CIRCLE_BUILD_NUM")) System.getenv().get("CIRCLE_BUILD_NUM") else "" val ciTag: String = if (System.getenv().containsKey("CIRCLE_TAG")) System.getenv().get("CIRCLE_TAG") else "" def commitHash: String = getStdoutFromCommand("git rev-parse HEAD", "") def commitDate: String = getStdoutFromCommand("git log -1 --format=%cd ", "") - def webKnossosVersion: String = if (ciTag != "") ciTag else (if (ciBuild != "") ciBuild else "dev") + def webknossosVersion: String = if (ciTag != "") ciTag else (if (ciBuild != "") ciBuild else "dev") lazy val webknossosBuildInfoSettings = Seq( - buildInfoKeys := Seq[BuildInfoKey](name, scalaVersion, sbtVersion, + buildInfoKeys := Seq[BuildInfoKey]( + name, + scalaVersion, + sbtVersion, "commitHash" -> commitHash, "commitDate" -> commitDate, "ciBuild" -> ciBuild, "ciTag" -> ciTag, - 
"version" -> webKnossosVersion, + "version" -> webknossosVersion, "datastoreApiVersion" -> "2.0" ), buildInfoPackage := "webknossos", @@ -35,12 +38,15 @@ object BuildInfoSettings { ) lazy val webknossosDatastoreBuildInfoSettings = Seq( - buildInfoKeys := Seq[BuildInfoKey](name, scalaVersion, sbtVersion, + buildInfoKeys := Seq[BuildInfoKey]( + name, + scalaVersion, + sbtVersion, "commitHash" -> commitHash, "commitDate" -> commitDate, "ciBuild" -> ciBuild, "ciTag" -> ciTag, - "version" -> webKnossosVersion, + "version" -> webknossosVersion, "datastoreApiVersion" -> "2.0" ), buildInfoPackage := "webknossosDatastore", @@ -48,13 +54,14 @@ object BuildInfoSettings { ) lazy val webknossosTracingstoreBuildInfoSettings = Seq( - buildInfoKeys := Seq[BuildInfoKey](name, scalaVersion, sbtVersion, - "commitHash" -> commitHash, - "commitDate" -> commitDate, - "ciBuild" -> ciBuild, - "ciTag" -> ciTag, - "version" -> webKnossosVersion - ), + buildInfoKeys := Seq[BuildInfoKey](name, + scalaVersion, + sbtVersion, + "commitHash" -> commitHash, + "commitDate" -> commitDate, + "ciBuild" -> ciBuild, + "ciTag" -> ciTag, + "version" -> webknossosVersion), buildInfoPackage := "webknossosTracingstore", buildInfoOptions := Seq(BuildInfoOption.ToJson) ) diff --git a/project/plugins.sbt b/project/plugins.sbt index 745d372c6a2..641d69007fc 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -16,5 +16,8 @@ addSbtPlugin("com.sksamuel.scapegoat" %% "sbt-scapegoat" % "1.2.1") // check dependencies against published vulnerabilities with sbt dependencyCheck addSbtPlugin("net.vonbuchholtz" % "sbt-dependency-check" % "5.1.0") -//protocol buffers +// protocol buffers libraryDependencies += "com.thesamet.scalapb" %% "compilerplugin" % "0.11.13" + +// java native interface +addSbtPlugin("com.github.sbt" % "sbt-jni" % "1.7.0") diff --git a/util/src/main/scala/com/scalableminds/util/geometry/Vec3Float.scala b/util/src/main/scala/com/scalableminds/util/geometry/Vec3Float.scala index 
7b2fe6ef557..c83722c3640 100644 --- a/util/src/main/scala/com/scalableminds/util/geometry/Vec3Float.scala +++ b/util/src/main/scala/com/scalableminds/util/geometry/Vec3Float.scala @@ -7,11 +7,21 @@ case class Vec3Float(x: Float, y: Float, z: Float) { def scale(s: Float): Vec3Float = Vec3Float(x * s, y * s, z * s) def *(s: Float): Vec3Float = scale(s) + def *(s: Double): Vec3Float = scale(s.toFloat) + def *(that: Vec3Float): Vec3Float = Vec3Float(x * that.x, y * that.y, z * that.z) + def +(that: Vec3Float): Vec3Float = Vec3Float(x + that.x, y + that.y, z + that.z) + def -(that: Vec3Float): Vec3Float = Vec3Float(x - that.x, y - that.y, z - that.z) + def toList: List[Float] = List(x, y, z) + + def normalize: Vec3Float = { + val length = Math.sqrt(x * x + y * y + z * z) + scale(1 / length.toFloat) + } } object Vec3Float { @@ -34,4 +44,7 @@ object Vec3Float { JsArray(l.map(toJson(_))) } } + + def crossProduct(a: Vec3Float, b: Vec3Float): Vec3Float = + Vec3Float(a.y * b.z - a.z * b.y, a.z * b.x - a.x * b.z, a.x * b.y - a.y * b.x) } diff --git a/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala b/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala index 45ff9657271..e6746f294be 100644 --- a/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala +++ b/util/src/main/scala/com/scalableminds/util/geometry/Vec3Int.scala @@ -17,6 +17,9 @@ case class Vec3Int(x: Int, y: Int, z: Int) { def *(that: Int): Vec3Int = Vec3Int(x * that, y * that, z * that) + def -(that: Vec3Int): Vec3Int = + Vec3Int(x - that.x, y - that.y, z - that.z) + def /(that: Vec3Int): Vec3Int = Vec3Int(x / that.x, y / that.y, z / that.z) diff --git a/util/src/main/scala/com/scalableminds/util/requestlogging/RequestLogging.scala b/util/src/main/scala/com/scalableminds/util/requestlogging/RequestLogging.scala index b4865c02a52..4387a11385c 100644 --- a/util/src/main/scala/com/scalableminds/util/requestlogging/RequestLogging.scala +++ 
b/util/src/main/scala/com/scalableminds/util/requestlogging/RequestLogging.scala @@ -49,7 +49,7 @@ trait AbstractRequestLogging extends LazyLogging { } trait RequestLogging extends AbstractRequestLogging { - // Hint: within webKnossos itself, UserAwareRequestLogging is available, which additionally logs the requester user id + // Hint: within webknossos itself, UserAwareRequestLogging is available, which additionally logs the requester user id def log(notifier: Option[String => Unit] = None)(block: => Future[Result])(implicit request: Request[_], ec: ExecutionContext): Future[Result] = diff --git a/util/src/main/scala/com/scalableminds/util/time/Instant.scala b/util/src/main/scala/com/scalableminds/util/time/Instant.scala index ba8d38aa9e6..bafc1ed2774 100644 --- a/util/src/main/scala/com/scalableminds/util/time/Instant.scala +++ b/util/src/main/scala/com/scalableminds/util/time/Instant.scala @@ -1,6 +1,7 @@ package com.scalableminds.util.time import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.Box.tryo import play.api.libs.json._ @@ -44,7 +45,7 @@ case class Instant(epochMillis: Long) extends Ordered[Instant] { def weekyear: Int = toJodaDateTime.getWeekyear } -object Instant extends FoxImplicits { +object Instant extends FoxImplicits with LazyLogging { def now: Instant = Instant(System.currentTimeMillis()) def max: Instant = Instant(253370761200000L) @@ -74,6 +75,8 @@ object Instant extends FoxImplicits { def since(before: Instant): FiniteDuration = now - before + def logSince(before: Instant, label: String): Unit = logger.info(f"$label took ${Instant.since(before)}") + private def fromStringSync(instantLiteral: String): Option[Instant] = fromIsoString(instantLiteral).orElse(fromEpochMillisString(instantLiteral)) diff --git a/util/src/main/scala/com/scalableminds/util/tools/TimeLogger.scala b/util/src/main/scala/com/scalableminds/util/tools/TimeLogger.scala deleted file mode 100644 index 
07b2b175c62..00000000000 --- a/util/src/main/scala/com/scalableminds/util/tools/TimeLogger.scala +++ /dev/null @@ -1,21 +0,0 @@ -package com.scalableminds.util.tools - -import com.typesafe.scalalogging.Logger - -import scala.concurrent.ExecutionContext - -object TimeLogger { - def logTime[A](caption: String, logger: Logger)(op: => A): A = { - val t = System.currentTimeMillis() - val result = op - logger.info(s"TIMELOG | $caption took ${System.currentTimeMillis - t} ms") - result - } - - def logTimeF[A](caption: String, logger: Logger)(op: => Fox[A])(implicit ec: ExecutionContext): Fox[A] = { - val t = System.currentTimeMillis() - val result = op - result.futureBox.onComplete(_ => logger.info(s"TIMELOG | $caption took ${System.currentTimeMillis - t} ms")) - result - } -} diff --git a/webknossos-datastore/Dockerfile b/webknossos-datastore/Dockerfile index f042bcf2daa..05b7aca84f6 100644 --- a/webknossos-datastore/Dockerfile +++ b/webknossos-datastore/Dockerfile @@ -1,6 +1,6 @@ FROM eclipse-temurin:21 RUN apt-get update \ - && apt-get -y install libblosc1 libbrotli1 \ + && apt-get -y install libblosc1 libbrotli1 libdraco4 \ && rm -rf /var/lib/apt/lists/* RUN mkdir -p /webknossos-datastore \ diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala index 24c5680ce88..852606f6405 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala @@ -19,11 +19,11 @@ class DataStoreModule extends AbstractModule { bind(classOf[UploadService]).asEagerSingleton() bind(classOf[DataSourceService]).asEagerSingleton() bind(classOf[DataVaultService]).asEagerSingleton() - bind(classOf[DSRemoteWebKnossosClient]).asEagerSingleton() + bind(classOf[DSRemoteWebknossosClient]).asEagerSingleton() 
bind(classOf[BinaryDataServiceHolder]).asEagerSingleton() bind(classOf[MappingService]).asEagerSingleton() bind(classOf[AgglomerateService]).asEagerSingleton() - bind(classOf[AdHocMeshingServiceHolder]).asEagerSingleton() + bind(classOf[AdHocMeshServiceHolder]).asEagerSingleton() bind(classOf[ApplicationHealthService]).asEagerSingleton() bind(classOf[DatasetErrorLoggingService]).asEagerSingleton() } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala index 4e26df33301..a2e3db07e26 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala @@ -34,7 +34,7 @@ class BinaryDataController @Inject()( binaryDataServiceHolder: BinaryDataServiceHolder, mappingService: MappingService, slackNotificationService: DSSlackNotificationService, - adHocMeshingServiceHolder: AdHocMeshingServiceHolder, + adHocMeshServiceHolder: AdHocMeshServiceHolder, findDataService: FindDataService, )(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller @@ -43,16 +43,16 @@ class BinaryDataController @Inject()( override def allowRemoteOrigin: Boolean = true val binaryDataService: BinaryDataService = binaryDataServiceHolder.binaryDataService - adHocMeshingServiceHolder.dataStoreAdHocMeshingConfig = + adHocMeshServiceHolder.dataStoreAdHocMeshConfig = (binaryDataService, mappingService, config.Datastore.AdHocMesh.timeout, config.Datastore.AdHocMesh.actorPoolSize) - val adHocMeshingService: AdHocMeshService = adHocMeshingServiceHolder.dataStoreAdHocMeshingService + val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.dataStoreAdHocMeshService - def requestViaWebKnossos( + def requestViaWebknossos( token: Option[String], organizationName: 
String, datasetName: String, dataLayerName: String - ): Action[List[WebKnossosDataRequest]] = Action.async(validateJson[List[WebKnossosDataRequest]]) { implicit request => + ): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationName)), urlOrHeaderToken(token, request)) { logTime(slackNotificationService.noticeSlowRequest) { @@ -261,7 +261,6 @@ class BinaryDataController @Inject()( segmentationLayer, request.body.cuboid(dataLayer), request.body.segmentId, - request.body.subsamplingStrides, request.body.scale, request.body.mapping, request.body.mappingType, @@ -271,7 +270,7 @@ class BinaryDataController @Inject()( // The client expects the ad-hoc mesh as a flat float-array. Three consecutive floats form a 3D point, three // consecutive 3D points (i.e., nine floats) form a triangle. // There are no shared vertices between triangles. 
- (vertices, neighbors) <- adHocMeshingService.requestAdHocMeshViaActor(adHocMeshRequest) + (vertices, neighbors) <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) } yield { // We need four bytes for each float val responseBuffer = ByteBuffer.allocate(vertices.length * 4).order(ByteOrder.LITTLE_ENDIAN) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala index 534e4ec2c8a..900cabae8f8 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala @@ -4,7 +4,6 @@ import com.google.inject.Inject import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.services._ -import com.scalableminds.webknossos.datastore.storage.AgglomerateFileKey import play.api.i18n.Messages import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} @@ -14,11 +13,13 @@ import scala.concurrent.ExecutionContext class DSMeshController @Inject()( accessTokenService: DataStoreAccessTokenService, meshFileService: MeshFileService, - dsRemoteWebKnossosClient: DSRemoteWebKnossosClient, - dsRemoteTracingstoreClient: DSRemoteTracingstoreClient, - binaryDataServiceHolder: BinaryDataServiceHolder + fullMeshService: DSFullMeshService, + val dsRemoteWebknossosClient: DSRemoteWebknossosClient, + val dsRemoteTracingstoreClient: DSRemoteTracingstoreClient, + val binaryDataServiceHolder: BinaryDataServiceHolder )(implicit bodyParsers: PlayBodyParsers, ec: ExecutionContext) extends Controller + with MeshMappingHelper with FoxImplicits { override def allowRemoteOrigin: Boolean = true @@ -73,83 +74,34 @@ class DSMeshController @Inject()( for { 
positions <- formatVersion match { case 3 => - targetMappingName match { - case None => - meshFileService.listMeshChunksForSegmentV3(organizationName, datasetName, dataLayerName, request.body) ?~> Messages( - "mesh.file.listChunks.failed", - request.body.segmentId.toString, - request.body.meshFile) ?~> Messages("mesh.file.load.failed", request.body.segmentId.toString) ~> BAD_REQUEST - case Some(mapping) => - for { - segmentIds: List[Long] <- segmentIdsForAgglomerateId(organizationName, - datasetName, - dataLayerName, - mapping, - editableMappingTracingId, - request.body.segmentId, - urlOrHeaderToken(token, request)) - meshChunksForUnmappedSegments = segmentIds.map( - segmentId => - meshFileService - .listMeshChunksForSegmentV3(organizationName, - datasetName, - dataLayerName, - ListMeshChunksRequest(request.body.meshFile, segmentId)) - .toOption) - meshChunksForUnmappedSegmentsFlat = meshChunksForUnmappedSegments.flatten - _ <- bool2Fox(meshChunksForUnmappedSegmentsFlat.nonEmpty) ?~> "zero chunks" ?~> "mesh.file.listChunks.failed" - chunkInfos = meshChunksForUnmappedSegmentsFlat.reduce(_.merge(_)) - } yield chunkInfos - } + for { + _ <- Fox.successful(()) + mappingNameForMeshFile = meshFileService.mappingNameForMeshFile(organizationName, + datasetName, + dataLayerName, + request.body.meshFile) + segmentIds: List[Long] <- segmentIdsForAgglomerateIdIfNeeded( + organizationName, + datasetName, + dataLayerName, + targetMappingName, + editableMappingTracingId, + request.body.segmentId, + mappingNameForMeshFile, + urlOrHeaderToken(token, request) + ) + chunkInfos <- meshFileService.listMeshChunksForSegmentsV3(organizationName, + datasetName, + dataLayerName, + request.body.meshFile, + segmentIds) + } yield chunkInfos case _ => Fox.failure("Wrong format version") ~> BAD_REQUEST } } yield Ok(Json.toJson(positions)) } } - private def segmentIdsForAgglomerateId(organizationName: String, - datasetName: String, - dataLayerName: String, - mappingName: String, - 
editableMappingTracingId: Option[String], - agglomerateId: Long, - token: Option[String]): Fox[List[Long]] = { - val agglomerateFileKey = AgglomerateFileKey( - organizationName, - datasetName, - dataLayerName, - mappingName - ) - editableMappingTracingId match { - case Some(tracingId) => - for { - tracingstoreUri <- dsRemoteWebKnossosClient.getTracingstoreUri - segmentIdsResult <- dsRemoteTracingstoreClient.getEditableMappingSegmentIdsForAgglomerate(tracingstoreUri, - tracingId, - agglomerateId, - token) - segmentIds <- if (segmentIdsResult.agglomerateIdIsPresent) - Fox.successful(segmentIdsResult.segmentIds) - else - for { - agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox - localSegmentIds <- agglomerateService.segmentIdsForAgglomerateId( - agglomerateFileKey, - agglomerateId - ) - } yield localSegmentIds - } yield segmentIds - case _ => - for { - agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox - segmentIds <- agglomerateService.segmentIdsForAgglomerateId( - agglomerateFileKey, - agglomerateId - ) - } yield segmentIds - } - } - def readMeshChunkV0(token: Option[String], organizationName: String, datasetName: String, @@ -191,4 +143,22 @@ class DSMeshController @Inject()( } } } + + def loadFullMeshStl(token: Option[String], + organizationName: String, + datasetName: String, + dataLayerName: String): Action[FullMeshRequest] = + Action.async(validateJson[FullMeshRequest]) { implicit request => + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationName)), + urlOrHeaderToken(token, request)) { + for { + data: Array[Byte] <- fullMeshService.loadFor(token: Option[String], + organizationName, + datasetName, + dataLayerName, + request.body) ?~> "mesh.file.loadChunk.failed" + + } yield Ok(data) + } + } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 7cef856bff7..d695f38efca 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -34,7 +34,7 @@ import scala.concurrent.duration._ class DataSourceController @Inject()( dataSourceRepository: DataSourceRepository, dataSourceService: DataSourceService, - remoteWebKnossosClient: DSRemoteWebKnossosClient, + remoteWebknossosClient: DSRemoteWebknossosClient, accessTokenService: DataStoreAccessTokenService, binaryDataServiceHolder: BinaryDataServiceHolder, connectomeFileService: ConnectomeFileService, @@ -85,7 +85,7 @@ class DataSourceController @Inject()( for { isKnownUpload <- uploadService.isKnownUpload(request.body.uploadId) _ <- if (!isKnownUpload) { - (remoteWebKnossosClient.reserveDataSourceUpload(request.body, urlOrHeaderToken(token, request)) ?~> "dataset.upload.validation.failed") + (remoteWebknossosClient.reserveDataSourceUpload(request.body, urlOrHeaderToken(token, request)) ?~> "dataset.upload.validation.failed") .flatMap(_ => uploadService.reserveUpload(request.body)) } else Fox.successful(()) } yield Ok @@ -153,7 +153,7 @@ class DataSourceController @Inject()( urlOrHeaderToken(token, request)) { for { (dataSourceId, datasetSizeBytes) <- uploadService.finishUpload(request.body) - _ <- remoteWebKnossosClient.reportUpload( + _ <- remoteWebknossosClient.reportUpload( dataSourceId, datasetSizeBytes, request.body.needsConversion.getOrElse(false), @@ -175,7 +175,7 @@ class DataSourceController @Inject()( accessTokenService.validateAccess(UserAccessRequest.deleteDataSource(dataSourceId), urlOrHeaderToken(token, request)) { for { - _ <- remoteWebKnossosClient.deleteDataSource(dataSourceId) ?~> "dataset.delete.webknossos.failed" + _ <- remoteWebknossosClient.deleteDataSource(dataSourceId) 
?~> "dataset.delete.webknossos.failed" _ <- uploadService.cancelUpload(request.body) ?~> "Could not cancel the upload." } yield Ok } @@ -353,7 +353,7 @@ class DataSourceController @Inject()( for { _ <- bool2Fox(dataSourceRepository.find(DataSourceId(datasetName, organizationName)).isEmpty) ?~> Messages( "dataSource.alreadyPresent") - _ <- remoteWebKnossosClient.reserveDataSourceUpload( + _ <- remoteWebknossosClient.reserveDataSourceUpload( ReserveUploadInformation( uploadId = "", name = datasetName, @@ -367,7 +367,7 @@ class DataSourceController @Inject()( ) ?~> "dataset.upload.validation.failed" _ <- dataSourceService.updateDataSource(request.body.copy(id = DataSourceId(datasetName, organizationName)), expectExisting = false) - _ <- remoteWebKnossosClient.reportUpload( + _ <- remoteWebknossosClient.reportUpload( DataSourceId(datasetName, organizationName), 0L, needsConversion = false, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala index 430875fa652..1f8222181c2 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ExportsController.scala @@ -6,7 +6,7 @@ import com.google.inject.Inject import com.scalableminds.util.tools.FoxImplicits import com.scalableminds.webknossos.datastore.DataStoreConfig import com.scalableminds.webknossos.datastore.services.{ - DSRemoteWebKnossosClient, + DSRemoteWebknossosClient, DataStoreAccessTokenService, UserAccessRequest } @@ -25,7 +25,7 @@ object JobExportProperties { implicit val jsonFormat: OFormat[JobExportProperties] = Json.format[JobExportProperties] } -class ExportsController @Inject()(webKnossosClient: DSRemoteWebKnossosClient, +class ExportsController @Inject()(webknossosClient: DSRemoteWebknossosClient, 
accessTokenService: DataStoreAccessTokenService, config: DataStoreConfig)(implicit ec: ExecutionContext) extends Controller @@ -38,7 +38,7 @@ class ExportsController @Inject()(webKnossosClient: DSRemoteWebKnossosClient, def download(token: Option[String], jobId: String): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccess(UserAccessRequest.downloadJobExport(jobId), urlOrHeaderToken(token, request)) { for { - exportProperties <- webKnossosClient.getJobExportProperties(jobId) + exportProperties <- webknossosClient.getJobExportProperties(jobId) fullPath = exportProperties.fullPathIn(dataBaseDir) _ <- bool2Fox(Files.exists(fullPath)) ?~> "job.export.fileNotFound" } yield Ok.sendPath(fullPath, inline = false) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala index 06b5ea29acb..5fa3b2e8414 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala @@ -28,7 +28,7 @@ class ZarrStreamingController @Inject()( dataSourceRepository: DataSourceRepository, accessTokenService: DataStoreAccessTokenService, binaryDataServiceHolder: BinaryDataServiceHolder, - remoteWebKnossosClient: DSRemoteWebKnossosClient, + remoteWebknossosClient: DSRemoteWebknossosClient, remoteTracingstoreClient: DSRemoteTracingstoreClient, )(implicit ec: ExecutionContext) extends Controller { @@ -66,7 +66,7 @@ class ZarrStreamingController @Inject()( dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebKnossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND + annotationSource <- 
remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND relevantToken = if (annotationSource.accessViaPrivateLink) Some(accessToken) else urlOrHeaderToken(token, request) annotationLayer = annotationSource.getAnnotationLayer(dataLayerName) @@ -134,7 +134,7 @@ class ZarrStreamingController @Inject()( def dataSourceWithAnnotationPrivateLink(token: Option[String], accessToken: String): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebKnossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND relevantToken = if (annotationSource.accessViaPrivateLink) Some(accessToken) else urlOrHeaderToken(token, request) volumeAnnotationLayers = annotationSource.annotationLayers.filter(_.typ == AnnotationLayerType.Volume) @@ -174,7 +174,7 @@ class ZarrStreamingController @Inject()( cxyz: String): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebKnossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND relevantToken = if (annotationSource.accessViaPrivateLink) Some(accessToken) else urlOrHeaderToken(token, request) layer = annotationSource.getAnnotationLayer(dataLayerName) @@ -253,7 +253,7 @@ class ZarrStreamingController @Inject()( dataLayerName: String, mag: String): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebKnossosClient + annotationSource <- remoteWebknossosClient .getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND relevantToken = if (annotationSource.accessViaPrivateLink) Some(accessToken) else urlOrHeaderToken(token, request) layer = 
annotationSource.getAnnotationLayer(dataLayerName) @@ -302,7 +302,7 @@ class ZarrStreamingController @Inject()( mag: String): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebKnossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND relevantToken = if (annotationSource.accessViaPrivateLink) Some(accessToken) else urlOrHeaderToken(token, request) layer = annotationSource.getAnnotationLayer(dataLayerName) @@ -359,7 +359,7 @@ class ZarrStreamingController @Inject()( dataLayerName: String): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebKnossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND layer = annotationSource.getAnnotationLayer(dataLayerName) relevantToken = if (annotationSource.accessViaPrivateLink) Some(accessToken) else urlOrHeaderToken(token, request) @@ -404,7 +404,7 @@ class ZarrStreamingController @Inject()( def dataSourceFolderContentsPrivateLink(token: Option[String], accessToken: String): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebKnossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) dataSource <- dataSourceRepository .findUsable(DataSourceId(annotationSource.dataSetName, annotationSource.organizationName)) .toFox ?~> Messages("dataSource.notFound") ~> NOT_FOUND @@ -438,7 +438,7 @@ class ZarrStreamingController @Inject()( def zGroupPrivateLink(token: Option[String], accessToken: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit 
request => for { - annotationSource <- remoteWebKnossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken, urlOrHeaderToken(token, request)) ~> NOT_FOUND layer = annotationSource.getAnnotationLayer(dataLayerName) relevantToken = if (annotationSource.accessViaPrivateLink) Some(accessToken) else urlOrHeaderToken(token, request) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/draco/NativeDracoToStlConverter.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/draco/NativeDracoToStlConverter.scala new file mode 100644 index 00000000000..04f26a88ee5 --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/draco/NativeDracoToStlConverter.scala @@ -0,0 +1,14 @@ +package com.scalableminds.webknossos.datastore.draco + +import com.github.sbt.jni.nativeLoader + +@nativeLoader("webknossosJni0") +class NativeDracoToStlConverter() { + @native def dracoToStl(a: Array[Byte], + offsetX: Float, + offsetY: Float, + offsetZ: Float, + scaleX: Double, + scaleY: Double, + scaleZ: Double): Array[Byte] +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffExplorer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffExplorer.scala index b12ef60a610..e959da53a87 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffExplorer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffExplorer.scala @@ -259,7 +259,7 @@ class NgffExplorer(implicit val ec: ExecutionContext) extends RemoteLayerExplore /* * Guesses the voxel size from all transforms of an ngff multiscale object. * Note: the returned voxel size is in axis units and should later be combined with those units - * to get a webKnossos-typical voxel size in nanometers. 
+ * to get a webknossos-typical voxel size in nanometers. * Note: allCoordinateTransforms is nested: the inner list has all transforms of one ngff “dataset” (mag in our terminology), * the outer list gathers these for all such “datasets” (mags) of one “multiscale object” (layer) */ diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/DataRequests.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/DataRequests.scala index f84b922a462..fb465df2aba 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/DataRequests.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/DataRequests.scala @@ -24,7 +24,7 @@ case class DataRequest( def cuboid(dataLayer: DataLayer): Cuboid = Cuboid(position, width, height, depth) } -case class WebKnossosDataRequest( +case class WebknossosDataRequest( position: Vec3Int, mag: Vec3Int, cubeSize: Int, @@ -41,16 +41,15 @@ case class WebKnossosDataRequest( DataServiceRequestSettings(halfByte = fourBit.getOrElse(false), applyAgglomerate, version, additionalCoordinates) } -object WebKnossosDataRequest { - implicit val jsonFormat: OFormat[WebKnossosDataRequest] = Json.format[WebKnossosDataRequest] +object WebknossosDataRequest { + implicit val jsonFormat: OFormat[WebknossosDataRequest] = Json.format[WebknossosDataRequest] } case class WebknossosAdHocMeshRequest( - position: Vec3Int, + position: Vec3Int, // In mag1 mag: Vec3Int, - cubeSize: Vec3Int, + cubeSize: Vec3Int, // In target mag segmentId: Long, - subsamplingStrides: Vec3Int, scale: Vec3Double, mapping: Option[String] = None, mappingType: Option[String] = None, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala index 585a8e9db24..c2398fbb254 100644 --- 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/requests/DataServiceRequests.scala @@ -1,6 +1,5 @@ package com.scalableminds.webknossos.datastore.models.requests -import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, BucketPosition, CubePosition} import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, SegmentationLayer} @@ -20,8 +19,7 @@ case class DataServiceDataRequest( dataLayer: DataLayer, dataLayerMapping: Option[String], cuboid: Cuboid, - settings: DataServiceRequestSettings, - subsamplingStrides: Vec3Int = Vec3Int.ones // if > 1, skip voxels when loading (used for adhoc mesh generation) + settings: DataServiceRequestSettings ) case class DataReadInstruction( diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala index a40b4923d2c..4ba7159c4e3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala @@ -53,7 +53,7 @@ object UserAccessRequest { } trait AccessTokenService { - val remoteWebKnossosClient: RemoteWebKnossosClient + val remoteWebknossosClient: RemoteWebknossosClient private val AccessExpiration: FiniteDuration = 2 minutes private lazy val accessAnswersCache: AlfuCache[(UserAccessRequest, Option[String]), UserAccessAnswer] = @@ -75,7 +75,7 @@ trait AccessTokenService { private def hasUserAccess(accessRequest: UserAccessRequest, token: Option[String])( implicit ec: ExecutionContext): Fox[UserAccessAnswer] = accessAnswersCache.getOrLoad((accessRequest, token), - _ => 
remoteWebKnossosClient.requestUserAccess(token, accessRequest)) + _ => remoteWebknossosClient.requestUserAccess(token, accessRequest)) def assertUserAccess(accessRequest: UserAccessRequest, token: Option[String])( implicit ec: ExecutionContext): Fox[Unit] = @@ -96,5 +96,5 @@ trait AccessTokenService { } } -class DataStoreAccessTokenService @Inject()(val remoteWebKnossosClient: DSRemoteWebKnossosClient) +class DataStoreAccessTokenService @Inject()(val remoteWebknossosClient: DSRemoteWebknossosClient) extends AccessTokenService diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshService.scala index 0f5b5c24b35..cb374803507 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshService.scala @@ -28,7 +28,6 @@ case class AdHocMeshRequest(dataSource: Option[DataSource], dataLayer: SegmentationLayer, cuboid: Cuboid, segmentId: Long, - subsamplingStrides: Vec3Int, scale: Vec3Double, mapping: Option[String] = None, mappingType: Option[String] = None, @@ -47,7 +46,7 @@ class AdHocMeshActor(val service: AdHocMeshService, val timeout: FiniteDuration) case request: AdHocMeshRequest => sender() ! Await.result(service.requestAdHocMesh(request).futureBox, timeout) case _ => - sender() ! Failure("Unexpected message sent to AdHocMeshingActor.") + sender() ! 
Failure("Unexpected message sent to AdHocMeshActor.") } } @@ -116,8 +115,7 @@ class AdHocMeshService(binaryDataService: BinaryDataService, request.dataLayer, request.mapping, request.cuboid, - DataServiceRequestSettings(halfByte = false, request.mapping, None), - request.subsamplingStrides + DataServiceRequestSettings(halfByte = false, request.mapping, None) ) agglomerateService.applyAgglomerate(dataRequest)(data) }.getOrElse(Full(data)) @@ -171,23 +169,16 @@ class AdHocMeshService(binaryDataService: BinaryDataService, } val cuboid = request.cuboid - val subsamplingStrides = - Vec3Double(request.subsamplingStrides.x, request.subsamplingStrides.y, request.subsamplingStrides.z) val dataRequest = DataServiceDataRequest( request.dataSource.orNull, request.dataLayer, request.mapping, cuboid, - DataServiceRequestSettings.default.copy(additionalCoordinates = request.additionalCoordinates), - request.subsamplingStrides + DataServiceRequestSettings.default.copy(additionalCoordinates = request.additionalCoordinates) ) - val dataDimensions = Vec3Int( - math.ceil(cuboid.width / subsamplingStrides.x).toInt, - math.ceil(cuboid.height / subsamplingStrides.y).toInt, - math.ceil(cuboid.depth / subsamplingStrides.z).toInt - ) + val dataDimensions = Vec3Int(cuboid.width, cuboid.height, cuboid.depth) val offset = Vec3Double(cuboid.topLeft.voxelXInMag, cuboid.topLeft.voxelYInMag, cuboid.topLeft.voxelZInMag) val scale = Vec3Double(cuboid.topLeft.mag) * request.scale @@ -216,14 +207,8 @@ class AdHocMeshService(binaryDataService: BinaryDataService, math.min(dataDimensions.y - y, 33), math.min(dataDimensions.z - z, 33)) if (subVolumeContainsSegmentId(mappedData, dataDimensions, boundingBox, mappedSegmentId)) { - MarchingCubes.marchingCubes[T](mappedData, - dataDimensions, - boundingBox, - mappedSegmentId, - subsamplingStrides, - offset, - scale, - vertexBuffer) + MarchingCubes + .marchingCubes[T](mappedData, dataDimensions, boundingBox, mappedSegmentId, offset, scale, vertexBuffer) } } 
(vertexBuffer.flatMap(_.toList.map(_.toFloat)).toArray, neighbors) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshServiceHolder.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshServiceHolder.scala new file mode 100644 index 00000000000..725f3d358ae --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshServiceHolder.scala @@ -0,0 +1,24 @@ +package com.scalableminds.webknossos.datastore.services + +import org.apache.pekko.actor.ActorSystem +import javax.inject.Inject + +import scala.concurrent.ExecutionContext +import scala.concurrent.duration.FiniteDuration + +class AdHocMeshServiceHolder @Inject()(actorSystem: ActorSystem)(implicit ec: ExecutionContext) { + var dataStoreAdHocMeshConfig: (BinaryDataService, MappingService, FiniteDuration, Int) = (null, null, null, 0) + lazy val dataStoreAdHocMeshService: AdHocMeshService = new AdHocMeshService(dataStoreAdHocMeshConfig._1, + dataStoreAdHocMeshConfig._2, + actorSystem, + dataStoreAdHocMeshConfig._3, + dataStoreAdHocMeshConfig._4) + + var tracingStoreAdHocMeshConfig: (BinaryDataService, FiniteDuration, Int) = (null, null, 0) + lazy val tracingStoreAdHocMeshService: AdHocMeshService = new AdHocMeshService(tracingStoreAdHocMeshConfig._1, + null, + actorSystem, + tracingStoreAdHocMeshConfig._2, + tracingStoreAdHocMeshConfig._3) + +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshingServiceHolder.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshingServiceHolder.scala deleted file mode 100644 index 4ae043c805b..00000000000 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshingServiceHolder.scala +++ /dev/null @@ -1,24 +0,0 @@ -package com.scalableminds.webknossos.datastore.services - -import org.apache.pekko.actor.ActorSystem -import javax.inject.Inject - -import 
scala.concurrent.ExecutionContext -import scala.concurrent.duration.FiniteDuration - -class AdHocMeshingServiceHolder @Inject()(actorSystem: ActorSystem)(implicit ec: ExecutionContext) { - var dataStoreAdHocMeshingConfig: (BinaryDataService, MappingService, FiniteDuration, Int) = (null, null, null, 0) - lazy val dataStoreAdHocMeshingService: AdHocMeshService = new AdHocMeshService(dataStoreAdHocMeshingConfig._1, - dataStoreAdHocMeshingConfig._2, - actorSystem, - dataStoreAdHocMeshingConfig._3, - dataStoreAdHocMeshingConfig._4) - - var tracingStoreAdHocMeshingConfig: (BinaryDataService, FiniteDuration, Int) = (null, null, 0) - lazy val tracingStoreAdHocMeshingService: AdHocMeshService = new AdHocMeshService(tracingStoreAdHocMeshingConfig._1, - null, - actorSystem, - tracingStoreAdHocMeshingConfig._2, - tracingStoreAdHocMeshingConfig._3) - -} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala index 803bd5001f9..586edabbdb4 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala @@ -40,7 +40,7 @@ class BinaryDataService(val dataBaseDir: Path, if (!request.cuboid.hasValidDimensions) { Fox.failure("Invalid cuboid dimensions (must be > 0 and <= 512).") - } else if (request.cuboid.isSingleBucket(DataLayer.bucketLength) && request.subsamplingStrides == Vec3Int.ones) { + } else if (request.cuboid.isSingleBucket(DataLayer.bucketLength)) { bucketQueue.headOption.toFox.flatMap { bucket => handleBucketRequest(request, bucket.copy(additionalCoordinates = request.settings.additionalCoordinates)) } @@ -124,68 +124,38 @@ class BinaryDataService(val dataBaseDir: Path, private def cutOutCuboid(request: DataServiceDataRequest, rs: List[(BucketPosition, Array[Byte])]): 
Array[Byte] = { val bytesPerElement = request.dataLayer.bytesPerElement val cuboid = request.cuboid - val subsamplingStrides = request.subsamplingStrides - - val resultVolume = Vec3Int( - math.ceil(cuboid.width.toDouble / subsamplingStrides.x.toDouble).toInt, - math.ceil(cuboid.height.toDouble / subsamplingStrides.y.toDouble).toInt, - math.ceil(cuboid.depth.toDouble / subsamplingStrides.z.toDouble).toInt - ) - val result = new Array[Byte](resultVolume.x * resultVolume.y * resultVolume.z * bytesPerElement) + val subsamplingStrides = Vec3Int.ones + + val resultShape = Vec3Int(cuboid.width, cuboid.height, cuboid.depth) + val result = new Array[Byte](cuboid.volume * bytesPerElement) val bucketLength = DataLayer.bucketLength rs.reverse.foreach { case (bucket, data) => - val xRemainder = cuboid.topLeft.voxelXInMag % subsamplingStrides.x - val yRemainder = cuboid.topLeft.voxelYInMag % subsamplingStrides.y - val zRemainder = cuboid.topLeft.voxelZInMag % subsamplingStrides.z - - val xMin = math - .ceil( - (math - .max(cuboid.topLeft.voxelXInMag, bucket.topLeft.voxelXInMag) - .toDouble - xRemainder) / subsamplingStrides.x.toDouble) - .toInt * subsamplingStrides.x + xRemainder - val yMin = math - .ceil( - (math - .max(cuboid.topLeft.voxelYInMag, bucket.topLeft.voxelYInMag) - .toDouble - yRemainder) / subsamplingStrides.y.toDouble) - .toInt * subsamplingStrides.y + yRemainder - val zMin = math - .ceil( - (math - .max(cuboid.topLeft.voxelZInMag, bucket.topLeft.voxelZInMag) - .toDouble - zRemainder) / subsamplingStrides.z.toDouble) - .toInt * subsamplingStrides.z + zRemainder + val xMin = math.max(cuboid.topLeft.voxelXInMag, bucket.topLeft.voxelXInMag) + val yMin = math.max(cuboid.topLeft.voxelYInMag, bucket.topLeft.voxelYInMag) + val zMin = math.max(cuboid.topLeft.voxelZInMag, bucket.topLeft.voxelZInMag) val xMax = math.min(cuboid.bottomRight.voxelXInMag, bucket.topLeft.voxelXInMag + bucketLength) val yMax = math.min(cuboid.bottomRight.voxelYInMag, bucket.topLeft.voxelYInMag + 
bucketLength) val zMax = math.min(cuboid.bottomRight.voxelZInMag, bucket.topLeft.voxelZInMag + bucketLength) for { - z <- zMin until zMax by subsamplingStrides.z - y <- yMin until yMax by subsamplingStrides.y - // if subsamplingStrides.x == 1, we can bulk copy a row of voxels and do not need to iterate in the x dimension - x <- xMin until xMax by (if (subsamplingStrides.x == 1) xMax else subsamplingStrides.x) + z <- zMin until zMax + y <- yMin until yMax + // We can bulk copy a row of voxels and do not need to iterate in the x dimension } { val dataOffset = - (x % bucketLength + + (xMin % bucketLength + y % bucketLength * bucketLength + z % bucketLength * bucketLength * bucketLength) * bytesPerElement - val rx = (x - cuboid.topLeft.voxelXInMag) / subsamplingStrides.x + val rx = (xMin - cuboid.topLeft.voxelXInMag) / subsamplingStrides.x val ry = (y - cuboid.topLeft.voxelYInMag) / subsamplingStrides.y val rz = (z - cuboid.topLeft.voxelZInMag) / subsamplingStrides.z - val resultOffset = (rx + ry * resultVolume.x + rz * resultVolume.x * resultVolume.y) * bytesPerElement - if (subsamplingStrides.x == 1) { - // bulk copy a row of voxels - System.arraycopy(data, dataOffset, result, resultOffset, (xMax - x) * bytesPerElement) - } else { - // copy single voxel - System.arraycopy(data, dataOffset, result, resultOffset, bytesPerElement) - } + val resultOffset = (rx + ry * resultShape.x + rz * resultShape.x * resultShape.y) * bytesPerElement + System.arraycopy(data, dataOffset, result, resultOffset, (xMax - xMin) * bytesPerElement) } } result diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala new file mode 100644 index 00000000000..a82e24a246a --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala @@ -0,0 +1,177 @@ +package 
com.scalableminds.webknossos.datastore.services + +import com.google.inject.Inject +import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} +import com.scalableminds.util.time.Instant +import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.{bool2Fox, box2Fox, option2Fox} +import com.scalableminds.webknossos.datastore.DataStoreConfig +import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, VoxelPosition} +import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, SegmentationLayer} +import com.scalableminds.webknossos.datastore.models.requests.Cuboid +import com.typesafe.scalalogging.LazyLogging +import net.liftweb.common.Box.tryo +import play.api.i18n.MessagesProvider +import play.api.libs.json.{Json, OFormat} + +import scala.concurrent.ExecutionContext + +case class FullMeshRequest( + meshFileName: Option[String], // None means ad-hoc meshing + lod: Option[Int], + segmentId: Long, // if mappingName is set, this is an agglomerate id + mappingName: Option[String], + mappingType: Option[String], // json, agglomerate, editableMapping + editableMappingTracingId: Option[String], + mag: Option[Vec3Int], // required for ad-hoc meshing + seedPosition: Option[Vec3Int], // required for ad-hoc meshing + additionalCoordinates: Option[Seq[AdditionalCoordinate]] +) + +object FullMeshRequest { + implicit val jsonFormat: OFormat[FullMeshRequest] = Json.format[FullMeshRequest] +} + +class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, + meshFileService: MeshFileService, + val binaryDataServiceHolder: BinaryDataServiceHolder, + val dsRemoteWebknossosClient: DSRemoteWebknossosClient, + val dsRemoteTracingstoreClient: DSRemoteTracingstoreClient, + mappingService: MappingService, + config: DataStoreConfig, + adHocMeshServiceHolder: AdHocMeshServiceHolder) + extends LazyLogging + with FullMeshHelper + with MeshMappingHelper { + + val binaryDataService: BinaryDataService = 
binaryDataServiceHolder.binaryDataService + adHocMeshServiceHolder.dataStoreAdHocMeshConfig = + (binaryDataService, mappingService, config.Datastore.AdHocMesh.timeout, config.Datastore.AdHocMesh.actorPoolSize) + val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.dataStoreAdHocMeshService + + def loadFor(token: Option[String], + organizationName: String, + datasetName: String, + dataLayerName: String, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Array[Byte]] = + fullMeshRequest.meshFileName match { + case Some(_) => + loadFullMeshFromMeshfile(token, organizationName, datasetName, dataLayerName, fullMeshRequest) + case None => loadFullMeshFromAdHoc(organizationName, datasetName, dataLayerName, fullMeshRequest) + } + + private def loadFullMeshFromAdHoc( + organizationName: String, + datasetName: String, + dataLayerName: String, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Array[Byte]] = + for { + mag <- fullMeshRequest.mag.toFox ?~> "mag.neededForAdHoc" + seedPosition <- fullMeshRequest.seedPosition.toFox ?~> "seedPosition.neededForAdHoc" + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationName, + datasetName, + dataLayerName) + segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> "dataLayer.mustBeSegmentation" + before = Instant.now + verticesForChunks <- getAllAdHocChunks(dataSource, + segmentationLayer, + fullMeshRequest, + VoxelPosition(seedPosition.x, seedPosition.y, seedPosition.z, mag), + adHocChunkSize) + encoded = verticesForChunks.map(adHocMeshToStl) + array = combineEncodedChunksToStl(encoded) + _ = logMeshingDuration(before, "ad-hoc meshing", array.length) + } yield array + + private def getAllAdHocChunks( + dataSource: DataSource, + segmentationLayer: SegmentationLayer, + fullMeshRequest: FullMeshRequest, + topLeft: VoxelPosition, + chunkSize: Vec3Int, + visited: 
collection.mutable.Set[VoxelPosition] = collection.mutable.Set[VoxelPosition]())( + implicit ec: ExecutionContext): Fox[List[Array[Float]]] = { + val adHocMeshRequest = AdHocMeshRequest( + Some(dataSource), + segmentationLayer, + Cuboid(topLeft, chunkSize.x + 1, chunkSize.y + 1, chunkSize.z + 1), + fullMeshRequest.segmentId, + dataSource.scale, + fullMeshRequest.mappingName, + fullMeshRequest.mappingType, + fullMeshRequest.additionalCoordinates + ) + visited += topLeft + for { + (vertices: Array[Float], neighbors) <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) + nextPositions: List[VoxelPosition] = generateNextTopLeftsFromNeighbors(topLeft, neighbors, chunkSize, visited) + _ = visited ++= nextPositions + neighborVerticesNested <- Fox.serialCombined(nextPositions) { position: VoxelPosition => + getAllAdHocChunks(dataSource, segmentationLayer, fullMeshRequest, position, chunkSize, visited) + } + allVertices: List[Array[Float]] = vertices +: neighborVerticesNested.flatten + } yield allVertices + } + + private def loadFullMeshFromMeshfile( + token: Option[String], + organizationName: String, + datasetName: String, + layerName: String, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[Array[Byte]] = + for { + meshFileName <- fullMeshRequest.meshFileName.toFox ?~> "meshFileName.needed" + before = Instant.now + mappingNameForMeshFile = meshFileService.mappingNameForMeshFile(organizationName, + datasetName, + layerName, + meshFileName) + segmentIds <- segmentIdsForAgglomerateIdIfNeeded( + organizationName, + datasetName, + layerName, + fullMeshRequest.mappingName, + fullMeshRequest.editableMappingTracingId, + fullMeshRequest.segmentId, + mappingNameForMeshFile, + token + ) + chunkInfos: WebknossosSegmentInfo <- meshFileService.listMeshChunksForSegmentsV3(organizationName, + datasetName, + layerName, + meshFileName, + segmentIds) + allChunkRanges: List[MeshChunk] = chunkInfos.chunks.lods.head.chunks + stlEncodedChunks: Seq[Array[Byte]] 
<- Fox.serialCombined(allChunkRanges) { chunkRange: MeshChunk => + readMeshChunkAsStl(organizationName, datasetName, layerName, meshFileName, chunkRange, chunkInfos.transform) + } + stlOutput = combineEncodedChunksToStl(stlEncodedChunks) + _ = logMeshingDuration(before, "meshfile", stlOutput.length) + } yield stlOutput + + private def readMeshChunkAsStl(organizationName: String, + datasetName: String, + layerName: String, + meshfileName: String, + chunkInfo: MeshChunk, + transform: Array[Array[Double]])(implicit ec: ExecutionContext): Fox[Array[Byte]] = + for { + (dracoMeshChunkBytes, encoding) <- meshFileService.readMeshChunkV3( + organizationName, + datasetName, + layerName, + MeshChunkDataRequestV3List(meshfileName, List(MeshChunkDataRequestV3(chunkInfo.byteOffset, chunkInfo.byteSize))) + ) + _ <- bool2Fox(encoding == "draco") ?~> s"meshfile encoding is $encoding, only draco is supported" + scale <- tryo(Vec3Double(transform(0)(0), transform(1)(1), transform(2)(2))) ?~> "could not extract scale from meshfile transform attribute" + stlEncodedChunk <- tryo( + dracoToStlConverter.dracoToStl(dracoMeshChunkBytes, + chunkInfo.position.x, + chunkInfo.position.y, + chunkInfo.position.z, + scale.x, + scale.y, + scale.z)) + } yield stlEncodedChunk + +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala similarity index 87% rename from webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala rename to webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala index f5e3f580ef7..be4c0a670ea 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala 
@@ -31,17 +31,17 @@ object TracingStoreInfo { implicit val jsonFormat: OFormat[TracingStoreInfo] = Json.format[TracingStoreInfo] } -trait RemoteWebKnossosClient { +trait RemoteWebknossosClient { def requestUserAccess(token: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] } -class DSRemoteWebKnossosClient @Inject()( +class DSRemoteWebknossosClient @Inject()( rpc: RPC, config: DataStoreConfig, val lifecycle: ApplicationLifecycle, @Named("webknossos-datastore") val system: ActorSystem )(implicit val ec: ExecutionContext) - extends RemoteWebKnossosClient + extends RemoteWebknossosClient with IntervalScheduler with LazyLogging with FoxImplicits { @@ -51,19 +51,19 @@ class DSRemoteWebKnossosClient @Inject()( private val dataStoreUri: String = config.Http.uri private val reportUsedStorageEnabled: Boolean = config.Datastore.ReportUsedStorage.enabled - private val webKnossosUri: String = config.Datastore.WebKnossos.uri + private val webknossosUri: String = config.Datastore.WebKnossos.uri protected lazy val tickerInterval: FiniteDuration = config.Datastore.WebKnossos.pingInterval def tick(): Unit = reportStatus() private def reportStatus(): Fox[_] = - rpc(s"$webKnossosUri/api/datastores/$dataStoreName/status") + rpc(s"$webknossosUri/api/datastores/$dataStoreName/status") .addQueryString("key" -> dataStoreKey) .patch(DataStoreStatus(ok = true, dataStoreUri, Some(reportUsedStorageEnabled))) def reportDataSource(dataSource: InboxDataSourceLike): Fox[_] = - rpc(s"$webKnossosUri/api/datastores/$dataStoreName/datasource") + rpc(s"$webknossosUri/api/datastores/$dataStoreName/datasource") .addQueryString("key" -> dataStoreKey) .put(dataSource) @@ -73,7 +73,7 @@ class DSRemoteWebKnossosClient @Inject()( viaAddRoute: Boolean, userToken: Option[String]): Fox[Unit] = for { - _ <- rpc(s"$webKnossosUri/api/datastores/$dataStoreName/reportDatasetUpload") + _ <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reportDatasetUpload") .addQueryString("key" -> 
dataStoreKey) .addQueryString("dataSetName" -> dataSourceId.name) .addQueryString("needsConversion" -> needsConversion.toString) @@ -84,7 +84,7 @@ class DSRemoteWebKnossosClient @Inject()( } yield () def reportDataSources(dataSources: List[InboxDataSourceLike]): Fox[_] = - rpc(s"$webKnossosUri/api/datastores/$dataStoreName/datasources") + rpc(s"$webknossosUri/api/datastores/$dataStoreName/datasources") .addQueryString("key" -> dataStoreKey) .silent .put(dataSources) @@ -92,23 +92,23 @@ class DSRemoteWebKnossosClient @Inject()( def reserveDataSourceUpload(info: ReserveUploadInformation, userTokenOpt: Option[String]): Fox[Unit] = for { userToken <- option2Fox(userTokenOpt) ?~> "reserveUpload.noUserToken" - _ <- rpc(s"$webKnossosUri/api/datastores/$dataStoreName/reserveUpload") + _ <- rpc(s"$webknossosUri/api/datastores/$dataStoreName/reserveUpload") .addQueryString("key" -> dataStoreKey) .addQueryString("token" -> userToken) .post(info) } yield () def deleteDataSource(id: DataSourceId): Fox[_] = - rpc(s"$webKnossosUri/api/datastores/$dataStoreName/deleteDataset").addQueryString("key" -> dataStoreKey).post(id) + rpc(s"$webknossosUri/api/datastores/$dataStoreName/deleteDataset").addQueryString("key" -> dataStoreKey).post(id) def getJobExportProperties(jobId: String): Fox[JobExportProperties] = - rpc(s"$webKnossosUri/api/datastores/$dataStoreName/jobExportProperties") + rpc(s"$webknossosUri/api/datastores/$dataStoreName/jobExportProperties") .addQueryString("jobId" -> jobId) .addQueryString("key" -> dataStoreKey) .getWithJsonResponse[JobExportProperties] override def requestUserAccess(userToken: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] = - rpc(s"$webKnossosUri/api/datastores/$dataStoreName/validateUserAccess") + rpc(s"$webknossosUri/api/datastores/$dataStoreName/validateUserAccess") .addQueryString("key" -> dataStoreKey) .addQueryStringOptional("token", userToken) .postJsonWithJsonResponse[UserAccessRequest, 
UserAccessAnswer](accessRequest) @@ -119,7 +119,7 @@ class DSRemoteWebKnossosClient @Inject()( "tracingStore", _ => for { - tracingStoreInfo <- rpc(s"$webKnossosUri/api/tracingstore") + tracingStoreInfo <- rpc(s"$webknossosUri/api/tracingstore") .addQueryString("key" -> dataStoreKey) .getWithJsonResponse[TracingStoreInfo] } yield tracingStoreInfo.url @@ -134,7 +134,7 @@ class DSRemoteWebKnossosClient @Inject()( annotationSourceCache.getOrLoad( (accessToken, userToken), _ => - rpc(s"$webKnossosUri/api/annotations/source/$accessToken") + rpc(s"$webknossosUri/api/annotations/source/$accessToken") .addQueryString("key" -> dataStoreKey) .addQueryStringOptional("userToken", userToken) .getWithJsonResponse[AnnotationSource] @@ -147,7 +147,7 @@ class DSRemoteWebKnossosClient @Inject()( credentialCache.getOrLoad( credentialId, _ => - rpc(s"$webKnossosUri/api/datastores/$dataStoreName/findCredential") + rpc(s"$webknossosUri/api/datastores/$dataStoreName/findCredential") .addQueryString("credentialId" -> credentialId) .addQueryString("key" -> dataStoreKey) .silent diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala index bd2eebcd7c9..c242f8e93c7 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceRepository.scala @@ -13,7 +13,7 @@ import play.api.i18n.{Messages, MessagesProvider} import scala.concurrent.ExecutionContext class DataSourceRepository @Inject()( - remoteWebKnossosClient: DSRemoteWebKnossosClient, + remoteWebknossosClient: DSRemoteWebknossosClient, @Named("webknossos-datastore") val system: ActorSystem )(implicit ec: ExecutionContext) extends TemporaryStore[DataSourceId, InboxDataSource](system) @@ -34,7 +34,7 @@ class DataSourceRepository @Inject()( for { _ 
<- Fox.successful(()) _ = insert(dataSource.id, dataSource) - _ <- remoteWebKnossosClient.reportDataSource(dataSource) + _ <- remoteWebknossosClient.reportDataSource(dataSource) } yield () def updateDataSources(dataSources: List[InboxDataSource]): Fox[Unit] = @@ -42,12 +42,12 @@ class DataSourceRepository @Inject()( _ <- Fox.successful(()) _ = removeAll() _ = dataSources.foreach(dataSource => insert(dataSource.id, dataSource)) - _ <- remoteWebKnossosClient.reportDataSources(dataSources) + _ <- remoteWebknossosClient.reportDataSources(dataSources) } yield () def cleanUpDataSource(dataSourceId: DataSourceId): Fox[Unit] = for { _ <- Fox.successful(remove(dataSourceId)) - _ <- remoteWebKnossosClient.deleteDataSource(dataSourceId) + _ <- remoteWebknossosClient.deleteDataSource(dataSourceId) } yield () } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FullMeshHelper.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FullMeshHelper.scala new file mode 100644 index 00000000000..091cd4f3c26 --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FullMeshHelper.scala @@ -0,0 +1,76 @@ +package com.scalableminds.webknossos.datastore.services + +import com.scalableminds.util.geometry.{Vec3Float, Vec3Int} +import com.scalableminds.util.time.Instant +import com.scalableminds.webknossos.datastore.draco.NativeDracoToStlConverter +import com.scalableminds.webknossos.datastore.models.VoxelPosition +import com.typesafe.scalalogging.LazyLogging + +import java.nio.{ByteBuffer, ByteOrder} + +trait FullMeshHelper extends LazyLogging { + protected lazy val dracoToStlConverter = new NativeDracoToStlConverter() + + protected lazy val adHocChunkSize: Vec3Int = Vec3Int(100, 100, 100) + + protected def generateNextTopLeftsFromNeighbors( + oldTopLeft: VoxelPosition, + neighborIds: List[Int], + chunkSize: Vec3Int, + visited: collection.mutable.Set[VoxelPosition]): List[VoxelPosition] = { + // 
front_xy, front_xz, front_yz, back_xy, back_xz, back_yz + val neighborLookup = Seq( + Vec3Int(0, 0, -1), + Vec3Int(0, -1, 0), + Vec3Int(-1, 0, 0), + Vec3Int(0, 0, 1), + Vec3Int(0, 1, 0), + Vec3Int(1, 0, 0), + ) + val neighborPositions = neighborIds.map { neighborId => + val neighborMultiplier = neighborLookup(neighborId) + oldTopLeft.move(neighborMultiplier.x * chunkSize.x * oldTopLeft.mag.x, + neighborMultiplier.y * chunkSize.y * oldTopLeft.mag.y, + neighborMultiplier.z * chunkSize.z * oldTopLeft.mag.z) + } + neighborPositions.filterNot(visited.contains) + } + + protected def adHocMeshToStl(vertexBuffer: Array[Float]): Array[Byte] = { + val numFaces = vertexBuffer.length / (3 * 3) // a face has three vertices, a vertex has three floats. + val outputNumBytes = numFaces * 50 + val output = ByteBuffer.allocate(outputNumBytes).order(ByteOrder.LITTLE_ENDIAN) + val unused = Array.fill[Byte](2)(0) + for (faceIndex <- 0 until numFaces) { + val v1 = Vec3Float(vertexBuffer(faceIndex), vertexBuffer(faceIndex + 1), vertexBuffer(faceIndex + 2)) + val v2 = Vec3Float(vertexBuffer(faceIndex + 3), vertexBuffer(faceIndex + 4), vertexBuffer(faceIndex + 5)) + val v3 = Vec3Float(vertexBuffer(faceIndex + 6), vertexBuffer(faceIndex + 7), vertexBuffer(faceIndex + 8)) + val norm = Vec3Float.crossProduct(v2 - v1, v3 - v1).normalize + output.putFloat(norm.x) + output.putFloat(norm.y) + output.putFloat(norm.z) + for (vertexIndex <- 0 until 3) { + for (dimIndex <- 0 until 3) { + output.putFloat(vertexBuffer(9 * faceIndex + 3 * vertexIndex + dimIndex)) + } + } + output.put(unused) + } + output.array() + } + + protected def combineEncodedChunksToStl(stlEncodedChunks: Seq[Array[Byte]]): Array[Byte] = { + val numFaces = stlEncodedChunks.map(_.length / 50).sum // our stl implementation writes exactly 50 bytes per face + val constantStlHeader = Array.fill[Byte](80)(0) + val outputNumBytes = 80 + 4 + stlEncodedChunks.map(_.length).sum + val output = 
ByteBuffer.allocate(outputNumBytes).order(ByteOrder.LITTLE_ENDIAN) + output.put(constantStlHeader) + output.putInt(numFaces) + stlEncodedChunks.foreach(output.put) + output.array() + } + + protected def logMeshingDuration(before: Instant, label: String, lengthBytes: Int): Unit = + logger.info(s"Served $lengthBytes-byte STL mesh via $label, took ${Instant.since(before)}") + +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala index c2f620091d4..fd3a4683e48 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshFileService.scala @@ -208,7 +208,7 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC val meshFileVersions = meshFileNames.map { fileName => val meshFilePath = layerDir.resolve(meshesDir).resolve(s"$fileName.$meshFileExtension") - mappingVersionForMeshFile(meshFilePath) + versionForMeshFile(meshFilePath) } val mappingNameFoxes = meshFileNames.lazyZip(meshFileVersions).map { (fileName, fileVersion) => @@ -235,7 +235,26 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC } ?~> "mesh.file.readEncoding.failed" } - private def mappingVersionForMeshFile(meshFilePath: Path): Long = + // Same as above but this variant constructs the meshFilePath itself and converts null to None + def mappingNameForMeshFile(organizationName: String, + datasetName: String, + dataLayerName: String, + meshFileName: String): Option[String] = { + val meshFilePath = + dataBaseDir + .resolve(organizationName) + .resolve(datasetName) + .resolve(dataLayerName) + .resolve(meshesDir) + .resolve(s"${meshFileName}.$meshFileExtension") + executeWithCachedHdf5(meshFilePath, meshFileCache) { cachedMeshFile => + 
cachedMeshFile.reader.string().getAttr("/", "mapping_name") + }.toOption.flatMap { value => + Option(value) // catch null + } + } + + private def versionForMeshFile(meshFilePath: Path): Long = executeWithCachedHdf5(meshFilePath, meshFileCache) { cachedMeshFile => cachedMeshFile.reader.int64().getAttr("/", "artifact_schema_version") }.toOption.getOrElse(0) @@ -262,20 +281,34 @@ class MeshFileService @Inject()(config: DataStoreConfig)(implicit ec: ExecutionC }.toFox.flatten ?~> "mesh.file.open.failed" } - def listMeshChunksForSegmentV3(organizationName: String, - datasetName: String, - dataLayerName: String, - listMeshChunksRequest: ListMeshChunksRequest): Box[WebknossosSegmentInfo] = { + def listMeshChunksForSegmentsV3(organizationName: String, + datasetName: String, + dataLayerName: String, + meshFileName: String, + segmentIds: Seq[Long]): Fox[WebknossosSegmentInfo] = { + val meshChunksForUnmappedSegments = segmentIds.map(segmentId => + listMeshChunksForSegmentV3(organizationName, datasetName, dataLayerName, meshFileName, segmentId).toOption) + val meshChunksForUnmappedSegmentsFlat = meshChunksForUnmappedSegments.flatten + for { + _ <- bool2Fox(meshChunksForUnmappedSegmentsFlat.nonEmpty) ?~> "zero chunks" ?~> "mesh.file.listChunks.failed" + chunkInfos = meshChunksForUnmappedSegmentsFlat.reduce(_.merge(_)) + } yield chunkInfos + } + + private def listMeshChunksForSegmentV3(organizationName: String, + datasetName: String, + dataLayerName: String, + meshFileName: String, + segmentId: Long): Box[WebknossosSegmentInfo] = { val meshFilePath = dataBaseDir .resolve(organizationName) .resolve(datasetName) .resolve(dataLayerName) .resolve(meshesDir) - .resolve(s"${listMeshChunksRequest.meshFile}.$meshFileExtension") + .resolve(s"${meshFileName}.$meshFileExtension") executeWithCachedHdf5(meshFilePath, meshFileCache) { cachedMeshFile => - val segmentId = listMeshChunksRequest.segmentId val encoding = cachedMeshFile.reader.string().getAttr("/", "mesh_format") val 
lodScaleMultiplier = cachedMeshFile.reader.float64().getAttr("/", "lod_scale_multiplier") val transform = cachedMeshFile.reader.float64().getMatrixAttr("/", "transform") diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala new file mode 100644 index 00000000000..34b88747fc3 --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/MeshMappingHelper.scala @@ -0,0 +1,70 @@ +package com.scalableminds.webknossos.datastore.services + +import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.option2Fox +import com.scalableminds.webknossos.datastore.storage.AgglomerateFileKey + +import scala.concurrent.ExecutionContext + +trait MeshMappingHelper { + + protected val dsRemoteWebknossosClient: DSRemoteWebknossosClient + protected val dsRemoteTracingstoreClient: DSRemoteTracingstoreClient + protected val binaryDataServiceHolder: BinaryDataServiceHolder + + protected def segmentIdsForAgglomerateIdIfNeeded( + organizationName: String, + datasetName: String, + dataLayerName: String, + targetMappingName: Option[String], + editableMappingTracingId: Option[String], + agglomerateId: Long, + mappingNameForMeshFile: Option[String], + token: Option[String])(implicit ec: ExecutionContext): Fox[List[Long]] = + targetMappingName match { + + case None => + // No mapping selected, assume id matches meshfile + Fox.successful(List(agglomerateId)) + case Some(mappingName) if mappingNameForMeshFile.contains(mappingName) => + // Mapping selected, but meshfile has the same mapping name in its metadata, assume id matches meshfile + Fox.successful(List(agglomerateId)) + case Some(mappingName) => + // Mapping selected, but meshfile does not have matching mapping name in its metadata, + // assume agglomerate id, fetch oversegmentation segment ids for it + val agglomerateFileKey = 
AgglomerateFileKey( + organizationName, + datasetName, + dataLayerName, + mappingName + ) + editableMappingTracingId match { + case Some(tracingId) => + for { + tracingstoreUri <- dsRemoteWebknossosClient.getTracingstoreUri + segmentIdsResult <- dsRemoteTracingstoreClient.getEditableMappingSegmentIdsForAgglomerate(tracingstoreUri, + tracingId, + agglomerateId, + token) + segmentIds <- if (segmentIdsResult.agglomerateIdIsPresent) + Fox.successful(segmentIdsResult.segmentIds) + else + for { + agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox + localSegmentIds <- agglomerateService.segmentIdsForAgglomerateId( + agglomerateFileKey, + agglomerateId + ) + } yield localSegmentIds + } yield segmentIds + case _ => + for { + agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox + segmentIds <- agglomerateService.segmentIdsForAgglomerateId( + agglomerateFileKey, + agglomerateId + ) + } yield segmentIds + } + } +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mcubes/MarchingCubes.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mcubes/MarchingCubes.scala index 981c7c1ffb5..415ee37437c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mcubes/MarchingCubes.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mcubes/MarchingCubes.scala @@ -10,7 +10,6 @@ object MarchingCubes { dataDimensions: Vec3Int, boundingBox: BoundingBox, segmentId: T, - subsamplingStrides: Vec3Double, offset: Vec3Double, scale: Vec3Double, vertexBuffer: mutable.ArrayBuffer[Vec3Double]): Unit = { @@ -52,7 +51,7 @@ object MarchingCubes { val position = Vec3Double(x, y, z) MarchingCubesTable.triangleTable(cubeIndex).foreach { edgeDelta => - vertexBuffer += ((position + edgeDelta) * subsamplingStrides + offset) * scale + vertexBuffer += (position + edgeDelta + offset) * scale } } } diff --git 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala index 6caa79ce701..83a1e325189 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala @@ -12,7 +12,7 @@ import com.scalableminds.webknossos.datastore.dataformats.wkw.{WKWDataLayer, WKW import com.scalableminds.webknossos.datastore.dataformats.zarr.{ZarrDataLayer, ZarrSegmentationLayer} import com.scalableminds.webknossos.datastore.dataformats.zarr3.{Zarr3DataLayer, Zarr3SegmentationLayer} import com.scalableminds.webknossos.datastore.models.datasource._ -import com.scalableminds.webknossos.datastore.services.{DSRemoteWebKnossosClient, DataSourceRepository} +import com.scalableminds.webknossos.datastore.services.{DSRemoteWebknossosClient, DataSourceRepository} import play.api.libs.json.{Json, OFormat} import java.nio.charset.StandardCharsets @@ -49,7 +49,7 @@ object DataLayerId { } class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, - remoteWebKnossosClient: DSRemoteWebKnossosClient, + remoteWebknossosClient: DSRemoteWebknossosClient, datasetSymlinkService: DatasetSymlinkService)(implicit ec: ExecutionContext) extends FoxImplicits { @@ -70,7 +70,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, None, List(), Some(composeRequest.targetFolderId)) - _ <- remoteWebKnossosClient.reserveDataSourceUpload(reserveUploadInfo, userToken) ?~> "Failed to reserve upload." + _ <- remoteWebknossosClient.reserveDataSourceUpload(reserveUploadInfo, userToken) ?~> "Failed to reserve upload." 
directory = uploadDirectory(composeRequest.organizationName, composeRequest.newDatasetName) _ = PathUtils.ensureDirectory(directory) dataSource <- createDatasource(composeRequest, composeRequest.organizationName) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala index 7fab19ba3fe..726f5ab6a58 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/RemoteSourceDescriptorService.scala @@ -6,7 +6,7 @@ import com.scalableminds.webknossos.datastore.DataStoreConfig import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.datavault.VaultPath import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId -import com.scalableminds.webknossos.datastore.services.DSRemoteWebKnossosClient +import com.scalableminds.webknossos.datastore.services.DSRemoteWebknossosClient import net.liftweb.common.Box import net.liftweb.common.Box.tryo @@ -17,7 +17,7 @@ import scala.concurrent.ExecutionContext case class RemoteSourceDescriptor(uri: URI, credential: Option[DataVaultCredential]) -class RemoteSourceDescriptorService @Inject()(dSRemoteWebKnossosClient: DSRemoteWebKnossosClient, +class RemoteSourceDescriptorService @Inject()(dSRemoteWebknossosClient: DSRemoteWebknossosClient, dataStoreConfig: DataStoreConfig, dataVaultService: DataVaultService) { @@ -93,7 +93,7 @@ class RemoteSourceDescriptorService @Inject()(dSRemoteWebKnossosClient: DSRemote private def credentialFor(magLocator: MagLocator)(implicit ec: ExecutionContext): Fox[DataVaultCredential] = magLocator.credentialId match { case Some(credentialId) => - dSRemoteWebKnossosClient.getCredential(credentialId) + 
dSRemoteWebknossosClient.getCredential(credentialId) case None => magLocator.credentials match { case Some(credential) => Fox.successful(credential) diff --git a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes index 695abcd8a71..f314dd33101 100644 --- a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes +++ b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes @@ -5,7 +5,7 @@ GET /health @com.scalableminds.webknossos.datastore.controllers.Application.health # Read image data -POST /datasets/:organizationName/:datasetName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebKnossos(token: Option[String], organizationName: String, datasetName: String, dataLayerName: String) +POST /datasets/:organizationName/:datasetName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(token: Option[String], organizationName: String, datasetName: String, dataLayerName: String) POST /datasets/:organizationName/:datasetName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboidPost(token: Option[String], organizationName: String, datasetName: String, dataLayerName: String) GET /datasets/:organizationName/:datasetName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboid(token: Option[String], organizationName: String, datasetName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) GET /datasets/:organizationName/:datasetName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.thumbnailJpeg(token: Option[String], organizationName: 
String, datasetName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) @@ -61,6 +61,7 @@ POST /datasets/:organizationName/:datasetName/layers/:dataLayerName/mes POST /datasets/:organizationName/:datasetName/layers/:dataLayerName/meshes/chunks/data @com.scalableminds.webknossos.datastore.controllers.DSMeshController.readMeshChunkV0(token: Option[String], organizationName: String, datasetName: String, dataLayerName: String) POST /datasets/:organizationName/:datasetName/layers/:dataLayerName/meshes/formatVersion/:formatVersion/chunks @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshChunksForSegmentForVersion(token: Option[String], organizationName: String, datasetName: String, dataLayerName: String, formatVersion: Int, targetMappingName: Option[String], editableMappingTracingId: Option[String]) POST /datasets/:organizationName/:datasetName/layers/:dataLayerName/meshes/formatVersion/:formatVersion/chunks/data @com.scalableminds.webknossos.datastore.controllers.DSMeshController.readMeshChunkForVersion(token: Option[String], organizationName: String, datasetName: String, dataLayerName: String, formatVersion: Int) +POST /datasets/:organizationName/:datasetName/layers/:dataLayerName/meshes/fullMesh.stl @com.scalableminds.webknossos.datastore.controllers.DSMeshController.loadFullMeshStl(token: Option[String], organizationName: String, datasetName: String, dataLayerName: String) # Connectome files GET /datasets/:organizationName/:datasetName/layers/:dataLayerName/connectomes @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listConnectomeFiles(token: Option[String], organizationName: String, datasetName: String, dataLayerName: String) diff --git a/webknossos-jni/readme.md b/webknossos-jni/readme.md new file mode 100644 index 
00000000000..bdca0bd9672 --- /dev/null +++ b/webknossos-jni/readme.md @@ -0,0 +1,28 @@ +# Native Functions for WEBKNOSSOS + +This subproject provides C++ functions for use in webknossos, using JNI + +## Changing Code for Existing Functions + + - If the function/class signatures do not change, compiling the changed cpp code should work with `sbt compile` (Auto-recompile on F5 after line changes does not seem to work) + +## Changing Function Signatures and Adding New Functions + + - Make sure everything currently compiles + - Stop WEBKNOSSOS + - Change the scala definition of the native functions, decorated with `@nativeLoader("webknossosNative0")` and `@native`. Compare the existing `NativeDracoToStlConverter` in the datastore module. + - run `sbt` in interactive mode + - in it, run `project webknossosDatastore` to switch to the datastore subproject + - then, still in sbt, run `javah` to run the header generation + - this should generate a header file in `webknossos-jni/src/include/` + - Implement the function defined in the header file in a cpp file also in `webknossos-jni/src` + - compile with `sbt compile` + - Note that moving the scala definition to a different package also requires this step, as the full name changes. + - If you run into trouble, `./clean` may help + + +## Adding native dependencies + + - Adapt src/CMakeLists.txt to include a `find_package` call with your dependency + - Run `sbt compile` to see if it can be found. + - A `target_link_libraries` call may also be needed to link against the dependency diff --git a/webknossos-jni/src/CMakeLists.txt b/webknossos-jni/src/CMakeLists.txt new file mode 100644 index 00000000000..4b815bc4dda --- /dev/null +++ b/webknossos-jni/src/CMakeLists.txt @@ -0,0 +1,36 @@ +cmake_minimum_required(VERSION 3.12) + +option(SBT "Set if invoked from sbt-jni" OFF) + +# Define project and related variables.
(required by sbt-jni) please use semantic versioning +project (webknossosJni) +set(PROJECT_VERSION_MAJOR 0) +set(PROJECT_VERSION_MINOR 0) +set(PROJECT_VERSION_PATCH 0) + +find_package(draco REQUIRED) +find_package(JNI REQUIRED) +if (JNI_FOUND) + message (STATUS "JNI include directories: ${JNI_INCLUDE_DIRS}") +endif() + +include_directories(.) +include_directories(include) +include_directories(${JNI_INCLUDE_DIRS}) + +# Sources +file(GLOB LIB_SRC + "*.c" + "*.cc" + "*.cpp" +) + + +# Setup installation targets +# (required by sbt-jni) major version should always be appended to library name +set (LIB_NAME ${PROJECT_NAME}${PROJECT_VERSION_MAJOR}) +add_library(${LIB_NAME} SHARED ${LIB_SRC}) + +target_link_libraries(${LIB_NAME} draco::draco) + +install(TARGETS ${LIB_NAME} LIBRARY DESTINATION .) diff --git a/webknossos-jni/src/dracoToStlDecoder.cpp b/webknossos-jni/src/dracoToStlDecoder.cpp new file mode 100644 index 00000000000..73ff0893055 --- /dev/null +++ b/webknossos-jni/src/dracoToStlDecoder.cpp @@ -0,0 +1,86 @@ +#include "com_scalableminds_webknossos_datastore_draco_NativeDracoToStlConverter.h" + +#include <draco/compression/decode.h> +#include <draco/core/vector_d.h> + +#include <string> + +void throwRuntimeException(JNIEnv * env, + const std::string msg) { + jclass exceptionClass = env -> FindClass("java/lang/RuntimeException"); + + if (exceptionClass != nullptr) { + env -> ThrowNew(exceptionClass, ("An error occurred in native code: " + msg).c_str()); + } +} + +// Takes a byte array containing a DRACO-Encoded mesh, adds offsetX, offsetY, offsetZ to each vertex +// And encodes the results as STL faces (50 bytes per face) +// No STL Header is included, as this will be called on chunks. The caller must add an stl header.
+JNIEXPORT jbyteArray JNICALL Java_com_scalableminds_webknossos_datastore_draco_NativeDracoToStlConverter_dracoToStl + (JNIEnv * env, jobject instance, jbyteArray inputJavaArray, jfloat offsetX, jfloat offsetY, jfloat offsetZ, jdouble scaleX, jdouble scaleY, jdouble scaleZ) { + jsize inputLength = env -> GetArrayLength(inputJavaArray); + jbyte * dataAsJByte = env -> GetByteArrayElements(inputJavaArray, NULL); + const char * inputBytes = reinterpret_cast < const char * > (dataAsJByte); + + try { + draco::Decoder decoder; + draco::DecoderBuffer dracoBuffer; + dracoBuffer.Init(inputBytes, inputLength); + + auto statusOrMesh = decoder.DecodeMeshFromBuffer( & dracoBuffer); + + if (statusOrMesh.ok()) { + std::unique_ptr < draco::Mesh > mesh = std::move(statusOrMesh).value(); + + // Successfully decoded DRACO bytes into a draco::Mesh object. Now encode it as STL faces. + draco::EncoderBuffer encodeBuffer; + + const int positionAttributeId = mesh -> GetNamedAttributeId(draco::GeometryAttribute::POSITION); + uint16_t unused = 0; + + for (draco::FaceIndex faceIndex(0); faceIndex < mesh -> num_faces(); ++faceIndex) { + const auto & face = mesh -> face(faceIndex); + const auto * const positionAttribute = mesh -> attribute(positionAttributeId); + + draco::Vector3f pos[3]; + positionAttribute -> GetMappedValue(face[0], & pos[0][0]); + positionAttribute -> GetMappedValue(face[1], & pos[1][0]); + positionAttribute -> GetMappedValue(face[2], & pos[2][0]); + draco::Vector3f norm = draco::CrossProduct(pos[1] - pos[0], pos[2] - pos[0]); + norm.Normalize(); + encodeBuffer.Encode(norm.data(), sizeof(float) * 3); + + for (int vertexIndex = 0; vertexIndex < 3; ++vertexIndex) { + pos[vertexIndex][0] += offsetX; + pos[vertexIndex][1] += offsetY; + pos[vertexIndex][2] += offsetZ; + pos[vertexIndex][0] *= scaleX; + pos[vertexIndex][1] *= scaleY; + pos[vertexIndex][2] *= scaleZ; + encodeBuffer.Encode( & pos[vertexIndex], sizeof(float) * 3); + } + + encodeBuffer.Encode( & unused, 2); // we 
write no face attributes, so attribute byte count is zero + } + + const jsize outputLength = static_cast < jsize > (encodeBuffer.size()); + jbyteArray result = env -> NewByteArray(outputLength); + env -> SetByteArrayRegion(result, 0, outputLength, reinterpret_cast < const jbyte * > (encodeBuffer.data())); + env -> ReleaseByteArrayElements(inputJavaArray, dataAsJByte, 0); + return result; + } else { + env -> ReleaseByteArrayElements(inputJavaArray, dataAsJByte, 0); + throwRuntimeException(env, "Invalid DRACO Encoding in Mesh Byte Array"); + return nullptr; + } + } catch (const std::exception & e) { + env -> ReleaseByteArrayElements(inputJavaArray, dataAsJByte, 0); + throwRuntimeException(env, "Native Exception while transcoding DRACO Mesh to STL Faces: " + std::string(e.what())); + return nullptr; + } catch (...) { + env -> ReleaseByteArrayElements(inputJavaArray, dataAsJByte, 0); + throwRuntimeException(env, "Native Exception while transcoding DRACO Mesh to STL Faces"); + return nullptr; + } + } diff --git a/webknossos-jni/src/include/com_scalableminds_webknossos_datastore_draco_NativeDracoToStlConverter.h b/webknossos-jni/src/include/com_scalableminds_webknossos_datastore_draco_NativeDracoToStlConverter.h new file mode 100644 index 00000000000..850e98390a2 --- /dev/null +++ b/webknossos-jni/src/include/com_scalableminds_webknossos_datastore_draco_NativeDracoToStlConverter.h @@ -0,0 +1,21 @@ +/* DO NOT EDIT THIS FILE - it is machine generated */ +#include <jni.h> +/* Header for class com_scalableminds_webknossos_datastore_draco_NativeDracoToStlConverter */ + +#ifndef _Included_com_scalableminds_webknossos_datastore_draco_NativeDracoToStlConverter +#define _Included_com_scalableminds_webknossos_datastore_draco_NativeDracoToStlConverter +#ifdef __cplusplus
extern "C" { +#endif +/* + * Class: com_scalableminds_webknossos_datastore_draco_NativeDracoToStlConverter + * Method: dracoToStl + * Signature: ([BFFFDDD)[B + */ +JNIEXPORT jbyteArray JNICALL
Java_com_scalableminds_webknossos_datastore_draco_NativeDracoToStlConverter_dracoToStl + (JNIEnv *, jobject, jbyteArray, jfloat, jfloat, jfloat, jdouble, jdouble, jdouble); + +#ifdef __cplusplus +} +#endif +#endif diff --git a/webknossos-tracingstore/Dockerfile b/webknossos-tracingstore/Dockerfile index 95b24fe5e27..04d24005720 100644 --- a/webknossos-tracingstore/Dockerfile +++ b/webknossos-tracingstore/Dockerfile @@ -1,5 +1,9 @@ FROM eclipse-temurin:21 +RUN apt-get update \ + && apt-get -y install libdraco4 \ + && rm -rf /var/lib/apt/lists/* + RUN mkdir -p /webknossos-tracingstore \ && groupadd -g 1000 -r webknossos \ && useradd -u 1000 -r -g webknossos webknossos \ diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala index 2ce2222aedb..a42e4cc1fac 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala @@ -2,31 +2,35 @@ package com.scalableminds.webknossos.tracingstore import com.google.inject.Inject import com.scalableminds.util.cache.AlfuCache -import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong import com.scalableminds.webknossos.datastore.helpers.MissingBucketHeaders -import com.scalableminds.webknossos.datastore.models.WebKnossosDataRequest +import com.scalableminds.webknossos.datastore.models.WebknossosDataRequest +import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource import com.scalableminds.webknossos.datastore.rpc.RPC -import 
com.scalableminds.webknossos.tracingstore.tracings.editablemapping.RemoteFallbackLayer +import com.scalableminds.webknossos.datastore.services.FullMeshRequest +import com.scalableminds.webknossos.tracingstore.tracings.RemoteFallbackLayer import com.typesafe.scalalogging.LazyLogging import play.api.http.Status import play.api.inject.ApplicationLifecycle import scala.concurrent.ExecutionContext +import scala.concurrent.duration.DurationInt class TSRemoteDatastoreClient @Inject()( rpc: RPC, - remoteWebKnossosClient: TSRemoteWebKnossosClient, + remoteWebknossosClient: TSRemoteWebknossosClient, val lifecycle: ApplicationLifecycle )(implicit ec: ExecutionContext) extends LazyLogging with MissingBucketHeaders { private lazy val dataStoreUriCache: AlfuCache[(String, String), String] = AlfuCache() + private lazy val voxelSizeCache: AlfuCache[String, Vec3Double] = AlfuCache(timeToLive = 10 minutes) private lazy val largestAgglomerateIdCache: AlfuCache[(RemoteFallbackLayer, String, Option[String]), Long] = - AlfuCache() + AlfuCache(timeToLive = 10 minutes) def getAgglomerateSkeleton(userToken: Option[String], remoteFallbackLayer: RemoteFallbackLayer, @@ -40,7 +44,7 @@ class TSRemoteDatastoreClient @Inject()( } yield result def getData(remoteFallbackLayer: RemoteFallbackLayer, - dataRequests: List[WebKnossosDataRequest], + dataRequests: List[WebknossosDataRequest], userToken: Option[String]): Fox[(Array[Byte], List[Int])] = for { remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) @@ -111,6 +115,29 @@ class TSRemoteDatastoreClient @Inject()( ) } + def loadFullMeshStl(token: Option[String], + remoteFallbackLayer: RemoteFallbackLayer, + fullMeshRequest: FullMeshRequest): Fox[Array[Byte]] = + for { + remoteLayerUri <- getRemoteLayerUri(remoteFallbackLayer) + result <- rpc(s"$remoteLayerUri/meshes/fullMesh.stl") + .addQueryStringOptional("token", token) + .postJsonWithBytesResponse(fullMeshRequest) + } yield result + + def voxelSizeForTracingWithCache(tracingId: String, 
token: Option[String]): Fox[Vec3Double] = + voxelSizeCache.getOrLoad(tracingId, tId => voxelSizeForTracing(tId, token)) + + private def voxelSizeForTracing(tracingId: String, token: Option[String]): Fox[Vec3Double] = + for { + dataSourceId <- remoteWebknossosClient.getDataSourceIdForTracing(tracingId) + dataStoreUri <- dataStoreUriWithCache(dataSourceId.team, dataSourceId.name) + result <- rpc(s"$dataStoreUri/data/datasets/${dataSourceId.team}/${dataSourceId.name}/readInboxDataSource") + .addQueryStringOptional("token", token) + .getWithJsonResponse[InboxDataSource] + scale <- result.scaleOpt ?~> "could not determine voxel size of dataset" + } yield scale + private def getRemoteLayerUri(remoteLayer: RemoteFallbackLayer): Fox[String] = for { datastoreUri <- dataStoreUriWithCache(remoteLayer.organizationName, remoteLayer.dataSetName) @@ -120,6 +147,6 @@ class TSRemoteDatastoreClient @Inject()( private def dataStoreUriWithCache(organizationName: String, datasetName: String): Fox[String] = dataStoreUriCache.getOrLoad( (organizationName, datasetName), - keyTuple => remoteWebKnossosClient.getDataStoreUriForDataSource(keyTuple._1, keyTuple._2)) + keyTuple => remoteWebknossosClient.getDataStoreUriForDataSource(keyTuple._1, keyTuple._2)) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebKnossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala similarity index 83% rename from webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebKnossosClient.scala rename to webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala index a1774b8cbe8..6174fb4be91 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebKnossosClient.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala @@ -8,7 +8,7 @@ import 
com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, D import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.{ AccessTokenService, - RemoteWebKnossosClient, + RemoteWebknossosClient, UserAccessAnswer, UserAccessRequest } @@ -29,34 +29,34 @@ object TracingUpdatesReport { implicit val jsonFormat: OFormat[TracingUpdatesReport] = Json.format[TracingUpdatesReport] } -class TSRemoteWebKnossosClient @Inject()( +class TSRemoteWebknossosClient @Inject()( rpc: RPC, config: TracingStoreConfig, val lifecycle: ApplicationLifecycle -) extends RemoteWebKnossosClient +) extends RemoteWebknossosClient with LazyLogging { private val tracingStoreKey: String = config.Tracingstore.key private val tracingStoreName: String = config.Tracingstore.name - private val webKnossosUri: String = config.Tracingstore.WebKnossos.uri + private val webknossosUri: String = config.Tracingstore.WebKnossos.uri private lazy val dataSourceIdByTracingIdCache: AlfuCache[String, DataSourceId] = AlfuCache() def reportTracingUpdates(tracingUpdatesReport: TracingUpdatesReport): Fox[WSResponse] = - rpc(s"$webKnossosUri/api/tracingstores/$tracingStoreName/handleTracingUpdateReport") + rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/handleTracingUpdateReport") .addQueryString("key" -> tracingStoreKey) .silent .post(Json.toJson(tracingUpdatesReport)) def getDataSourceForTracing(tracingId: String): Fox[DataSourceLike] = - rpc(s"$webKnossosUri/api/tracingstores/$tracingStoreName/dataSource") + rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataSource") .addQueryString("tracingId" -> tracingId) .addQueryString("key" -> tracingStoreKey) .getWithJsonResponse[DataSourceLike] def getDataStoreUriForDataSource(organizationName: String, datasetName: String): Fox[String] = - rpc(s"$webKnossosUri/api/tracingstores/$tracingStoreName/dataStoreUri/$datasetName") + 
rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataStoreUri/$datasetName") .addQueryString("organizationName" -> organizationName) .addQueryString("key" -> tracingStoreKey) .silent @@ -66,18 +66,18 @@ class TSRemoteWebKnossosClient @Inject()( dataSourceIdByTracingIdCache.getOrLoad( tracingId, tracingId => - rpc(s"$webKnossosUri/api/tracingstores/$tracingStoreName/dataSourceId") + rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataSourceId") .addQueryString("tracingId" -> tracingId) .addQueryString("key" -> tracingStoreKey) .getWithJsonResponse[DataSourceId] ) override def requestUserAccess(token: Option[String], accessRequest: UserAccessRequest): Fox[UserAccessAnswer] = - rpc(s"$webKnossosUri/api/tracingstores/$tracingStoreName/validateUserAccess") + rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/validateUserAccess") .addQueryString("key" -> tracingStoreKey) .addQueryStringOptional("token", token) .postJsonWithJsonResponse[UserAccessRequest, UserAccessAnswer](accessRequest) } -class TracingStoreAccessTokenService @Inject()(val remoteWebKnossosClient: TSRemoteWebKnossosClient) +class TracingStoreAccessTokenService @Inject()(val remoteWebknossosClient: TSRemoteWebknossosClient) extends AccessTokenService diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala index 5886c45b7bf..cd6fb91fc9d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TracingStoreModule.scala @@ -3,7 +3,7 @@ package com.scalableminds.webknossos.tracingstore import org.apache.pekko.actor.ActorSystem import com.google.inject.AbstractModule import com.google.inject.name.Names -import com.scalableminds.webknossos.datastore.services.AdHocMeshingServiceHolder +import 
com.scalableminds.webknossos.datastore.services.AdHocMeshServiceHolder import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.TracingDataStore import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService @@ -20,10 +20,10 @@ class TracingStoreModule extends AbstractModule { bind(classOf[SkeletonTracingService]).asEagerSingleton() bind(classOf[VolumeTracingService]).asEagerSingleton() bind(classOf[TracingStoreAccessTokenService]).asEagerSingleton() - bind(classOf[TSRemoteWebKnossosClient]).asEagerSingleton() + bind(classOf[TSRemoteWebknossosClient]).asEagerSingleton() bind(classOf[TSRemoteDatastoreClient]).asEagerSingleton() bind(classOf[EditableMappingService]).asEagerSingleton() bind(classOf[TSSlackNotificationService]).asEagerSingleton() - bind(classOf[AdHocMeshingServiceHolder]).asEagerSingleton() + bind(classOf[AdHocMeshServiceHolder]).asEagerSingleton() } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala index 9688bb73674..f56b6d0d26e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/SkeletonTracingController.scala @@ -8,7 +8,7 @@ import com.scalableminds.webknossos.datastore.services.UserAccessRequest import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.skeleton._ import com.scalableminds.webknossos.tracingstore.tracings.volume.MergedVolumeStats -import com.scalableminds.webknossos.tracingstore.{TSRemoteWebKnossosClient, TracingStoreAccessTokenService} +import 
com.scalableminds.webknossos.tracingstore.{TSRemoteWebknossosClient, TracingStoreAccessTokenService} import net.liftweb.common.Empty import play.api.i18n.Messages import play.api.libs.json.Json @@ -17,7 +17,7 @@ import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import scala.concurrent.ExecutionContext class SkeletonTracingController @Inject()(val tracingService: SkeletonTracingService, - val remoteWebKnossosClient: TSRemoteWebKnossosClient, + val remoteWebknossosClient: TSRemoteWebknossosClient, val accessTokenService: TracingStoreAccessTokenService, val slackNotificationService: TSSlackNotificationService)( implicit val ec: ExecutionContext, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala index 94029ddd710..67e2de89ef8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/TracingController.scala @@ -13,7 +13,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.{ UpdateActionGroup } import com.scalableminds.webknossos.tracingstore.{ - TSRemoteWebKnossosClient, + TSRemoteWebknossosClient, TracingStoreAccessTokenService, TracingUpdatesReport } @@ -30,7 +30,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C def tracingService: TracingService[T] - def remoteWebKnossosClient: TSRemoteWebKnossosClient + def remoteWebknossosClient: TSRemoteWebknossosClient def accessTokenService: TracingStoreAccessTokenService @@ -223,7 +223,7 @@ trait TracingController[T <: GeneratedMessage, Ts <: GeneratedMessage] extends C viewChangesCount = updateGroups.map(_.viewChangesCount).sum, userToken ) - remoteWebKnossosClient.reportTracingUpdates(report).flatMap { _ => + 
remoteWebknossosClient.reportTracingUpdates(report).flatMap { _ => updateGroups.foldLeft(currentCommittedVersion) { (previousVersion, updateGroup) => previousVersion.flatMap { prevVersion: Long => if (prevVersion + 1 == updateGroup.version) { diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index c5fec1a0af9..44c35d09ab7 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -12,11 +12,15 @@ import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer} import com.scalableminds.webknossos.datastore.models.{ AdditionalCoordinate, - WebKnossosDataRequest, + WebknossosDataRequest, WebknossosAdHocMeshRequest } import com.scalableminds.webknossos.datastore.rpc.RPC -import com.scalableminds.webknossos.datastore.services.{EditableMappingSegmentListResult, UserAccessRequest} +import com.scalableminds.webknossos.datastore.services.{ + EditableMappingSegmentListResult, + FullMeshRequest, + UserAccessRequest +} import com.scalableminds.webknossos.tracingstore.slacknotification.TSSlackNotificationService import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{ EditableMappingService, @@ -28,6 +32,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ MergedVolumeStats, ResolutionRestrictions, SegmentStatisticsParameters, + TSFullMeshService, UpdateMappingNameAction, VolumeDataZipFormat, VolumeSegmentIndexService, @@ -37,7 +42,7 @@ import com.scalableminds.webknossos.tracingstore.tracings.volume.{ import 
com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, UpdateActionGroup} import com.scalableminds.webknossos.tracingstore.{ TSRemoteDatastoreClient, - TSRemoteWebKnossosClient, + TSRemoteWebknossosClient, TracingStoreAccessTokenService, TracingStoreConfig } @@ -68,9 +73,10 @@ class VolumeTracingController @Inject()( val accessTokenService: TracingStoreAccessTokenService, editableMappingService: EditableMappingService, val slackNotificationService: TSSlackNotificationService, - val remoteWebKnossosClient: TSRemoteWebKnossosClient, + val remoteWebknossosClient: TSRemoteWebknossosClient, volumeSegmentStatisticsService: VolumeSegmentStatisticsService, volumeSegmentIndexService: VolumeSegmentIndexService, + fullMeshService: TSFullMeshService, val rpc: RPC)(implicit val ec: ExecutionContext, val bodyParsers: PlayBodyParsers) extends TracingController[VolumeTracing, VolumeTracings] with ProtoGeometryImplicits @@ -162,8 +168,8 @@ class VolumeTracingController @Inject()( } } - def data(token: Option[String], tracingId: String): Action[List[WebKnossosDataRequest]] = - Action.async(validateJson[List[WebKnossosDataRequest]]) { implicit request => + def data(token: Option[String], tracingId: String): Action[List[WebknossosDataRequest]] = + Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => log() { accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { @@ -302,6 +308,15 @@ class VolumeTracingController @Inject()( } } + def loadFullMeshStl(token: Option[String], tracingId: String): Action[FullMeshRequest] = + Action.async(validateJson[FullMeshRequest]) { implicit request => + accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { + for { + data: Array[Byte] <- fullMeshService.loadFor(token: Option[String], tracingId, request.body) ?~> "mesh.file.loadChunk.failed" + } yield Ok(data) + } + } + private def 
getNeighborIndices(neighbors: List[Int]) = List("NEIGHBORS" -> formatNeighborList(neighbors), "Access-Control-Expose-Headers" -> "NEIGHBORS") diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala index 5334147012c..9cd0fe86fa8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala @@ -10,14 +10,14 @@ import com.scalableminds.webknossos.datastore.dataformats.zarr.{ZarrCoordinatesP import com.scalableminds.webknossos.datastore.datareaders.zarr.{NgffGroupHeader, NgffMetadata, ZarrHeader} import com.scalableminds.webknossos.datastore.datareaders.{ArrayOrder, AxisOrder} import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import com.scalableminds.webknossos.datastore.models.WebKnossosDataRequest +import com.scalableminds.webknossos.datastore.models.WebknossosDataRequest import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, ElementClass} import com.scalableminds.webknossos.datastore.services.UserAccessRequest import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeTracingService import com.scalableminds.webknossos.tracingstore.{ TSRemoteDatastoreClient, - TSRemoteWebKnossosClient, + TSRemoteWebknossosClient, TracingStoreAccessTokenService } import play.api.i18n.Messages @@ -31,7 +31,7 @@ class VolumeTracingZarrStreamingController @Inject()( accessTokenService: TracingStoreAccessTokenService, editableMappingService: EditableMappingService, remoteDataStoreClient: TSRemoteDatastoreClient, - 
remoteWebKnossosClient: TSRemoteWebKnossosClient)(implicit ec: ExecutionContext) + remoteWebknossosClient: TSRemoteWebknossosClient)(implicit ec: ExecutionContext) extends ExtendedController with ProtoGeometryImplicits with FoxImplicits { @@ -152,7 +152,7 @@ class VolumeTracingZarrStreamingController @Inject()( tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") ~> NOT_FOUND existingMags = tracing.resolutions.map(vec3IntFromProto) - dataSource <- remoteWebKnossosClient.getDataSourceForTracing(tracingId) ~> NOT_FOUND + dataSource <- remoteWebknossosClient.getDataSourceForTracing(tracingId) ~> NOT_FOUND omeNgffHeader = NgffMetadata.fromNameScaleAndMags(tracingId, dataSourceScale = dataSource.scale, mags = existingMags.toList) @@ -193,7 +193,7 @@ class VolumeTracingZarrStreamingController @Inject()( (c, x, y, z) <- ZarrCoordinatesParser.parseDotCoordinates(cxyz) ?~> Messages("zarr.invalidChunkCoordinates") ~> NOT_FOUND _ <- bool2Fox(c == 0) ~> Messages("zarr.invalidFirstChunkCoord") ~> NOT_FOUND cubeSize = DataLayer.bucketLength - wkRequest = WebKnossosDataRequest( + wkRequest = WebknossosDataRequest( position = Vec3Int(x, y, z) * cubeSize * magParsed, mag = magParsed, cubeSize = cubeSize, @@ -229,7 +229,7 @@ class VolumeTracingZarrStreamingController @Inject()( if (missingBucketIndices.nonEmpty) { for { remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) ?~> "No data at coordinates, no fallback layer defined" - request = WebKnossosDataRequest( + request = WebknossosDataRequest( position = position * mag * cubeSize, mag = mag, cubeSize = cubeSize, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/RemoteFallbackLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala similarity index 79% rename from 
webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/RemoteFallbackLayer.scala rename to webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala index 558687d0ce4..655967a9c05 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/RemoteFallbackLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala @@ -1,12 +1,13 @@ -package com.scalableminds.webknossos.tracingstore.tracings.editablemapping +package com.scalableminds.webknossos.tracingstore.tracings import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.tools.Fox import com.scalableminds.util.tools.Fox.option2Fox import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto -import com.scalableminds.webknossos.datastore.models.WebKnossosDataRequest -import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebKnossosClient} +import com.scalableminds.webknossos.datastore.models.WebknossosDataRequest +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.FallbackDataKey +import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} import scala.concurrent.ExecutionContext @@ -17,7 +18,7 @@ case class RemoteFallbackLayer(organizationName: String, trait FallbackDataHelper { def remoteDatastoreClient: TSRemoteDatastoreClient - def remoteWebKnossosClient: TSRemoteWebKnossosClient + def remoteWebknossosClient: TSRemoteWebknossosClient private lazy val fallbackDataCache: AlfuCache[FallbackDataKey, (Array[Byte], List[Int])] = AlfuCache(maxCapacity = 3000) @@ -26,12 +27,12 @@ trait FallbackDataHelper { implicit ec: ExecutionContext): Fox[RemoteFallbackLayer] = for { layerName <- tracing.fallbackLayer.toFox ?~> 
"This feature is only defined on volume annotations with fallback segmentation layer." - dataSetId <- remoteWebKnossosClient.getDataSourceIdForTracing(tracingId) + dataSetId <- remoteWebknossosClient.getDataSourceIdForTracing(tracingId) } yield RemoteFallbackLayer(dataSetId.team, dataSetId.name, layerName, tracing.elementClass) def getFallbackDataFromDatastore( remoteFallbackLayer: RemoteFallbackLayer, - dataRequests: List[WebKnossosDataRequest], + dataRequests: List[WebknossosDataRequest], userToken: Option[String])(implicit ec: ExecutionContext): Fox[(Array[Byte], List[Int])] = fallbackDataCache.getOrLoad(FallbackDataKey(remoteFallbackLayer, dataRequests, userToken), k => remoteDatastoreClient.getData(k.remoteFallbackLayer, k.dataRequests, k.userToken)) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala index a940dd1ea70..aaf3c7f804d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala @@ -6,7 +6,7 @@ import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.dataformats.BucketProvider import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits -import com.scalableminds.webknossos.datastore.models.{BucketPosition, WebKnossosDataRequest} +import com.scalableminds.webknossos.datastore.models.{BucketPosition, WebknossosDataRequest} import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource.{ AdditionalAxis, @@ -41,7 +41,7 @@ 
class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketP requestedVersion = None, remoteFallbackLayer = remoteFallbackLayer, userToken = layer.token) - dataRequest: WebKnossosDataRequest = WebKnossosDataRequest( + dataRequest: WebknossosDataRequest = WebknossosDataRequest( position = Vec3Int(bucket.topLeft.mag1X, bucket.topLeft.mag1Y, bucket.topLeft.mag1Z), mag = bucket.mag, cubeSize = layer.lengthOfUnderlyingCubes(bucket.mag), diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 38a197bd0c8..c45efebf199 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -16,17 +16,19 @@ import com.scalableminds.webknossos.datastore.models.DataRequestCollection.DataR import com.scalableminds.webknossos.datastore.models._ import com.scalableminds.webknossos.datastore.models.requests.DataServiceDataRequest import com.scalableminds.webknossos.datastore.services.{ - BinaryDataService, AdHocMeshRequest, AdHocMeshService, - AdHocMeshingServiceHolder + AdHocMeshServiceHolder, + BinaryDataService } import com.scalableminds.webknossos.tracingstore.tracings.{ + FallbackDataHelper, KeyValueStoreImplicits, + RemoteFallbackLayer, TracingDataStore, VersionedKeyValuePair } -import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebKnossosClient} +import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Box, Empty, Failure, Full} import net.liftweb.common.Box.tryo @@ -43,7 +45,7 @@ import 
scala.jdk.CollectionConverters.CollectionHasAsScala case class FallbackDataKey( remoteFallbackLayer: RemoteFallbackLayer, - dataRequests: List[WebKnossosDataRequest], + dataRequests: List[WebknossosDataRequest], userToken: Option[String] ) @@ -87,9 +89,9 @@ object NodeWithPosition { class EditableMappingService @Inject()( val tracingDataStore: TracingDataStore, - val adHocMeshingServiceHolder: AdHocMeshingServiceHolder, + val adHocMeshServiceHolder: AdHocMeshServiceHolder, val remoteDatastoreClient: TSRemoteDatastoreClient, - val remoteWebKnossosClient: TSRemoteWebKnossosClient + val remoteWebknossosClient: TSRemoteWebknossosClient )(implicit ec: ExecutionContext) extends KeyValueStoreImplicits with FallbackDataHelper @@ -102,8 +104,8 @@ class EditableMappingService @Inject()( private def generateId: String = UUID.randomUUID.toString val binaryDataService = new BinaryDataService(Paths.get(""), 100, None, None, None, None, None) - adHocMeshingServiceHolder.tracingStoreAdHocMeshingConfig = (binaryDataService, 30 seconds, 1) - private val adHocMeshingService: AdHocMeshService = adHocMeshingServiceHolder.tracingStoreAdHocMeshingService + adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) + private val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService private lazy val materializedInfoCache: AlfuCache[(String, Long), EditableMappingInfo] = AlfuCache(maxCapacity = 100) @@ -531,13 +533,12 @@ class EditableMappingService @Inject()( dataLayer = segmentationLayer, cuboid = request.cuboid(segmentationLayer), segmentId = request.segmentId, - subsamplingStrides = request.subsamplingStrides, scale = request.scale, mapping = None, mappingType = None, findNeighbors = request.findNeighbors ) - result <- adHocMeshingService.requestAdHocMeshViaActor(adHocMeshRequest) + result <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) } yield result def agglomerateGraphKey(mappingId: String, agglomerateId: 
Long): String = diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 4315ea3d4e1..8f51e70c2e1 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -9,7 +9,11 @@ import com.scalableminds.webknossos.datastore.SegmentToAgglomerateProto.{ SegmentToAgglomerateProto } import com.scalableminds.webknossos.tracingstore.TSRemoteDatastoreClient -import com.scalableminds.webknossos.tracingstore.tracings.{KeyValueStoreImplicits, TracingDataStore} +import com.scalableminds.webknossos.tracingstore.tracings.{ + KeyValueStoreImplicits, + RemoteFallbackLayer, + TracingDataStore +} import com.typesafe.scalalogging.LazyLogging import net.liftweb.common.{Empty, Failure, Full} import net.liftweb.common.Box.tryo diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala new file mode 100644 index 00000000000..89fea311db5 --- /dev/null +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala @@ -0,0 +1,165 @@ +package com.scalableminds.webknossos.tracingstore.tracings.volume + +import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} +import com.scalableminds.util.time.Instant +import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.Fox.{bool2Fox, option2Fox} +import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing +import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto 
+import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits +import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer} +import com.scalableminds.webknossos.datastore.models.{BucketPosition, VoxelPosition, WebknossosAdHocMeshRequest} +import com.scalableminds.webknossos.datastore.services.{FullMeshHelper, FullMeshRequest} +import com.scalableminds.webknossos.tracingstore.tracings.FallbackDataHelper +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService +import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient} +import com.typesafe.scalalogging.LazyLogging + +import javax.inject.Inject +import scala.concurrent.ExecutionContext + +class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, + editableMappingService: EditableMappingService, + volumeSegmentIndexService: VolumeSegmentIndexService, + val remoteDatastoreClient: TSRemoteDatastoreClient, + val remoteWebknossosClient: TSRemoteWebknossosClient) + extends FallbackDataHelper + with ProtoGeometryImplicits + with FullMeshHelper + with LazyLogging { + + def loadFor(token: Option[String], tracingId: String, fullMeshRequest: FullMeshRequest)( + implicit ec: ExecutionContext): Fox[Array[Byte]] = + for { + tracing <- volumeTracingService.find(tracingId) ?~> "tracing.notFound" + data <- if (fullMeshRequest.meshFileName.isDefined) + loadFullMeshFromMeshfile(token, tracing, tracingId, fullMeshRequest) + else loadFullMeshFromAdHoc(token, tracing, tracingId, fullMeshRequest) + } yield data + + private def loadFullMeshFromMeshfile( + token: Option[String], + tracing: VolumeTracing, + tracingId: String, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[Array[Byte]] = + for { + remoteFallbackLayer <- remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + fullMeshRequestAdapted = if (tracing.mappingIsEditable.getOrElse(false)) + 
fullMeshRequest.copy(mappingName = tracing.mappingName, + editableMappingTracingId = Some(tracingId), + mappingType = Some("HDF5")) + else fullMeshRequest + array <- remoteDatastoreClient.loadFullMeshStl(token, remoteFallbackLayer, fullMeshRequestAdapted) + } yield array + + private def loadFullMeshFromAdHoc(token: Option[String], + tracing: VolumeTracing, + tracingId: String, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[Array[Byte]] = + for { + mag <- fullMeshRequest.mag.toFox ?~> "mag.neededForAdHoc" + _ <- bool2Fox(tracing.resolutions.contains(vec3IntToProto(mag))) ?~> "mag.notPresentInTracing" + before = Instant.now + voxelSize <- remoteDatastoreClient.voxelSizeForTracingWithCache(tracingId, token) ?~> "voxelSize.failedToFetch" + verticesForChunks <- if (tracing.hasSegmentIndex.getOrElse(false)) + getAllAdHocChunksWithSegmentIndex(token, tracing, tracingId, mag, voxelSize, fullMeshRequest) + else + getAllAdHocChunksWithNeighborLogic(token, + tracing, + tracingId, + mag, + voxelSize, + fullMeshRequest, + fullMeshRequest.seedPosition.map(sp => VoxelPosition(sp.x, sp.y, sp.z, mag)), + adHocChunkSize) + encoded = verticesForChunks.map(adHocMeshToStl) + array = combineEncodedChunksToStl(encoded) + _ = logMeshingDuration(before, "ad-hoc meshing (tracingstore)", array.length) + } yield array + + private def getAllAdHocChunksWithSegmentIndex( + token: Option[String], + tracing: VolumeTracing, + tracingId: String, + mag: Vec3Int, + voxelSize: Vec3Double, + fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext): Fox[List[Array[Float]]] = + for { + bucketPositionsRaw: ListOfVec3IntProto <- volumeSegmentIndexService + .getSegmentToBucketIndexWithEmptyFallbackWithoutBuffer(tracingId, + fullMeshRequest.segmentId, + mag, + fullMeshRequest.additionalCoordinates, + AdditionalAxis.fromProtosAsOpt(tracing.additionalAxes)) + bucketPositions = bucketPositionsRaw.values + .map(vec3IntFromProto) + .map(_ * mag * DataLayer.bucketLength) + .map(bp 
=> BucketPosition(bp.x, bp.y, bp.z, mag, fullMeshRequest.additionalCoordinates)) + .toList + vertexChunksWithNeighbors: List[(Array[Float], List[Int])] <- Fox.serialCombined(bucketPositions) { + bucketPosition => + val adHocMeshRequest = WebknossosAdHocMeshRequest( + position = Vec3Int(bucketPosition.voxelMag1X, bucketPosition.voxelMag1Y, bucketPosition.voxelMag1Z), + mag = mag, + cubeSize = Vec3Int.full(DataLayer.bucketLength + 1), + fullMeshRequest.segmentId, + voxelSize, + fullMeshRequest.mappingName, + fullMeshRequest.mappingType, + fullMeshRequest.additionalCoordinates, + findNeighbors = false + ) + loadMeshChunkFromAdHoc(token, tracing, adHocMeshRequest, tracingId) + } + allVertices = vertexChunksWithNeighbors.map(_._1) + } yield allVertices + + private def getAllAdHocChunksWithNeighborLogic(token: Option[String], + tracing: VolumeTracing, + tracingId: String, + mag: Vec3Int, + voxelSize: Vec3Double, + fullMeshRequest: FullMeshRequest, + topLeftOpt: Option[VoxelPosition], + chunkSize: Vec3Int, + visited: collection.mutable.Set[VoxelPosition] = + collection.mutable.Set[VoxelPosition]())( + implicit ec: ExecutionContext): Fox[List[Array[Float]]] = + for { + topLeft <- topLeftOpt.toFox ?~> "seedPosition.neededForAdHoc" + adHocMeshRequest = WebknossosAdHocMeshRequest( + position = Vec3Int(topLeft.mag1X, topLeft.mag1Y, topLeft.mag1Z), + mag = mag, + cubeSize = Vec3Int(chunkSize.x + 1, chunkSize.y + 1, chunkSize.z + 1), + fullMeshRequest.segmentId, + voxelSize, + fullMeshRequest.mappingName, + fullMeshRequest.mappingType, + fullMeshRequest.additionalCoordinates + ) + _ = visited += topLeft + (vertices: Array[Float], neighbors) <- loadMeshChunkFromAdHoc(token, tracing, adHocMeshRequest, tracingId) + nextPositions: List[VoxelPosition] = generateNextTopLeftsFromNeighbors(topLeft, neighbors, chunkSize, visited) + _ = visited ++= nextPositions + neighborVerticesNested <- Fox.serialCombined(nextPositions) { position: VoxelPosition => + 
getAllAdHocChunksWithNeighborLogic(token, + tracing, + tracingId, + mag, + voxelSize, + fullMeshRequest, + Some(position), + chunkSize, + visited) + } + allVertices: List[Array[Float]] = vertices +: neighborVerticesNested.flatten + } yield allVertices + + private def loadMeshChunkFromAdHoc(token: Option[String], + tracing: VolumeTracing, + adHocMeshRequest: WebknossosAdHocMeshRequest, + tracingId: String): Fox[(Array[Float], List[Int])] = + if (tracing.mappingIsEditable.getOrElse(false)) + editableMappingService.createAdHocMesh(tracing, tracingId, adHocMeshRequest, token) + else volumeTracingService.createAdHocMesh(tracingId, adHocMeshRequest, token) +} diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala index cf84d19d144..a997581326c 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala @@ -9,7 +9,7 @@ import com.scalableminds.webknossos.datastore.models.{ AdditionalCoordinate, UnsignedInteger, UnsignedIntegerArray, - WebKnossosDataRequest + WebknossosDataRequest } import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer} import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService @@ -180,7 +180,7 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci additionalCoordinates: Option[Seq[AdditionalCoordinate]], userToken: Option[String]): Fox[Array[Byte]] = { val dataRequests = bucketPositions.map { position => - WebKnossosDataRequest( + WebknossosDataRequest( position = position * mag * DataLayer.bucketLength, mag = mag, cubeSize = DataLayer.bucketLength, 
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala index 5842c279ea3..97d64b826db 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingBucketHelper.scala @@ -4,7 +4,7 @@ import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.dataformats.wkw.{MortonEncoding, WKWDataFormatHelper} import com.scalableminds.webknossos.datastore.models.datasource.{AdditionalAxis, DataLayer, ElementClass} -import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, BucketPosition, WebKnossosDataRequest} +import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, BucketPosition, WebknossosDataRequest} import com.scalableminds.webknossos.datastore.services.DataConverter import com.scalableminds.webknossos.tracingstore.tracings._ import com.typesafe.scalalogging.LazyLogging @@ -215,7 +215,7 @@ trait VolumeTracingBucketHelper } private def loadFallbackBucket(dataLayer: VolumeTracingLayer, bucket: BucketPosition): Fox[Array[Byte]] = { - val dataRequest: WebKnossosDataRequest = WebKnossosDataRequest( + val dataRequest: WebknossosDataRequest = WebknossosDataRequest( position = Vec3Int(bucket.topLeft.mag1X, bucket.topLeft.mag1Y, bucket.topLeft.mag1Z), mag = bucket.mag, cubeSize = dataLayer.lengthOfUnderlyingCubes(bucket.mag), diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala index 
9d2ddf1c5ab..f82518d2dec 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingDownsampling.scala @@ -5,7 +5,7 @@ import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.models.{BucketPosition, UnsignedIntegerArray} import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, DataSourceLike, ElementClass} import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto -import com.scalableminds.webknossos.tracingstore.TSRemoteWebKnossosClient +import com.scalableminds.webknossos.tracingstore.TSRemoteWebknossosClient import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.tracingstore.tracings.{ FossilDBClient, @@ -55,7 +55,7 @@ trait VolumeTracingDownsampling with FoxImplicits { val tracingDataStore: TracingDataStore - val tracingStoreWkRpcClient: TSRemoteWebKnossosClient + val tracingStoreWkRpcClient: TSRemoteWebknossosClient protected def saveBucket(dataLayer: VolumeTracingLayer, bucket: BucketPosition, data: Array[Byte], diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 482672cc7f9..dde3df8618f 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -23,11 +23,11 @@ import com.scalableminds.webknossos.datastore.models.{ import com.scalableminds.webknossos.datastore.services._ import com.scalableminds.webknossos.tracingstore.tracings.TracingType.TracingType 
import com.scalableminds.webknossos.tracingstore.tracings._ -import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.{EditableMappingService, FallbackDataHelper} +import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.EditableMappingService import com.scalableminds.webknossos.tracingstore.tracings.volume.VolumeDataZipFormat.VolumeDataZipFormat import com.scalableminds.webknossos.tracingstore.{ TSRemoteDatastoreClient, - TSRemoteWebKnossosClient, + TSRemoteWebknossosClient, TracingStoreRedisStore } import com.typesafe.scalalogging.LazyLogging @@ -45,8 +45,8 @@ import scala.concurrent.duration._ class VolumeTracingService @Inject()( val tracingDataStore: TracingDataStore, - val tracingStoreWkRpcClient: TSRemoteWebKnossosClient, - val adHocMeshingServiceHolder: AdHocMeshingServiceHolder, + val tracingStoreWkRpcClient: TSRemoteWebknossosClient, + val adHocMeshServiceHolder: AdHocMeshServiceHolder, implicit val temporaryTracingStore: TemporaryTracingStore[VolumeTracing], implicit val temporaryVolumeDataStore: TemporaryVolumeDataStore, implicit val ec: ExecutionContext, @@ -55,7 +55,7 @@ class VolumeTracingService @Inject()( editableMappingService: EditableMappingService, val temporaryTracingIdStore: TracingStoreRedisStore, val remoteDatastoreClient: TSRemoteDatastoreClient, - val remoteWebKnossosClient: TSRemoteWebKnossosClient, + val remoteWebknossosClient: TSRemoteWebknossosClient, val temporaryFileCreator: TemporaryFileCreator, val tracingMigrationService: VolumeTracingMigrationService, volumeSegmentIndexService: VolumeSegmentIndexService @@ -87,8 +87,8 @@ class VolumeTracingService @Inject()( actually load anything from disk, unlike its “normal” instance in the datastore (only from the volume tracing store) */ val binaryDataService = new BinaryDataService(Paths.get(""), 100, None, None, None, None, None) - adHocMeshingServiceHolder.tracingStoreAdHocMeshingConfig = (binaryDataService, 30 seconds, 1) - val adHocMeshingService: 
AdHocMeshService = adHocMeshingServiceHolder.tracingStoreAdHocMeshingService + adHocMeshServiceHolder.tracingStoreAdHocMeshConfig = (binaryDataService, 30 seconds, 1) + val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.tracingStoreAdHocMeshService override def currentVersion(tracingId: String): Fox[Long] = tracingDataStore.volumes.getVersion(tracingId, mayBeEmpty = Some(true), emptyFallback = Some(0L)) @@ -571,14 +571,13 @@ class VolumeTracingService @Inject()( segmentationLayer, request.cuboid(segmentationLayer), request.segmentId, - request.subsamplingStrides, request.scale, None, None, request.additionalCoordinates, request.findNeighbors ) - result <- adHocMeshingService.requestAdHocMeshViaActor(adHocMeshRequest) + result <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) } yield result def findData(tracingId: String): Fox[Option[Vec3Int]] = diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index 11f25a2b0d3..cecd0e25a7c 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -17,6 +17,7 @@ POST /volume/:tracingId/data @com.scalablemin POST /volume/:tracingId/duplicate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.duplicate(token: Option[String], tracingId: String, fromTask: Option[Boolean], minResolution: Option[Int], maxResolution: Option[Int], downsample: Option[Boolean], editPosition: Option[String], editRotation: Option[String], boundingBox: Option[String]) GET /volume/:tracingId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.updateActionLog(token: Option[String], tracingId: String, newestVersion: Option[Long], oldestVersion: Option[Long]) POST /volume/:tracingId/adHocMesh 
@com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.requestAdHocMesh(token: Option[String], tracingId: String) +POST /volume/:tracingId/fullMesh.stl @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.loadFullMeshStl(token: Option[String], tracingId: String) POST /volume/:tracingId/segmentIndex/:segmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.getSegmentIndex(token: Option[String], tracingId: String, segmentId: Long) POST /volume/:tracingId/importVolumeData @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.importVolumeData(token: Option[String], tracingId: String) POST /volume/:tracingId/addSegmentIndex @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.addSegmentIndex(token: Option[String], tracingId: String, dryRun: Boolean)