---
docs/datasets.md | 2 +-
.../admin/dataset/composition_wizard/02_upload_files.tsx | 2 +-
.../composition_wizard/04_configure_new_dataset.tsx | 8 ++++----
3 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/docs/datasets.md b/docs/datasets.md
index 17271a938f1..03bc468fa18 100644
--- a/docs/datasets.md
+++ b/docs/datasets.md
@@ -128,7 +128,7 @@ For manual conversion, we provide the following software tools and libraries:
### Composing Datasets
New datasets can also be composed from existing ones.
-This feature allows to combine layers of already added datasets together to create a new dataset.
+This feature allows combining layers from previously added datasets to create a new dataset.
During compositions, transforms can optionally be defined in case the datasets are not in the same coordinate system.
There are three different ways to compose a new dataset:
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
index cf388c322ef..1e4b67cd5d2 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
@@ -71,7 +71,7 @@ export default function UploadFiles({ wizardContext, setWizardContext }: WizardC
fileList={fileList}
onChange={handleChange}
beforeUpload={() => false}
- maxCount={2}
+ maxCount={wizardContext.composeMode === "BIG_WARP" ? 1 : 2}
multiple
>
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx
index 8572e4bfa66..0f204cecb0a 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx
@@ -85,7 +85,7 @@ export function ConfigureNewDataset(props: WizardComponentProps) {
const handleSubmit = async () => {
if (activeUser == null) {
- throw new Error("Cannot upload dataset without being logged in.");
+ throw new Error("Cannot create dataset without being logged in.");
}
const layers = form.getFieldValue(["layers"]);
@@ -126,7 +126,7 @@ export function ConfigureNewDataset(props: WizardComponentProps) {
setSelectedTeams(selectedTeams)}
+ setSelectedTeams={setSelectedTeams}
formRef={formRef}
/>
@@ -135,7 +135,7 @@ export function ConfigureNewDataset(props: WizardComponentProps) {
Back
- Upload
+ Create Dataset
From 3ea914fbc486c9b4db41e86fcf66e39c76eebf83 Mon Sep 17 00:00:00 2001
From: Philipp Otto
Date: Wed, 3 Jan 2024 15:54:46 +0100
Subject: [PATCH 33/46] use title case
---
frontend/javascripts/admin/dataset/dataset_add_view.tsx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/frontend/javascripts/admin/dataset/dataset_add_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_view.tsx
index 490915b9f84..142d11d6c0c 100644
--- a/frontend/javascripts/admin/dataset/dataset_add_view.tsx
+++ b/frontend/javascripts/admin/dataset/dataset_add_view.tsx
@@ -152,7 +152,7 @@ function DatasetAddView({ history }: RouteComponentProps) {
label: (
- Compose from existing datasets
+ Compose From Existing Datasets
),
key: DatasetAddType.COMPOSE,
From f9d447983e8dc1d85035984b03ff26a110d5a992 Mon Sep 17 00:00:00 2001
From: Philipp Otto
Date: Wed, 3 Jan 2024 15:58:06 +0100
Subject: [PATCH 34/46] send potential error to airbrake
---
.../admin/dataset/composition_wizard/02_upload_files.tsx | 2 ++
1 file changed, 2 insertions(+)
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
index cf388c322ef..a210dcb1a57 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
@@ -10,6 +10,7 @@ import { Vector3 } from "oxalis/constants";
import { parseNml } from "oxalis/model/helpers/nml_helpers";
import React from "react";
import { tryToFetchDatasetsByName, WizardComponentProps, WizardContext, FileList } from "./common";
+import ErrorHandling from "libs/error_handling";
const EXPECTED_VALUE_COUNT_PER_CSV_LINE = 8;
@@ -49,6 +50,7 @@ export default function UploadFiles({ wizardContext, setWizardContext }: WizardC
Toast.error(
"An error occurred while importing the uploaded files. See the Browser's console for more details.",
);
+ ErrorHandling.notify(exception as Error);
console.error(exception);
}
}
From e41f453c5ac2c2f7468cc6e7896ce2c244392e06 Mon Sep 17 00:00:00 2001
From: Philipp Otto
Date: Wed, 3 Jan 2024 15:58:18 +0100
Subject: [PATCH 35/46] use activeUser.organization instead of hardcoded string
---
.../dataset/composition_wizard/04_configure_new_dataset.tsx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx
index 8572e4bfa66..4e181c8cf62 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx
@@ -102,7 +102,7 @@ export function ConfigureNewDataset(props: WizardComponentProps) {
await createDatasetComposition(datastoreToUse.url, {
newDatasetName,
targetFolderId: form.getFieldValue(["targetFolderId"]),
- organizationName: "sample_organization",
+ organizationName: activeUser.organization,
scale: linkedDatasets[1].dataSource.scale,
layers,
});
From d75877d9ec77e8591d3ca994fa957f98e34b07c2 Mon Sep 17 00:00:00 2001
From: Philipp Otto
Date: Wed, 3 Jan 2024 17:26:09 +0100
Subject: [PATCH 36/46] explain expected structure of NML/CSV and which dataset
is transformed
---
.../composition_wizard/02_upload_files.tsx | 19 ++++++++++++++-----
1 file changed, 14 insertions(+), 5 deletions(-)
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
index a210dcb1a57..d03fe98fdab 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
@@ -58,11 +58,20 @@ export default function UploadFiles({ wizardContext, setWizardContext }: WizardC
return (
-
- {wizardContext.composeMode === "BIG_WARP"
- ? "Please upload one CSV file that was exported by BigWarp."
- : "Please upload two NML files that contain landmarks that you created with WEBKNOSSOS."}
-
+ {wizardContext.composeMode === "BIG_WARP" ? (
+
+ Please upload one CSV file that was exported by BigWarp. Note that the first dataset
+ referenced by the CSV file will be transformed to the second referenced dataset.
+
+ ) : (
+
+ Please upload two NML files that contain landmarks that you created with WEBKNOSSOS. Note
+ that the dataset that belongs to the first NML will be transformed to the dataset that
+ belongs to the second NML file. The skeletons in the NML files should have exactly one
+ node per tree.
+
+ )}
+
Landmark files ({wizardContext.composeMode === "BIG_WARP" ? "1 CSV file" : "2 NML files"}
From ee7e15bbb96c0b3c21231982dac17b7d77a2fdb2 Mon Sep 17 00:00:00 2001
From: Philipp Otto
Date: Wed, 3 Jan 2024 17:38:35 +0100
Subject: [PATCH 37/46] clear select suggestions to avoid confusion
---
frontend/javascripts/components/async_select.tsx | 6 ++++++
.../dashboard/dataset/dataset_selection_component.tsx | 2 +-
2 files changed, 7 insertions(+), 1 deletion(-)
diff --git a/frontend/javascripts/components/async_select.tsx b/frontend/javascripts/components/async_select.tsx
index d5b4a5b80ab..f9e4db68d7b 100644
--- a/frontend/javascripts/components/async_select.tsx
+++ b/frontend/javascripts/components/async_select.tsx
@@ -50,6 +50,12 @@ export default function AsyncSelect<
notFoundContent={fetching ? : null}
{...props}
options={options}
+ // Clear suggestions after the user selected one to avoid confusion.
+ // Otherwise, the user could click into the select field and the old
+ // suggestions would be shown (from the typed string that is now gone).
+ // The user might think that these are all available entries. However,
+ // inputting a new string will show new suggestions.
+ onSelect={() => setOptions([])}
/>
);
}
diff --git a/frontend/javascripts/dashboard/dataset/dataset_selection_component.tsx b/frontend/javascripts/dashboard/dataset/dataset_selection_component.tsx
index c8ed74574dc..1c3d6709c7c 100644
--- a/frontend/javascripts/dashboard/dataset/dataset_selection_component.tsx
+++ b/frontend/javascripts/dashboard/dataset/dataset_selection_component.tsx
@@ -28,7 +28,7 @@ export default function DatasetSelectionComponent({
{
setDatasetValues(newValue as DatasetSelectionValue[]);
From cc5550c7182d508c48811547a5f01cfc697674d7 Mon Sep 17 00:00:00 2001
From: Philipp Otto
Date: Wed, 3 Jan 2024 17:40:01 +0100
Subject: [PATCH 38/46] explain how trees are matched
---
.../admin/dataset/composition_wizard/02_upload_files.tsx | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
index d03fe98fdab..9d5adcf277d 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
@@ -68,7 +68,8 @@ export default function UploadFiles({ wizardContext, setWizardContext }: WizardC
Please upload two NML files that contain landmarks that you created with WEBKNOSSOS. Note
that the dataset that belongs to the first NML will be transformed to the dataset that
belongs to the second NML file. The skeletons in the NML files should have exactly one
- node per tree.
+ node per tree. The n-th tree of the first NML is aligned with the n-th tree of the second
+ NML.
)}
From 80eced85c52f1c0a2b6ff004e26effa6584177eb Mon Sep 17 00:00:00 2001
From: frcroth
Date: Mon, 8 Jan 2024 09:51:51 +0100
Subject: [PATCH 39/46] Make symlink trait a service
---
.../datastore/services/ComposeService.scala | 25 +++++++++++--------
.../datastore/services/UploadService.scala | 10 +++++---
2 files changed, 20 insertions(+), 15 deletions(-)
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala
index 783c11cfdc3..dc4f0467d93 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala
@@ -31,21 +31,21 @@ case class ComposeRequest(
targetFolderId: String,
organizationName: String,
scale: Vec3Double,
- layers: Seq[ComposeLayer]
+ layers: Seq[ComposeRequestLayer]
)
object ComposeRequest {
implicit val composeRequestFormat: OFormat[ComposeRequest] = Json.format[ComposeRequest]
}
-case class ComposeLayer(
+case class ComposeRequestLayer(
datasetId: DataLayerId,
sourceName: String,
newName: String,
transformations: Seq[CoordinateTransformation]
)
-object ComposeLayer {
- implicit val composeLayerFormat: OFormat[ComposeLayer] = Json.format[ComposeLayer]
+object ComposeRequestLayer {
+ implicit val composeLayerFormat: OFormat[ComposeRequestLayer] = Json.format[ComposeRequestLayer]
}
case class DataLayerId(name: String, owningOrganization: String)
@@ -54,7 +54,8 @@ object DataLayerId {
implicit val dataLayerIdFormat: OFormat[DataLayerId] = Json.format[DataLayerId]
}
-class SymlinkHelper(dataSourceService: DataSourceService)(implicit ec: ExecutionContext) extends FoxImplicits {
+class DatasetSymlinkService @Inject()(dataSourceService: DataSourceService)(implicit ec: ExecutionContext)
+ extends FoxImplicits {
val dataBaseDir: Path = dataSourceService.dataBaseDir
def addSymlinksToOtherDatasetLayers(dataSetDir: Path, layersToLink: List[LinkedLayerIdentifier]): Fox[Unit] =
@@ -76,10 +77,11 @@ class SymlinkHelper(dataSourceService: DataSourceService)(implicit ec: Execution
}
class ComposeService @Inject()(dataSourceRepository: DataSourceRepository,
- dataSourceService: DataSourceService,
- remoteWebKnossosClient: DSRemoteWebKnossosClient)(implicit ec: ExecutionContext)
- extends SymlinkHelper(dataSourceService)(ec)
- with FoxImplicits {
+ remoteWebKnossosClient: DSRemoteWebKnossosClient,
+ datasetSymlinkService: DatasetSymlinkService)(implicit ec: ExecutionContext)
+ extends FoxImplicits {
+
+ val dataBaseDir: Path = datasetSymlinkService.dataBaseDir
private def uploadDirectory(organizationName: String, name: String): Path =
dataBaseDir.resolve(organizationName).resolve(name)
@@ -104,7 +106,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository,
_ = Files.write(directory.resolve(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON), properties)
} yield ()
- private def getLayerFromComposeLayer(composeLayer: ComposeLayer, uploadDir: Path): Fox[DataLayer] =
+ private def getLayerFromComposeLayer(composeLayer: ComposeRequestLayer, uploadDir: Path): Fox[DataLayer] =
for {
dataSourceId <- Fox.successful(
DataSourceId(composeLayer.datasetId.name, composeLayer.datasetId.owningOrganization))
@@ -121,7 +123,8 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository,
composeLayer.sourceName,
Some(composeLayer.newName))
layerIsRemote = isLayerRemote(dataSourceId, composeLayer.sourceName)
- _ <- Fox.runIf(!layerIsRemote)(addSymlinksToOtherDatasetLayers(uploadDir, List(linkedLayerIdentifier)))
+ _ <- Fox.runIf(!layerIsRemote)(
+ datasetSymlinkService.addSymlinksToOtherDatasetLayers(uploadDir, List(linkedLayerIdentifier)))
editedLayer: DataLayer = layer match {
case l: PrecomputedDataLayer =>
l.copy(name = composeLayer.newName,
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala
index e86db156b50..6dad0f2055f 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala
@@ -65,9 +65,9 @@ object CancelUploadInformation {
class UploadService @Inject()(dataSourceRepository: DataSourceRepository,
dataSourceService: DataSourceService,
runningUploadMetadataStore: DataStoreRedisStore,
- exploreLocalLayerService: ExploreLocalLayerService)(implicit ec: ExecutionContext)
- extends SymlinkHelper(dataSourceService)
- with DataSetDeleter
+ exploreLocalLayerService: ExploreLocalLayerService,
+ datasetSymlinkService: DatasetSymlinkService)(implicit ec: ExecutionContext)
+ extends DataSetDeleter
with DirectoryConstants
with FoxImplicits
with LazyLogging {
@@ -96,6 +96,8 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository,
cleanUpOrphanUploads()
+ override def dataBaseDir: Path = dataSourceService.dataBaseDir
+
def isKnownUploadByFileId(uploadFileId: String): Fox[Boolean] = isKnownUpload(extractDatasetUploadId(uploadFileId))
def isKnownUpload(uploadId: String): Fox[Boolean] =
@@ -237,7 +239,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository,
case UploadedDataSourceType.ZARR_MULTILAYER => tryExploringMultipleZarrLayers(unpackToDir, dataSourceId)
case UploadedDataSourceType.WKW => addLayerAndResolutionDirIfMissing(unpackToDir).toFox
}
- _ <- addSymlinksToOtherDatasetLayers(unpackToDir, layersToLink.getOrElse(List.empty))
+ _ <- datasetSymlinkService.addSymlinksToOtherDatasetLayers(unpackToDir, layersToLink.getOrElse(List.empty))
_ <- addLinkedLayersToDataSourceProperties(unpackToDir, dataSourceId.team, layersToLink.getOrElse(List.empty))
} yield ()
}
From 376628804b4cb2dbda76ed38ce80c9dcc4e1fc5d Mon Sep 17 00:00:00 2001
From: frcroth
Date: Mon, 8 Jan 2024 11:19:24 +0100
Subject: [PATCH 40/46] Remove check inbox
---
.../datastore/controllers/DataSourceController.scala | 5 ++---
.../webknossos/datastore/services/ComposeService.scala | 4 ++--
2 files changed, 4 insertions(+), 5 deletions(-)
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala
index 0cf1793959e..bdb45815bfd 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala
@@ -145,7 +145,6 @@ class DataSourceController @Inject()(
urlOrHeaderToken(token, request)) {
for {
(dataSourceId, datasetSizeBytes) <- uploadService.finishUpload(request.body)
- _ <- dataSourceService.checkInbox(false)
_ <- remoteWebKnossosClient.reportUpload(
dataSourceId,
datasetSizeBytes,
@@ -451,8 +450,8 @@ class DataSourceController @Inject()(
accessTokenService.assertUserAccess(
UserAccessRequest.readDataSources(DataSourceId(id.name, id.owningOrganization)),
userToken))
- _ <- composeService.composeDataset(request.body, userToken)
- _ <- dataSourceService.checkInbox(verbose = false)
+ dataSource <- composeService.composeDataset(request.body, userToken)
+ _ <- dataSourceRepository.updateDataSource(dataSource)
} yield Ok
}
}
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala
index dc4f0467d93..7e58be03349 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala
@@ -87,7 +87,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository,
dataBaseDir.resolve(organizationName).resolve(name)
def composeDataset(composeRequest: ComposeRequest, userToken: Option[String])(
- implicit ec: ExecutionContext): Fox[Unit] =
+ implicit ec: ExecutionContext): Fox[DataSource] =
for {
_ <- Fox.bool2Fox(Files.isWritable(dataBaseDir)) ?~> "Datastore can not write to its data directory."
@@ -104,7 +104,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository,
dataSource <- createDatasource(composeRequest, composeRequest.organizationName)
properties = Json.toJson(dataSource).toString().getBytes(StandardCharsets.UTF_8)
_ = Files.write(directory.resolve(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON), properties)
- } yield ()
+ } yield dataSource
private def getLayerFromComposeLayer(composeLayer: ComposeRequestLayer, uploadDir: Path): Fox[DataLayer] =
for {
From 2f5f948a68d4ab471b1f5e631cca60af49d744cd Mon Sep 17 00:00:00 2001
From: frcroth
Date: Mon, 8 Jan 2024 11:27:13 +0100
Subject: [PATCH 41/46] Reorganize uploading services
---
.../WKRemoteDataStoreController.scala | 7 +---
.../datastore/DataStoreModule.scala | 1 +
.../controllers/DataSourceController.scala | 7 +---
.../services/DSRemoteWebKnossosClient.scala | 1 +
.../{ => uploading}/ComposeService.scala | 39 ++-----------------
.../uploading/DatasetSymlinkService.scala | 31 +++++++++++++++
.../{ => uploading}/UploadService.scala | 9 +++--
7 files changed, 46 insertions(+), 49 deletions(-)
rename webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/{ => uploading}/ComposeService.scala (85%)
create mode 100644 webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/DatasetSymlinkService.scala
rename webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/{ => uploading}/UploadService.scala (99%)
diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala
index e08b2f0d0b6..1e8a44676ab 100644
--- a/app/controllers/WKRemoteDataStoreController.scala
+++ b/app/controllers/WKRemoteDataStoreController.scala
@@ -5,11 +5,8 @@ import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.controllers.JobExportProperties
import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId
import com.scalableminds.webknossos.datastore.models.datasource.inbox.{InboxDataSourceLike => InboxDataSource}
-import com.scalableminds.webknossos.datastore.services.{
- DataStoreStatus,
- LinkedLayerIdentifier,
- ReserveUploadInformation
-}
+import com.scalableminds.webknossos.datastore.services.uploading.{LinkedLayerIdentifier, ReserveUploadInformation}
+import com.scalableminds.webknossos.datastore.services.DataStoreStatus
import com.typesafe.scalalogging.LazyLogging
import mail.{MailchimpClient, MailchimpTag}
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala
index bca52bafa25..aee6753d473 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala
@@ -4,6 +4,7 @@ import akka.actor.ActorSystem
import com.google.inject.AbstractModule
import com.google.inject.name.Names
import com.scalableminds.webknossos.datastore.services._
+import com.scalableminds.webknossos.datastore.services.uploading.UploadService
import com.scalableminds.webknossos.datastore.storage.DataVaultService
class DataStoreModule extends AbstractModule {
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala
index bdb45815bfd..f67dc0f54c1 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala
@@ -3,13 +3,10 @@ package com.scalableminds.webknossos.datastore.controllers
import com.google.inject.Inject
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong
-import com.scalableminds.webknossos.datastore.models.datasource.inbox.{
- InboxDataSource,
- InboxDataSourceLike,
- UnusableInboxDataSource
-}
+import com.scalableminds.webknossos.datastore.models.datasource.inbox.{InboxDataSource, InboxDataSourceLike, UnusableInboxDataSource}
import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, DataSourceId}
import com.scalableminds.webknossos.datastore.services._
+import com.scalableminds.webknossos.datastore.services.uploading.{CancelUploadInformation, ComposeRequest, ComposeService, ReserveUploadInformation, UploadInformation, UploadService}
import play.api.data.Form
import play.api.data.Forms.{longNumber, nonEmptyText, number, tuple}
import play.api.i18n.Messages
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala
index 4691424e845..cc45c9c60ed 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebKnossosClient.scala
@@ -12,6 +12,7 @@ import com.scalableminds.webknossos.datastore.models.annotation.AnnotationSource
import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId
import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSourceLike
import com.scalableminds.webknossos.datastore.rpc.RPC
+import com.scalableminds.webknossos.datastore.services.uploading.ReserveUploadInformation
import com.scalableminds.webknossos.datastore.storage.DataVaultCredential
import com.typesafe.scalalogging.LazyLogging
import play.api.inject.ApplicationLifecycle
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala
similarity index 85%
rename from webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala
rename to webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala
index 7e58be03349..e3ab67d8ef9 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala
@@ -1,24 +1,15 @@
-package com.scalableminds.webknossos.datastore.services
+package com.scalableminds.webknossos.datastore.services.uploading
import com.scalableminds.util.geometry.Vec3Double
import com.scalableminds.util.io.PathUtils
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.scalableminds.webknossos.datastore.dataformats.n5.{N5DataLayer, N5SegmentationLayer}
-import com.scalableminds.webknossos.datastore.dataformats.precomputed.{
- PrecomputedDataLayer,
- PrecomputedSegmentationLayer
-}
+import com.scalableminds.webknossos.datastore.dataformats.precomputed.{PrecomputedDataLayer, PrecomputedSegmentationLayer}
import com.scalableminds.webknossos.datastore.dataformats.wkw.{WKWDataLayer, WKWSegmentationLayer}
import com.scalableminds.webknossos.datastore.dataformats.zarr.{ZarrDataLayer, ZarrSegmentationLayer}
import com.scalableminds.webknossos.datastore.dataformats.zarr3.{Zarr3DataLayer, Zarr3SegmentationLayer}
-import com.scalableminds.webknossos.datastore.models.datasource.{
- CoordinateTransformation,
- DataLayer,
- DataSource,
- DataSourceId,
- GenericDataSource
-}
-import net.liftweb.util.Helpers.tryo
+import com.scalableminds.webknossos.datastore.models.datasource._
+import com.scalableminds.webknossos.datastore.services.{DSRemoteWebKnossosClient, DataSourceRepository}
import play.api.libs.json.{Json, OFormat}
import java.nio.charset.StandardCharsets
@@ -54,28 +45,6 @@ object DataLayerId {
implicit val dataLayerIdFormat: OFormat[DataLayerId] = Json.format[DataLayerId]
}
-class DatasetSymlinkService @Inject()(dataSourceService: DataSourceService)(implicit ec: ExecutionContext)
- extends FoxImplicits {
-
- val dataBaseDir: Path = dataSourceService.dataBaseDir
- def addSymlinksToOtherDatasetLayers(dataSetDir: Path, layersToLink: List[LinkedLayerIdentifier]): Fox[Unit] =
- Fox
- .serialCombined(layersToLink) { layerToLink =>
- val layerPath = layerToLink.pathIn(dataBaseDir)
- val newLayerPath = dataSetDir.resolve(layerToLink.newLayerName.getOrElse(layerToLink.layerName))
- for {
- _ <- bool2Fox(!Files.exists(newLayerPath)) ?~> s"Cannot symlink layer at $newLayerPath: a layer with this name already exists."
- _ <- bool2Fox(Files.exists(layerPath)) ?~> s"Cannot symlink to layer at $layerPath: The layer does not exist."
- _ <- tryo {
- Files.createSymbolicLink(newLayerPath, newLayerPath.getParent.relativize(layerPath))
- } ?~> s"Failed to create symlink at $newLayerPath."
- } yield ()
- }
- .map { _ =>
- ()
- }
-}
-
class ComposeService @Inject()(dataSourceRepository: DataSourceRepository,
remoteWebKnossosClient: DSRemoteWebKnossosClient,
datasetSymlinkService: DatasetSymlinkService)(implicit ec: ExecutionContext)
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/DatasetSymlinkService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/DatasetSymlinkService.scala
new file mode 100644
index 00000000000..49a2cd5c84c
--- /dev/null
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/DatasetSymlinkService.scala
@@ -0,0 +1,31 @@
+package com.scalableminds.webknossos.datastore.services.uploading
+
+import com.scalableminds.util.tools.{Fox, FoxImplicits}
+import com.scalableminds.webknossos.datastore.services.DataSourceService
+import net.liftweb.util.Helpers.tryo
+
+import java.nio.file.{Files, Path}
+import javax.inject.Inject
+import scala.concurrent.ExecutionContext
+
+class DatasetSymlinkService @Inject()(dataSourceService: DataSourceService)(implicit ec: ExecutionContext)
+ extends FoxImplicits {
+
+ val dataBaseDir: Path = dataSourceService.dataBaseDir
+ def addSymlinksToOtherDatasetLayers(dataSetDir: Path, layersToLink: List[LinkedLayerIdentifier]): Fox[Unit] =
+ Fox
+ .serialCombined(layersToLink) { layerToLink =>
+ val layerPath = layerToLink.pathIn(dataBaseDir)
+ val newLayerPath = dataSetDir.resolve(layerToLink.newLayerName.getOrElse(layerToLink.layerName))
+ for {
+ _ <- bool2Fox(!Files.exists(newLayerPath)) ?~> s"Cannot symlink layer at $newLayerPath: a layer with this name already exists."
+ _ <- bool2Fox(Files.exists(layerPath)) ?~> s"Cannot symlink to layer at $layerPath: The layer does not exist."
+ _ <- tryo {
+ Files.createSymbolicLink(newLayerPath, newLayerPath.getParent.relativize(layerPath))
+ } ?~> s"Failed to create symlink at $newLayerPath."
+ } yield ()
+ }
+ .map { _ =>
+ ()
+ }
+}
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala
similarity index 99%
rename from webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala
rename to webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala
index 6dad0f2055f..72f75fdc1c0 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala
@@ -1,7 +1,5 @@
-package com.scalableminds.webknossos.datastore.services
+package com.scalableminds.webknossos.datastore.services.uploading
-import java.io.{File, RandomAccessFile}
-import java.nio.file.{Files, Path}
import com.google.inject.Inject
import com.scalableminds.util.io.PathUtils.ensureDirectoryBox
import com.scalableminds.util.io.{PathUtils, ZipIO}
@@ -14,13 +12,16 @@ import com.scalableminds.webknossos.datastore.explore.ExploreLocalLayerService
import com.scalableminds.webknossos.datastore.helpers.{DataSetDeleter, DirectoryConstants}
import com.scalableminds.webknossos.datastore.models.datasource.GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON
import com.scalableminds.webknossos.datastore.models.datasource._
+import com.scalableminds.webknossos.datastore.services.{DataSourceRepository, DataSourceService}
import com.scalableminds.webknossos.datastore.storage.DataStoreRedisStore
import com.typesafe.scalalogging.LazyLogging
-import net.liftweb.common._
import net.liftweb.common.Box.tryo
+import net.liftweb.common._
import org.apache.commons.io.FileUtils
import play.api.libs.json.{Json, OFormat, Reads}
+import java.io.{File, RandomAccessFile}
+import java.nio.file.{Files, Path}
import scala.concurrent.ExecutionContext
case class ReserveUploadInformation(
From 9b6c80f3035e74cb40c0a4f539fdf464c3ea0f59 Mon Sep 17 00:00:00 2001
From: Florian M
Date: Tue, 9 Jan 2024 19:34:03 +0100
Subject: [PATCH 42/46] format
---
.../controllers/DataSourceController.scala | 15 +++++++++++++--
.../services/uploading/ComposeService.scala | 5 ++++-
2 files changed, 17 insertions(+), 3 deletions(-)
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala
index f67dc0f54c1..9f1156c80dd 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala
@@ -3,10 +3,21 @@ package com.scalableminds.webknossos.datastore.controllers
import com.google.inject.Inject
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong
-import com.scalableminds.webknossos.datastore.models.datasource.inbox.{InboxDataSource, InboxDataSourceLike, UnusableInboxDataSource}
+import com.scalableminds.webknossos.datastore.models.datasource.inbox.{
+ InboxDataSource,
+ InboxDataSourceLike,
+ UnusableInboxDataSource
+}
import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, DataSourceId}
import com.scalableminds.webknossos.datastore.services._
-import com.scalableminds.webknossos.datastore.services.uploading.{CancelUploadInformation, ComposeRequest, ComposeService, ReserveUploadInformation, UploadInformation, UploadService}
+import com.scalableminds.webknossos.datastore.services.uploading.{
+ CancelUploadInformation,
+ ComposeRequest,
+ ComposeService,
+ ReserveUploadInformation,
+ UploadInformation,
+ UploadService
+}
import play.api.data.Form
import play.api.data.Forms.{longNumber, nonEmptyText, number, tuple}
import play.api.i18n.Messages
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala
index e3ab67d8ef9..6caa79ce701 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala
@@ -4,7 +4,10 @@ import com.scalableminds.util.geometry.Vec3Double
import com.scalableminds.util.io.PathUtils
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.scalableminds.webknossos.datastore.dataformats.n5.{N5DataLayer, N5SegmentationLayer}
-import com.scalableminds.webknossos.datastore.dataformats.precomputed.{PrecomputedDataLayer, PrecomputedSegmentationLayer}
+import com.scalableminds.webknossos.datastore.dataformats.precomputed.{
+ PrecomputedDataLayer,
+ PrecomputedSegmentationLayer
+}
import com.scalableminds.webknossos.datastore.dataformats.wkw.{WKWDataLayer, WKWSegmentationLayer}
import com.scalableminds.webknossos.datastore.dataformats.zarr.{ZarrDataLayer, ZarrSegmentationLayer}
import com.scalableminds.webknossos.datastore.dataformats.zarr3.{Zarr3DataLayer, Zarr3SegmentationLayer}
From e142d5ff2eece2560ec60518b4330045418f7cad Mon Sep 17 00:00:00 2001
From: Philipp Otto
Date: Wed, 10 Jan 2024 15:27:10 +0100
Subject: [PATCH 43/46] change default tab back to UPLOAD
---
frontend/javascripts/admin/dataset/dataset_add_view.tsx | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/frontend/javascripts/admin/dataset/dataset_add_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_view.tsx
index 142d11d6c0c..a5105cea949 100644
--- a/frontend/javascripts/admin/dataset/dataset_add_view.tsx
+++ b/frontend/javascripts/admin/dataset/dataset_add_view.tsx
@@ -114,8 +114,7 @@ function DatasetAddView({ history }: RouteComponentProps) {
defaultActiveTabFromHash as DatasetAddType,
)
? (defaultActiveTabFromHash as DatasetAddType)
- : // todo: revert
- DatasetAddType.COMPOSE;
+ : DatasetAddType.UPLOAD;
const tabs: TabsProps["items"] = [
{
From 72733c07a7c33c52f33022480b76eff27160ee88 Mon Sep 17 00:00:00 2001
From: Philipp Otto
Date: Wed, 10 Jan 2024 15:51:38 +0100
Subject: [PATCH 44/46] allow trees with multiple nodes during composition
instead of expecting single-node trees
---
.../composition_wizard/02_upload_files.tsx | 46 ++++++++++---------
1 file changed, 25 insertions(+), 21 deletions(-)
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
index c7c42c6f7ba..18ffbba04fc 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
@@ -11,6 +11,7 @@ import { parseNml } from "oxalis/model/helpers/nml_helpers";
import React from "react";
import { tryToFetchDatasetsByName, WizardComponentProps, WizardContext, FileList } from "./common";
import ErrorHandling from "libs/error_handling";
+import * as Utils from "libs/utils";
const EXPECTED_VALUE_COUNT_PER_CSV_LINE = 8;
@@ -67,9 +68,10 @@ export default function UploadFiles({ wizardContext, setWizardContext }: WizardC
Please upload two NML files that contain landmarks that you created with WEBKNOSSOS. Note
that the dataset that belongs to the first NML will be transformed to the dataset that
- belongs to the second NML file. The skeletons in the NML files should have exactly one
- node per tree. The n-th tree of the first NML is aligned with the n-th tree of the second
- NML.
+ belongs to the second NML file. The skeletons in the NML files should match each other
+ exactly. This means that both NMLs should contain the same number of trees and that the
+ n-th tree of the first and second NML should have the same number of nodes, as these will
+ be aligned with each other.
)}
@@ -167,26 +169,28 @@ async function parseNmlFiles(fileList: FileList): Promise
throw new SoftError("Could not extract dataset names.");
}
- const nodes1 = Array.from(
- values(trees1)
- .map((tree) => Array.from(tree.nodes.values())[0])
- .values(),
- );
- const nodes2 = Array.from(
- values(trees2)
- .map((tree) => Array.from(tree.nodes.values())[0])
- .values(),
- );
+ if (Object.keys(trees1).length !== Object.keys(trees2).length) {
+ throw new SoftError("The two NML files should have the same tree count.");
+ }
- for (const [node1, node2] of _.zip(nodes1, nodes2)) {
- if ((node1 == null) !== (node2 == null)) {
- throw new SoftError(
- "A tree was empty while its corresponding tree wasn't. Ensure that the NML structures match each other.",
- );
+ for (const [tree1, tree2] of _.zip(Utils.values(trees1), Utils.values(trees2))) {
+ if (tree1 == null || tree2 == null) {
+ // Satisfy TS. This should not happen, as we checked before that both tree collections
+ // have the same size.
+ throw new SoftError("A tree was unexpectedly parsed as null. Please try again");
}
- if (node1 != null && node2 != null) {
- sourcePoints.push(node1.position);
- targetPoints.push(node2.position);
+ const nodes1 = Array.from(tree1.nodes.values());
+ const nodes2 = Array.from(tree2.nodes.values());
+ for (const [node1, node2] of _.zip(nodes1, nodes2)) {
+ if ((node1 == null) !== (node2 == null)) {
+ throw new SoftError(
+ `Tree ${tree1.treeId} and tree ${tree2.treeId} don't have the same number of nodes. Ensure that the NML structures match each other.`,
+ );
+ }
+ if (node1 != null && node2 != null) {
+ sourcePoints.push(node1.position);
+ targetPoints.push(node2.position);
+ }
}
}
From 8474006db3266b0168fbdc862d32f0b8d0078d7d Mon Sep 17 00:00:00 2001
From: Philipp Otto
Date: Wed, 10 Jan 2024 16:13:05 +0100
Subject: [PATCH 45/46] remove unused import
---
.../admin/dataset/composition_wizard/02_upload_files.tsx | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
index 18ffbba04fc..b59e452961c 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx
@@ -4,7 +4,7 @@ import Upload, { UploadChangeParam, UploadFile } from "antd/lib/upload";
import { AsyncButton } from "components/async_clickables";
import { readFileAsText } from "libs/read_file";
import Toast from "libs/toast";
-import { SoftError, values } from "libs/utils";
+import { SoftError } from "libs/utils";
import _ from "lodash";
import { Vector3 } from "oxalis/constants";
import { parseNml } from "oxalis/model/helpers/nml_helpers";
From 655b4e996d7b21949f1f45d92e3a70267bd82460 Mon Sep 17 00:00:00 2001
From: Philipp Otto
Date: Tue, 16 Jan 2024 13:35:48 +0100
Subject: [PATCH 46/46] inline radio buttons in first step of
compose-dataset-wizard
---
.../01_select_import_type.tsx | 30 +++++++------------
1 file changed, 11 insertions(+), 19 deletions(-)
diff --git a/frontend/javascripts/admin/dataset/composition_wizard/01_select_import_type.tsx b/frontend/javascripts/admin/dataset/composition_wizard/01_select_import_type.tsx
index 40cf7db3cda..b7950cf3110 100644
--- a/frontend/javascripts/admin/dataset/composition_wizard/01_select_import_type.tsx
+++ b/frontend/javascripts/admin/dataset/composition_wizard/01_select_import_type.tsx
@@ -26,27 +26,19 @@ export default function SelectImportType({
You can create a new dataset by composing existing datasets together. There are three
different ways to accomplish this:
-
- Select existing datasets which should be combined without any transforms
-
- Create landmarks nodes using the skeleton tool in two datasets. Download the annotations
- as NML and upload these here again.
-
- Import a landmark CSV as it can be exported by Big Warp.
-
+
+
+
+ Combine datasets without any transforms
+
+ Combine datasets by using skeleton annotations (NML)
+
+ Combine datasets by using BigWarp landmarks (CSV)
+
+
+
In all three cases, you can tweak which layers should be used later.
-
-
-
- Combine datasets without any transforms
-
- Combine datasets by using skeleton annotations (NML)
-
- Combine datasets by using BigWarp landmarks (CSV)
-
-
-
Next