From 83fdff0d55915bff0ba955a49494001b0d71058a Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 17 Oct 2023 15:28:52 +0200 Subject: [PATCH 01/46] implement compose-dataset-view which accepts NMLs from different datasets --- .../dataset/dataset_add_compose_view.tsx | 412 ++++++++++++++++++ .../admin/dataset/dataset_add_view.tsx | 18 +- .../admin/dataset/dataset_components.tsx | 67 ++- .../admin/dataset/dataset_upload_view.tsx | 57 +-- frontend/javascripts/libs/estimate_affine.ts | 5 +- .../model/accessors/dataset_accessor.ts | 5 + 6 files changed, 509 insertions(+), 55 deletions(-) create mode 100644 frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx diff --git a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx new file mode 100644 index 00000000000..d87f4d520ba --- /dev/null +++ b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx @@ -0,0 +1,412 @@ +import { DeleteOutlined, FileExcelOutlined } from "@ant-design/icons"; +import { + AllowedTeamsFormItem, + CardContainer, + DatasetNameFormItem, + layerNameRules, +} from "admin/dataset/dataset_components"; +import { Button, Col, Collapse, Form, FormInstance, Input, List, Radio, Row, Tooltip } from "antd"; +import Upload, { UploadChangeParam, UploadFile } from "antd/lib/upload"; +import { Vector3 } from "oxalis/constants"; +import React, { useState } from "react"; +import { readFileAsText } from "libs/read_file"; +import { estimateAffineMatrix4x4 } from "libs/estimate_affine"; +import { parseNml } from "oxalis/model/helpers/nml_helpers"; +import { values } from "libs/utils"; +import _ from "lodash"; +import { flatToNestedMatrix, formatNestedMatrix } from "oxalis/model/accessors/dataset_accessor"; +import { Matrix4x4 } from "libs/mjs"; +import { FormItemWithInfo } from "dashboard/dataset/helper_components"; +import messages from "messages"; +import FolderSelection from "dashboard/folders/folder_selection"; +import { useSelector } from "react-redux"; +import { OxalisState } from "oxalis/store"; +import * as Utils from "libs/utils"; +import { + APIDataset, + APIDataLayer, + APIDatasetId, + APITeam, + CoordinateTransformation, +} from "types/api_flow_types"; +import { syncValidator } from "types/validation"; +import { getDataset } from "admin/admin_rest_api"; + +const FormItem = Form.Item; + +type FileList = UploadFile[]; + +type LayerLink = { + datasetId: APIDatasetId; + sourceName: string; + newName: string; + transformations: CoordinateTransformation[]; +}; + +type Props = { + onAdded: ( + datasetOrganization: string, + uploadedDatasetName: string, + isRemoteDataset: boolean, + needsConversion?: boolean | null | undefined, + ) => Promise; +}; + +export default function DatasetAddComposeView(props: Props) { + const formRef = React.useRef>(null); + + const activeUser = useSelector((state: OxalisState) => state.activeUser); + const isDatasetManagerOrAdmin = Utils.isUserAdminOrDatasetManager(activeUser); + const [form] = Form.useForm(); + const [fileList, setFileList] = useState([]); + const [matrix, setMatrix] = useState(null); + const [selectedTeams, setSelectedTeams] = useState>([]); + + const onRemoveLayer = (layer: LayerLink) => { + const oldLayers = form.getFieldValue(["layers"]); + const newLayers = oldLayers.filter( + (existingLayer: LayerLink) => + existingLayer.datasetId.owningOrganization !== layer.datasetId.owningOrganization || + existingLayer.datasetId.name !== layer.datasetId.name || + existingLayer.sourceName !== 
layer.sourceName, + ); + form.setFieldsValue({ layers: newLayers }); + }; + + const handleChange = async (info: UploadChangeParam>) => { + const newFileList = info.fileList; + setFileList(newFileList); + + const sourcePoints = []; + const targetPoints = []; + if (newFileList.length === 1 && newFileList[0]?.originFileObj) { + const csv = await readFileAsText(newFileList[0]?.originFileObj); + console.log("csv", csv); + const lines = csv.split("\n"); + for (const line of lines) { + const fields = line.split(","); + const [_pointName, _enabled, x1, y1, z1, x2, y2, z2] = fields; + + const source = [x1, y1, z1].map((el) => parseInt(el.replaceAll('"', ""))) as Vector3; + const target = [x2, y2, z2].map((el) => parseInt(el.replaceAll('"', ""))) as Vector3; + sourcePoints.push(source); + targetPoints.push(target); + } + } + + if (newFileList.length === 2) { + const nmlString1 = await readFileAsText(newFileList[0]?.originFileObj!); + const nmlString2 = await readFileAsText(newFileList[1]?.originFileObj!); + + if (nmlString1 === "" || nmlString2 === "") { + throw new Error("NML files are empty."); + } + + const { trees: trees1, datasetName: datasetName1 } = await parseNml(nmlString1); + const { trees: trees2, datasetName: datasetName2 } = await parseNml(nmlString2); + + if (!datasetName1 || !datasetName2) { + throw new Error("Could not extract dataset names."); + } + + const [dataset1, dataset2] = await Promise.all([ + getDataset({ + owningOrganization: activeUser?.organization || "", + name: datasetName1, + }), + getDataset({ + owningOrganization: activeUser?.organization || "", + name: datasetName2, + }), + ]); + console.log("dataset1, dataset2", dataset1, dataset2); + + const nodes1 = Array.from( + values(trees1) + .map((tree) => Array.from(tree.nodes.values())[0]) + .values(), + ); + const nodes2 = Array.from( + values(trees2) + .map((tree) => Array.from(tree.nodes.values())[0]) + .values(), + ); + + for (const [node1, node2] of _.zip(nodes1, nodes2)) { + if (node1 == null) { + throw new Error("Empty trees found."); + } + sourcePoints.push(node1.position); + if (node2 == null) { + throw new Error("Empty trees found."); + } + targetPoints.push(node2.position); + } + const datasets = [dataset1, dataset2]; + const newMatrix = estimateAffineMatrix4x4(sourcePoints, targetPoints); + const newLinks: LayerLink[] = ( + _.flatMap(datasets, (dataset) => + dataset.dataSource.dataLayers.map((layer) => [dataset, layer]), + ) as [APIDataset, APIDataLayer][] + ).map( + ([dataset, dataLayer]): LayerLink => ({ + datasetId: { + owningOrganization: dataset.owningOrganization, + name: dataset.name, + }, + sourceName: dataLayer.name, + newName: dataLayer.name, + transformations: + dataset === datasets[0] + ? [ + { + type: "affine", + matrix: flatToNestedMatrix(newMatrix), + }, + ] + : [], + }), + ); + + form.setFieldsValue({ layers: newLinks }); + } + }; + + // The following transform should be added to the layer which belongs to the first NML + // so that it is transformed to fit the layer of the second NML. + // const datasourceJSON = + // matrix != null + // ? 
` + // "coordinateTransformations": [{ + // "type": "affine", + // "matrix": ${formatNestedMatrix(flatToNestedMatrix(matrix))} + // }] + // ` + // : ""; + + const handleSubmit = () => { + const layers = form.getFieldValue(["layers"]); + // @ts-ignore + const createDatasetComposition = (obj) => console.log(obj); + createDatasetComposition({ + newDatasetName: form.getFieldValue(["name"]), + layers, + // - scale + }); + }; + + return ( + // Using Forms here only to validate fields and for easy layout +
+ +

+ You can create a new dataset by composing existing datasets together. To align multiple
+ datasets with each other, create landmark nodes using the skeleton tool. Then, download
+ these annotations as NMLs and drop them into the landmarks input below. Alternatively,
+ you can add a landmark CSV as exported by Big Warp. WEBKNOSSOS will try to
+ find the datasets that are referenced in these files and will create transformations using
+ these landmarks.
+
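+ {/* A hypothetical example row of such a landmark CSV, matching the columns that
+     handleChange above parses (point name, enabled flag, source x/y/z, target x/y/z;
+     the values shown here are made up for illustration):
+     "Pt-0","true","12","34","5","56","78","9" */}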

+ +
+ Big Warp Landmarks (CSV)} + hasFeedback + > + false} + maxCount={2} + multiple + > +

+ +

+

+ Click or Drag your Big Warp Landmarks to this Area to Upload +

+

...

+
+
+ + + + + + + setSelectedTeams(selectedTeams)} + formRef={formRef} + /> + + + + + + + + prevValues.layers !== curValues.layers} + > + {({ getFieldValue }) => { + const layers = getFieldValue("layers") || []; + return ( + + Layers +
+ } + > + {layers.map((layer: LayerLink, idx: number) => ( + // the layer name may change in this view, the order does not, so idx is the right key choice here + + + + ))} + + ); + }} + + + + + + + + + ); +} + +function LinkedLayerForm({ + layer, + index, + onRemoveLayer, + form, + datasetId, +}: { + layer: LayerLink; + index: number; + onRemoveLayer: (layer: LayerLink) => void; + form: FormInstance; + datasetId: APIDatasetId; +}) { + const layers = Form.useWatch(["layers"]); + + React.useEffect(() => { + // Always validate all fields so that in the case of duplicate layer + // names all relevant fields are properly validated. + // This is a workaround, since shouldUpdate=true on a + // FormItemWithInfo doesn't work for some reason. + form.validateFields(); + }, [layers]); + + return ( +
+
+ +
+ + + + + layers.filter((someLayer: LayerLink) => someLayer.newName === value).length <= + 1, + "Layer names must be unique.", + ), + }, + ]} + > + + + + + + + {datasetId.name} + {" "} + / {layer.sourceName} + + + +
+ ); +} diff --git a/frontend/javascripts/admin/dataset/dataset_add_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_view.tsx index 55d42a6180a..84972fda14d 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_view.tsx @@ -1,7 +1,7 @@ import type { RouteComponentProps } from "react-router-dom"; import { withRouter } from "react-router-dom"; import { Tabs, Modal, Button, Layout } from "antd"; -import { DatabaseOutlined, UploadOutlined } from "@ant-design/icons"; +import { CopyOutlined, DatabaseOutlined, UploadOutlined } from "@ant-design/icons"; import React, { useState } from "react"; import { connect } from "react-redux"; import type { APIDataStore } from "types/api_flow_types"; @@ -12,6 +12,7 @@ import DatasetUploadView from "admin/dataset/dataset_upload_view"; import features from "features"; import { getDatastores } from "admin/admin_rest_api"; import { useFetch } from "libs/react_helpers"; +import DatasetAddComposeView from "./dataset_add_compose_view"; const { TabPane } = Tabs; const { Content, Sider } = Layout; @@ -19,6 +20,7 @@ const { Content, Sider } = Layout; enum DatasetAddViewTabs { UPLOAD = "upload", REMOTE = "remote", + COMPOSE = "compose", } function DatasetAddView({ history }: RouteComponentProps) { @@ -105,7 +107,8 @@ function DatasetAddView({ history }: RouteComponentProps) { defaultActiveTabFromHash as DatasetAddViewTabs, ) ? (defaultActiveTabFromHash as DatasetAddViewTabs) - : DatasetAddViewTabs.UPLOAD; + : // todo: revert + DatasetAddViewTabs.COMPOSE; return ( @@ -134,6 +137,17 @@ function DatasetAddView({ history }: RouteComponentProps) { > + + + Compose from existing datasets + + } + key={DatasetAddViewTabs.COMPOSE} + > + + diff --git a/frontend/javascripts/admin/dataset/dataset_components.tsx b/frontend/javascripts/admin/dataset/dataset_components.tsx index d2bab29e5cc..2b6e35341c8 100644 --- a/frontend/javascripts/admin/dataset/dataset_components.tsx +++ b/frontend/javascripts/admin/dataset/dataset_components.tsx @@ -1,9 +1,13 @@ import * as React from "react"; -import { Form, Input, Select, Card } from "antd"; +import { Form, Input, Select, Card, FormInstance } from "antd"; import messages from "messages"; import { isDatasetNameValid } from "admin/admin_rest_api"; -import type { APIDataStore, APIUser } from "types/api_flow_types"; +import type { APIDataStore, APITeam, APIUser } from "types/api_flow_types"; import { syncValidator } from "types/validation"; +import { FormItemWithInfo } from "dashboard/dataset/helper_components"; +import TeamSelectionComponent from "dashboard/dataset/team_selection_component"; +import features from "features"; + const FormItem = Form.Item; export function CardContainer({ children, @@ -43,7 +47,7 @@ export const layerNameRules = [ }, { validator: syncValidator( - (value: string) => !value.startsWith("."), + (value: string | null) => !value || !value.startsWith("."), "The name must not start with a dot.", ), }, @@ -138,3 +142,60 @@ export function DatastoreFormItem({ ); } + +export function AllowedTeamsFormItem({ + isDatasetManagerOrAdmin, + selectedTeams, + setSelectedTeams, + formRef, +}: { + isDatasetManagerOrAdmin: boolean; + selectedTeams: APITeam | Array; + setSelectedTeams: (teams: APITeam | Array) => void; + formRef: React.RefObject>; +}) { + return ( + + { + if (formRef.current == null) return; + + if (!Array.isArray(selectedTeams)) { + // Making sure that we always have an array even when only one team is selected. 
+ selectedTeams = [selectedTeams]; + } + + formRef.current.setFieldsValue({ + initialTeams: selectedTeams, + }); + setSelectedTeams(selectedTeams); + }} + afterFetchedTeams={(fetchedTeams) => { + if (!features().isWkorgInstance) { + return; + } + + const teamOfOrganization = fetchedTeams.find((team) => team.name === team.organization); + + if (teamOfOrganization == null) { + return; + } + + if (formRef.current == null) return; + formRef.current.setFieldsValue({ + initialTeams: [teamOfOrganization], + }); + setSelectedTeams([teamOfOrganization]); + }} + /> + + ); +} diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx index 08de123b4a2..2f03d24af08 100644 --- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx @@ -34,6 +34,7 @@ import messages from "messages"; import { trackAction } from "oxalis/model/helpers/analytics"; import Zip from "libs/zipjs_wrapper"; import { + AllowedTeamsFormItem, CardContainer, DatasetNameFormItem, DatastoreFormItem, @@ -155,7 +156,7 @@ class DatasetUploadView extends React.Component { unblock: ((...args: Array) => any) | null | undefined; blockTimeoutId: number | null = null; - formRef = React.createRef(); + formRef: React.RefObject> = React.createRef(); componentDidMount() { sendAnalyticsEvent("open_upload_view"); @@ -670,54 +671,12 @@ class DatasetUploadView extends React.Component { - - { - if (this.formRef.current == null) return; - - if (!Array.isArray(selectedTeams)) { - // Making sure that we always have an array even when only one team is selected. - selectedTeams = [selectedTeams]; - } - - this.formRef.current.setFieldsValue({ - initialTeams: selectedTeams, - }); - this.setState({ - selectedTeams, - }); - }} - afterFetchedTeams={(fetchedTeams) => { - if (!features().isWkorgInstance) { - return; - } - - const teamOfOrganization = fetchedTeams.find( - (team) => team.name === team.organization, - ); - - if (teamOfOrganization == null) { - return; - } - - if (this.formRef.current == null) return; - this.formRef.current.setFieldsValue({ - initialTeams: [teamOfOrganization], - }); - this.setState({ - selectedTeams: [teamOfOrganization], - }); - }} - /> - + this.setState({ selectedTeams })} + formRef={this.formRef} + /> diff --git a/frontend/javascripts/libs/estimate_affine.ts b/frontend/javascripts/libs/estimate_affine.ts index fa2e5aabd61..715a87d1385 100644 --- a/frontend/javascripts/libs/estimate_affine.ts +++ b/frontend/javascripts/libs/estimate_affine.ts @@ -48,6 +48,9 @@ export default function estimateAffine(sourcePoints: Vector3[], targetPoints: Ve return new Matrix(affineMatrix); } -export function estimateAffineMatrix4x4(sourcePoints: Vector3[], targetPoints: Vector3[]) { +export function estimateAffineMatrix4x4( + sourcePoints: Vector3[], + targetPoints: Vector3[], +): Matrix4x4 { return estimateAffine(sourcePoints, targetPoints).to1DArray() as any as Matrix4x4; } diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts index be886044623..0e2363b25dc 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts @@ -817,6 +817,11 @@ export function flatToNestedMatrix(matrix: Matrix4x4): [Vector4, Vector4, Vector ]; } +export function formatNestedMatrix(matrix: [Vector4, Vector4, Vector4, Vector4]): string { + const formatVec = 
(vec: Vector4) => `[${vec.join(", ")}]`; + return `[${matrix.map(formatVec).join(", ")}]`; +} + // Transposition is often needed so that the matrix has the right format // for matrix operations (e.g., on the GPU; but not for ThreeJS). // Inversion is needed when the position of an "output voxel" (e.g., during From 739ce7ef353a5d440ff2845e0db9836478a5f6ec Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 17 Oct 2023 16:51:06 +0200 Subject: [PATCH 02/46] wording --- .../javascripts/admin/dataset/dataset_add_compose_view.tsx | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx index d87f4d520ba..133bd492be7 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx @@ -211,7 +211,7 @@ export default function DatasetAddComposeView(props: Props) {
Big Warp Landmarks (CSV)} + label={Landmark files (NML pairs or CSV)} hasFeedback >

-

- Click or Drag your Big Warp Landmarks to this Area to Upload -

+

Drag your landmark files to this area

...

From 9722b8cd570c19649cdd3d34a2560ad09fee6d0f Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 20 Nov 2023 16:50:11 +0100 Subject: [PATCH 03/46] Add backend for composing datasets --- .../controllers/DataSourceController.scala | 13 +- .../datastore/services/ComposeService.scala | 168 ++++++++++++++++++ ....scalableminds.webknossos.datastore.routes | 1 + 3 files changed, 181 insertions(+), 1 deletion(-) create mode 100644 webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 54745d2baad..042737fe071 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -34,7 +34,8 @@ class DataSourceController @Inject()( connectomeFileService: ConnectomeFileService, storageUsageService: DSUsedStorageService, datasetErrorLoggingService: DatasetErrorLoggingService, - uploadService: UploadService + uploadService: UploadService, + composeService: ComposeService )(implicit bodyParsers: PlayBodyParsers, ec: ExecutionContext) extends Controller with FoxImplicits { @@ -531,6 +532,16 @@ Expects: } } + def compose(token: Option[String]): Action[ComposeRequest] = + Action.async(validateJson[ComposeRequest]) { implicit request => // TODO: Validate read access to every included data source + //accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationName), + // urlOrHeaderToken(token, request)) { + for { + _ <-composeService.composeDataset(request.body, urlOrHeaderToken(token, request)) + } yield Ok + //} + } + @ApiOperation(hidden = true, value = "") def listConnectomeFiles(token: Option[String], organizationName: String, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala new file mode 100644 index 00000000000..4e5a95d2c7a --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala @@ -0,0 +1,168 @@ +package com.scalableminds.webknossos.datastore.services + +import com.scalableminds.util.geometry.Vec3Double +import com.scalableminds.util.io.PathUtils +import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.dataformats.n5.{N5DataLayer, N5SegmentationLayer} +import com.scalableminds.webknossos.datastore.dataformats.precomputed.{ + PrecomputedDataLayer, + PrecomputedSegmentationLayer +} +import com.scalableminds.webknossos.datastore.dataformats.wkw.{WKWDataLayer, WKWSegmentationLayer} +import com.scalableminds.webknossos.datastore.dataformats.zarr.{ZarrDataLayer, ZarrSegmentationLayer} +import com.scalableminds.webknossos.datastore.dataformats.zarr3.{Zarr3DataLayer, Zarr3SegmentationLayer} +import com.scalableminds.webknossos.datastore.models.datasource.{ + CoordinateTransformation, + DataLayer, + DataSource, + DataSourceId, + GenericDataSource +} +import net.liftweb.util.Helpers.tryo +import play.api.libs.json.{Json, OFormat} + +import java.nio.charset.StandardCharsets +import java.nio.file.{Files, Path} +import javax.inject.Inject +import scala.concurrent.ExecutionContext + +case class ComposeRequest( + 
newDatasetName: String, + targetFolderId: String, + dataStoreHost: String, + organizationName: String, + layers: Seq[ComposeLayer] +) + +object ComposeRequest { + implicit val composeRequestFormat: OFormat[ComposeRequest] = Json.format[ComposeRequest] +} +case class ComposeLayer( + id: DataSourceId, + sourceName: String, + newName: String, + transformations: Seq[CoordinateTransformation] +) + +object ComposeLayer { + implicit val composeLayerFormat: OFormat[ComposeLayer] = Json.format[ComposeLayer] +} + +class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, + dataSourceService: DataSourceService, + remoteWebKnossosClient: DSRemoteWebKnossosClient)(implicit ec: ExecutionContext) + extends FoxImplicits { + // TODO: Extract to common trait with UploadService? + val dataBaseDir: Path = dataSourceService.dataBaseDir + + private def addSymlinksToOtherDatasetLayers(dataSetDir: Path, layersToLink: List[LinkedLayerIdentifier]): Fox[Unit] = + Fox + .serialCombined(layersToLink) { layerToLink => + val layerPath = layerToLink.pathIn(dataBaseDir) + val newLayerPath = dataSetDir.resolve(layerToLink.newLayerName.getOrElse(layerToLink.layerName)) + for { + _ <- bool2Fox(!Files.exists(newLayerPath)) ?~> s"Cannot symlink layer at $newLayerPath: a layer with this name already exists." + _ <- bool2Fox(Files.exists(layerPath)) ?~> s"Cannot symlink to layer at $layerPath: The layer does not exist." + _ <- tryo { + Files.createSymbolicLink(newLayerPath, newLayerPath.getParent.relativize(layerPath)) + } ?~> s"Failed to create symlink at $newLayerPath." + } yield () + } + .map { _ => + () + } + + private def uploadDirectory(organizationName: String, name: String): Path = + dataBaseDir.resolve(organizationName).resolve(name) + + def composeDataset(composeRequest: ComposeRequest, userToken: Option[String])( + implicit ec: ExecutionContext): Fox[Unit] = + for { + _ <- Fox.successful(()) + reserveUploadInfo = ReserveUploadInformation("", + composeRequest.newDatasetName, + composeRequest.organizationName, + 1, + None, + List(), + Some(composeRequest.targetFolderId)) + _ <- remoteWebKnossosClient.reserveDataSourceUpload(reserveUploadInfo, userToken) ?~> "reserveUpload.failed" + directory = uploadDirectory(composeRequest.organizationName, composeRequest.newDatasetName) + _ = PathUtils.ensureDirectory(directory) + dataSource <- createDatasource(composeRequest, composeRequest.organizationName) + properties = Json.toJson(dataSource).toString().getBytes(StandardCharsets.UTF_8) + _ = Files.write(directory.resolve(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON), properties) + } yield () + + private def getLayerFromComposeLayer(composeLayer: ComposeLayer, + organizationName: String, + datasetName: String): Fox[DataLayer] = + for { + _ <- Fox.successful(()) + dataSource <- Fox.option2Fox(dataSourceRepository.find(composeLayer.id)) + ds <- Fox.option2Fox(dataSource.toUsable) + layer <- Fox.option2Fox(ds.dataLayers.find(_.name == composeLayer.sourceName)) + applyCoordinateTransformations = (cOpt: Option[List[CoordinateTransformation]]) => + cOpt match { + case Some(c) => Some(c ++ composeLayer.transformations.toList) + case None => Some(composeLayer.transformations.toList) + } + linkedLayerIdentifier = LinkedLayerIdentifier(composeLayer.id.team, + composeLayer.id.name, + composeLayer.sourceName, + Some(composeLayer.newName)) + layerIsRemote = isLayerRemote(composeLayer.id, composeLayer.sourceName) + _ <- Fox.runIf(!layerIsRemote)( + addSymlinksToOtherDatasetLayers(uploadDirectory(organizationName, 
datasetName), List(linkedLayerIdentifier))) + editedLayer: DataLayer = layer match { + case l: PrecomputedDataLayer => + l.copy(name = composeLayer.newName, + coordinateTransformations = applyCoordinateTransformations(l.coordinateTransformations)) + case l: PrecomputedSegmentationLayer => + l.copy(name = composeLayer.newName, + coordinateTransformations = applyCoordinateTransformations(l.coordinateTransformations)) + case l: ZarrDataLayer => + l.copy(name = composeLayer.newName, + coordinateTransformations = applyCoordinateTransformations(l.coordinateTransformations)) + case l: ZarrSegmentationLayer => + l.copy(name = composeLayer.newName, + coordinateTransformations = applyCoordinateTransformations(l.coordinateTransformations)) + case l: N5DataLayer => + l.copy(name = composeLayer.newName, + coordinateTransformations = applyCoordinateTransformations(l.coordinateTransformations)) + case l: N5SegmentationLayer => + l.copy(name = composeLayer.newName, + coordinateTransformations = applyCoordinateTransformations(l.coordinateTransformations)) + case l: Zarr3DataLayer => + l.copy(name = composeLayer.newName, + coordinateTransformations = applyCoordinateTransformations(l.coordinateTransformations)) + case l: Zarr3SegmentationLayer => + l.copy(name = composeLayer.newName, + coordinateTransformations = applyCoordinateTransformations(l.coordinateTransformations)) + case l: WKWDataLayer => + l.copy(name = composeLayer.newName, + coordinateTransformations = applyCoordinateTransformations(l.coordinateTransformations)) + case l: WKWSegmentationLayer => + l.copy(name = composeLayer.newName, + coordinateTransformations = applyCoordinateTransformations(l.coordinateTransformations)) + } + } yield editedLayer + + private def createDatasource(composeRequest: ComposeRequest, organizationName: String): Fox[DataSource] = + for { + layers <- Fox.serialCombined(composeRequest.layers.toList)( + getLayerFromComposeLayer(_, organizationName, composeRequest.newDatasetName)) + dataSource = GenericDataSource( + DataSourceId(composeRequest.newDatasetName, organizationName), + layers, + Vec3Double(1, 1, 1), + None + ) + + } yield dataSource + + private def isLayerRemote(dataSourceId: DataSourceId, layerName: String) = { + val layerPath = dataBaseDir.resolve(dataSourceId.team).resolve(dataSourceId.name).resolve(layerName) + !Files.exists(layerPath) + } +} diff --git a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes index 7d6fd6e0a13..889f60f0ef8 100644 --- a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes +++ b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes @@ -84,6 +84,7 @@ POST /datasets/:organizationName/:dataSetName PUT /datasets/:organizationName/:dataSetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(token: Option[String], organizationName: String, dataSetName: String, folderId: Option[String]) GET /datasets/:organizationName/:dataSetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.explore(token: Option[String], organizationName: String, dataSetName: String) DELETE /datasets/:organizationName/:dataSetName/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(token: Option[String], organizationName: String, dataSetName: String) +POST /datasets/compose @com.scalableminds.webknossos.datastore.controllers.DataSourceController.compose(token: Option[String]) # Actions 
POST /triggers/checkInboxBlocking @com.scalableminds.webknossos.datastore.controllers.DataSourceController.triggerInboxCheckBlocking(token: Option[String]) From 2ce1031f771566f7fc2160565f4043f575e7b1b1 Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 20 Nov 2023 16:58:01 +0100 Subject: [PATCH 04/46] Extract common method --- .../datastore/services/ComposeService.scala | 17 ++++++++------ .../datastore/services/UploadService.scala | 22 ++----------------- 2 files changed, 12 insertions(+), 27 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala index 4e5a95d2c7a..6f0b7498cc0 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala @@ -48,14 +48,10 @@ object ComposeLayer { implicit val composeLayerFormat: OFormat[ComposeLayer] = Json.format[ComposeLayer] } -class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, - dataSourceService: DataSourceService, - remoteWebKnossosClient: DSRemoteWebKnossosClient)(implicit ec: ExecutionContext) - extends FoxImplicits { - // TODO: Extract to common trait with UploadService? - val dataBaseDir: Path = dataSourceService.dataBaseDir +class SymlinkHelper(dataSourceService: DataSourceService)(implicit ec: ExecutionContext) extends FoxImplicits { - private def addSymlinksToOtherDatasetLayers(dataSetDir: Path, layersToLink: List[LinkedLayerIdentifier]): Fox[Unit] = + val dataBaseDir: Path = dataSourceService.dataBaseDir + def addSymlinksToOtherDatasetLayers(dataSetDir: Path, layersToLink: List[LinkedLayerIdentifier]): Fox[Unit] = Fox .serialCombined(layersToLink) { layerToLink => val layerPath = layerToLink.pathIn(dataBaseDir) @@ -71,6 +67,13 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, .map { _ => () } +} + +class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, + dataSourceService: DataSourceService, + remoteWebKnossosClient: DSRemoteWebKnossosClient)(implicit ec: ExecutionContext) + extends SymlinkHelper(dataSourceService)(ec) + with FoxImplicits { private def uploadDirectory(organizationName: String, name: String): Path = dataBaseDir.resolve(organizationName).resolve(name) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala index 047d84a01e1..bea9d3dce7a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala @@ -63,9 +63,8 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, extends LazyLogging with DataSetDeleter with DirectoryConstants - with FoxImplicits { - - val dataBaseDir: Path = dataSourceService.dataBaseDir + with FoxImplicits + with SymlinkHelper(dataSourceService) { /* Redis stores different information for each upload, with different prefixes in the keys: * uploadId -> fileCount @@ -276,23 +275,6 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, dataSourceDir } - private def addSymlinksToOtherDatasetLayers(dataSetDir: Path, layersToLink: List[LinkedLayerIdentifier]): Fox[Unit] = - Fox - .serialCombined(layersToLink) { 
layerToLink => - val layerPath = layerToLink.pathIn(dataBaseDir) - val newLayerPath = dataSetDir.resolve(layerToLink.newLayerName.getOrElse(layerToLink.layerName)) - for { - _ <- bool2Fox(!Files.exists(newLayerPath)) ?~> s"Cannot symlink layer at $newLayerPath: a layer with this name already exists." - _ <- bool2Fox(Files.exists(layerPath)) ?~> s"Cannot symlink to layer at $layerPath: The layer does not exist." - _ <- tryo { - Files.createSymbolicLink(newLayerPath, newLayerPath.getParent.relativize(layerPath)) - } ?~> s"Failed to create symlink at $newLayerPath." - } yield () - } - .map { _ => - () - } - private def addLinkedLayersToDataSourceProperties(unpackToDir: Path, organizationName: String, layersToLink: List[LinkedLayerIdentifier]): Fox[Unit] = From 0fa38a405868b56514d7e304a274d14aa033da7c Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 20 Nov 2023 17:14:12 +0100 Subject: [PATCH 05/46] Use scale and check if directory is writeable --- .../datastore/services/ComposeService.scala | 22 +++++++++---------- .../datastore/services/UploadService.scala | 4 ++-- 2 files changed, 12 insertions(+), 14 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala index 6f0b7498cc0..4a08749b11d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala @@ -31,6 +31,7 @@ case class ComposeRequest( targetFolderId: String, dataStoreHost: String, organizationName: String, + scale: Vec3Double, layers: Seq[ComposeLayer] ) @@ -81,7 +82,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, def composeDataset(composeRequest: ComposeRequest, userToken: Option[String])( implicit ec: ExecutionContext): Fox[Unit] = for { - _ <- Fox.successful(()) + _ <- Fox.bool2Fox(Files.isWritable(dataBaseDir)) ?~> "Datastore can not write to its data directory." reserveUploadInfo = ReserveUploadInformation("", composeRequest.newDatasetName, composeRequest.organizationName, @@ -89,7 +90,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, None, List(), Some(composeRequest.targetFolderId)) - _ <- remoteWebKnossosClient.reserveDataSourceUpload(reserveUploadInfo, userToken) ?~> "reserveUpload.failed" + _ <- remoteWebKnossosClient.reserveDataSourceUpload(reserveUploadInfo, userToken) ?~> "Failed to reserve upload." 
directory = uploadDirectory(composeRequest.organizationName, composeRequest.newDatasetName) _ = PathUtils.ensureDirectory(directory) dataSource <- createDatasource(composeRequest, composeRequest.organizationName) @@ -97,11 +98,8 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, _ = Files.write(directory.resolve(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON), properties) } yield () - private def getLayerFromComposeLayer(composeLayer: ComposeLayer, - organizationName: String, - datasetName: String): Fox[DataLayer] = + private def getLayerFromComposeLayer(composeLayer: ComposeLayer, uploadDir: Path): Fox[DataLayer] = for { - _ <- Fox.successful(()) dataSource <- Fox.option2Fox(dataSourceRepository.find(composeLayer.id)) ds <- Fox.option2Fox(dataSource.toUsable) layer <- Fox.option2Fox(ds.dataLayers.find(_.name == composeLayer.sourceName)) @@ -115,8 +113,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, composeLayer.sourceName, Some(composeLayer.newName)) layerIsRemote = isLayerRemote(composeLayer.id, composeLayer.sourceName) - _ <- Fox.runIf(!layerIsRemote)( - addSymlinksToOtherDatasetLayers(uploadDirectory(organizationName, datasetName), List(linkedLayerIdentifier))) + _ <- Fox.runIf(!layerIsRemote)(addSymlinksToOtherDatasetLayers(uploadDir, List(linkedLayerIdentifier))) editedLayer: DataLayer = layer match { case l: PrecomputedDataLayer => l.copy(name = composeLayer.newName, @@ -151,18 +148,19 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, } } yield editedLayer - private def createDatasource(composeRequest: ComposeRequest, organizationName: String): Fox[DataSource] = + private def createDatasource(composeRequest: ComposeRequest, organizationName: String): Fox[DataSource] = { + val uploadDir = uploadDirectory(organizationName, composeRequest.newDatasetName) for { - layers <- Fox.serialCombined(composeRequest.layers.toList)( - getLayerFromComposeLayer(_, organizationName, composeRequest.newDatasetName)) + layers <- Fox.serialCombined(composeRequest.layers.toList)(getLayerFromComposeLayer(_, uploadDir)) dataSource = GenericDataSource( DataSourceId(composeRequest.newDatasetName, organizationName), layers, - Vec3Double(1, 1, 1), + composeRequest.scale, None ) } yield dataSource + } private def isLayerRemote(dataSourceId: DataSourceId, layerName: String) = { val layerPath = dataBaseDir.resolve(dataSourceId.team).resolve(dataSourceId.name).resolve(layerName) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala index 21506041978..612cb889d44 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/UploadService.scala @@ -65,11 +65,11 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, dataSourceService: DataSourceService, runningUploadMetadataStore: DataStoreRedisStore, exploreLocalLayerService: ExploreLocalLayerService)(implicit ec: ExecutionContext) - extends LazyLogging + extends SymlinkHelper(dataSourceService) with DataSetDeleter with DirectoryConstants with FoxImplicits - with SymlinkHelper(dataSourceService) { + with LazyLogging { /* Redis stores different information for each upload, with different prefixes in the keys: * uploadId -> fileCount From e7b797280ceaa350c13a1dc4adcb494c5a2c4645 Mon Sep 17 
00:00:00 2001 From: frcroth Date: Mon, 20 Nov 2023 17:46:51 +0100 Subject: [PATCH 06/46] Validate user access to all included datasets --- .../datastore/controllers/DataSourceController.scala | 12 +++++++----- .../datastore/services/AccessTokenService.scala | 7 +++++++ .../datastore/services/ComposeService.scala | 2 +- 3 files changed, 15 insertions(+), 6 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 042737fe071..4eb09b534a0 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -533,13 +533,15 @@ Expects: } def compose(token: Option[String]): Action[ComposeRequest] = - Action.async(validateJson[ComposeRequest]) { implicit request => // TODO: Validate read access to every included data source - //accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(organizationName), - // urlOrHeaderToken(token, request)) { + Action.async(validateJson[ComposeRequest]) { implicit request => + val userToken = urlOrHeaderToken(token, request) + accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organizationName), token) { for { - _ <-composeService.composeDataset(request.body, urlOrHeaderToken(token, request)) + _ <- Fox.serialCombined(request.body.layers.map(_.id).toList)(id => + accessTokenService.assertUserAccess(UserAccessRequest.readDataSources(id), userToken)) + _ <- composeService.composeDataset(request.body, userToken) } yield Ok - //} + } } @ApiOperation(hidden = true, value = "") diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala index 48cc35ef4c5..a40b4923d2c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala @@ -77,6 +77,13 @@ trait AccessTokenService { accessAnswersCache.getOrLoad((accessRequest, token), _ => remoteWebKnossosClient.requestUserAccess(token, accessRequest)) + def assertUserAccess(accessRequest: UserAccessRequest, token: Option[String])( + implicit ec: ExecutionContext): Fox[Unit] = + for { + userAccessAnswer <- hasUserAccess(accessRequest, token) ?~> "Failed to check data access, token may be expired, consider reloading." 
+ _ <- Fox.bool2Fox(userAccessAnswer.granted) ?~> userAccessAnswer.msg.getOrElse("Access forbidden.") + } yield () + private def executeBlockOnPositiveAnswer(userAccessAnswer: UserAccessAnswer, block: => Future[Result]): Future[Result] = userAccessAnswer match { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala index 4a08749b11d..971d05f60bc 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala @@ -29,7 +29,6 @@ import scala.concurrent.ExecutionContext case class ComposeRequest( newDatasetName: String, targetFolderId: String, - dataStoreHost: String, organizationName: String, scale: Vec3Double, layers: Seq[ComposeLayer] @@ -83,6 +82,7 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, implicit ec: ExecutionContext): Fox[Unit] = for { _ <- Fox.bool2Fox(Files.isWritable(dataBaseDir)) ?~> "Datastore can not write to its data directory." + reserveUploadInfo = ReserveUploadInformation("", composeRequest.newDatasetName, composeRequest.organizationName, From f131d7af24c0cb3cce7f66ed22f7f87da9cb06ef Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 24 Nov 2023 14:56:03 +0100 Subject: [PATCH 07/46] integrate new compose route --- frontend/javascripts/admin/admin_rest_api.ts | 17 +++++ .../dataset/dataset_add_compose_view.tsx | 72 ++++++++++--------- .../admin/dataset/dataset_add_remote_view.tsx | 3 +- .../admin/dataset/dataset_add_view.tsx | 47 ++++++++---- .../admin/dataset/dataset_upload_view.tsx | 3 +- frontend/javascripts/types/api_flow_types.ts | 8 +++ 6 files changed, 97 insertions(+), 53 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 91b9e43fe06..7f6d0696d9e 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -66,6 +66,7 @@ import type { MaintenanceInfo, AdditionalCoordinate, RenderAnimationOptions, + LayerLink, } from "types/api_flow_types"; import { APIAnnotationTypeEnum } from "types/api_flow_types"; import type { LOG_LEVELS, Vector2, Vector3, Vector6 } from "oxalis/constants"; @@ -1501,6 +1502,22 @@ export function getDatasetAccessList(datasetId: APIDatasetId): Promise + Request.sendJSONReceiveJSON(`${datastoreUrl}/data/datasets/compose?token=${token}`, { + data: payload, + }), + ); +} + export function createResumableUpload(datastoreUrl: string, uploadId: string): Promise { // @ts-expect-error ts-migrate(7006) FIXME: Parameter 'file' implicitly has an 'any' type. 
const generateUniqueIdentifier = (file) => { diff --git a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx index 133bd492be7..5a1b65205a9 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx @@ -28,28 +28,24 @@ import { APIDatasetId, APITeam, CoordinateTransformation, + APIDataStore, + LayerLink, } from "types/api_flow_types"; import { syncValidator } from "types/validation"; -import { getDataset } from "admin/admin_rest_api"; +import { createDatasetComposition, getDataset } from "admin/admin_rest_api"; +import Toast from "libs/toast"; const FormItem = Form.Item; type FileList = UploadFile[]; -type LayerLink = { - datasetId: APIDatasetId; - sourceName: string; - newName: string; - transformations: CoordinateTransformation[]; -}; - type Props = { onAdded: ( datasetOrganization: string, uploadedDatasetName: string, - isRemoteDataset: boolean, needsConversion?: boolean | null | undefined, ) => Promise; + datastores: APIDataStore[]; }; export default function DatasetAddComposeView(props: Props) { @@ -59,8 +55,8 @@ export default function DatasetAddComposeView(props: Props) { const isDatasetManagerOrAdmin = Utils.isUserAdminOrDatasetManager(activeUser); const [form] = Form.useForm(); const [fileList, setFileList] = useState([]); - const [matrix, setMatrix] = useState(null); const [selectedTeams, setSelectedTeams] = useState>([]); + const [linkedDatasets, setLinkedDatasets] = useState([]); const onRemoveLayer = (layer: LayerLink) => { const oldLayers = form.getFieldValue(["layers"]); @@ -133,16 +129,16 @@ export default function DatasetAddComposeView(props: Props) { ); for (const [node1, node2] of _.zip(nodes1, nodes2)) { - if (node1 == null) { - throw new Error("Empty trees found."); + if ((node1 == null) != (node2 == null)) { + throw new Error("A tree was empty while its corresponding tree wasn't."); } - sourcePoints.push(node1.position); - if (node2 == null) { - throw new Error("Empty trees found."); + if (node1 != null && node2 != null) { + sourcePoints.push(node1.position); + targetPoints.push(node2.position); } - targetPoints.push(node2.position); } const datasets = [dataset1, dataset2]; + setLinkedDatasets(datasets); const newMatrix = estimateAffineMatrix4x4(sourcePoints, targetPoints); const newLinks: LayerLink[] = ( _.flatMap(datasets, (dataset) => @@ -150,6 +146,12 @@ export default function DatasetAddComposeView(props: Props) { ) as [APIDataset, APIDataLayer][] ).map( ([dataset, dataLayer]): LayerLink => ({ + // todo: backend should expect datasetId + id: { + // todo: backend should expect owningOrganization, too + team: dataset.owningOrganization, + name: dataset.name, + }, datasetId: { owningOrganization: dataset.owningOrganization, name: dataset.name, @@ -172,27 +174,28 @@ export default function DatasetAddComposeView(props: Props) { } }; - // The following transform should be added to the layer which belongs to the first NML - // so that it is transformed to fit the layer of the second NML. - // const datasourceJSON = - // matrix != null - // ? 
` - // "coordinateTransformations": [{ - // "type": "affine", - // "matrix": ${formatNestedMatrix(flatToNestedMatrix(matrix))} - // }] - // ` - // : ""; - - const handleSubmit = () => { + const handleSubmit = async () => { + if (activeUser == null) { + throw new Error("Cannot upload dataset without being logged in."); + } const layers = form.getFieldValue(["layers"]); - // @ts-ignore - const createDatasetComposition = (obj) => console.log(obj); - createDatasetComposition({ - newDatasetName: form.getFieldValue(["name"]), + + const uploadableDatastores = props.datastores.filter((datastore) => datastore.allowsUpload); + const datastoreToUse = uploadableDatastores[0]; + if (!datastoreToUse) { + Toast.error("Could not find datastore that allows uploading."); + return; + } + + const newDatasetName = form.getFieldValue(["name"]); + await createDatasetComposition(datastoreToUse.url, { + newDatasetName, + targetFolderId: form.getFieldValue(["targetFolderId"]), + organizationName: "sample_organization", + scale: linkedDatasets[1].dataSource.scale, layers, - // - scale }); + props.onAdded(activeUser.organization, newDatasetName, false); }; return ( @@ -334,6 +337,7 @@ function LinkedLayerForm({ form: FormInstance; datasetId: APIDatasetId; }) { + const activeUser = useSelector((state: OxalisState) => state.activeUser); const layers = Form.useWatch(["layers"]); React.useEffect(() => { diff --git a/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx index e06e81ca944..36a187d806c 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx @@ -48,7 +48,6 @@ type OwnProps = { onAdded: ( datasetOrganization: string, uploadedDatasetName: string, - isRemoteDataset: boolean, needsConversion?: boolean | null | undefined, ) => Promise; datastores: APIDataStore[]; @@ -228,7 +227,7 @@ function DatasetAddRemoteView(props: Props) { Toast.error(`The datasource config could not be stored. ${e}`); return; } - onAdded(activeUser.organization, configJSON.id.name, true); + onAdded(activeUser.organization, configJSON.id.name); } } diff --git a/frontend/javascripts/admin/dataset/dataset_add_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_view.tsx index 84972fda14d..9ee0056787e 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_view.tsx @@ -17,28 +17,36 @@ import DatasetAddComposeView from "./dataset_add_compose_view"; const { TabPane } = Tabs; const { Content, Sider } = Layout; -enum DatasetAddViewTabs { +// Used for the tab keys as well as for +// distinguishing between the add type after +// successful import. 
+enum DatasetAddType { UPLOAD = "upload", REMOTE = "remote", COMPOSE = "compose", } +const addTypeToVerb: Record = { + upload: "uploaded", + remote: "added", + compose: "created", +}; function DatasetAddView({ history }: RouteComponentProps) { const datastores = useFetch(getDatastores, [], []); const [datasetName, setDatasetName] = useState(""); const [organization, setOrganization] = useState(""); const [datasetNeedsConversion, setDatasetNeedsConversion] = useState(false); - const [isRemoteDataset, setIsRemoteDataset] = useState(false); + const [datasetAddType, setImportType] = useState(DatasetAddType.UPLOAD); const handleDatasetAdded = async ( + datasetAddType: DatasetAddType, datasetOrganization: string, uploadedDatasetName: string, - isRemoteDataset: boolean, needsConversion: boolean | null | undefined, ): Promise => { setOrganization(datasetOrganization); setDatasetName(uploadedDatasetName); - setIsRemoteDataset(isRemoteDataset); + setImportType(datasetAddType); // @ts-expect-error ts-migrate(2345) FIXME: Argument of type 'boolean | null | undefined' is n... Remove this comment to see the full error message setDatasetNeedsConversion(needsConversion); }; @@ -58,7 +66,7 @@ function DatasetAddView({ history }: RouteComponentProps) { textAlign: "center", }} > - The dataset was {isRemoteDataset ? "imported" : "uploaded"} successfully + The dataset was {addTypeToVerb[datasetAddType]} successfully {datasetNeedsConversion ? " and a conversion job was started." : null}.
@@ -122,9 +130,12 @@ function DatasetAddView({ history }: RouteComponentProps) { Upload Dataset } - key={DatasetAddViewTabs.UPLOAD} + key={DatasetAddType.UPLOAD} > - + } - key={DatasetAddViewTabs.REMOTE} + key={DatasetAddType.REMOTE} > - + } - key={DatasetAddViewTabs.COMPOSE} + key={DatasetAddType.COMPOSE} > - + diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx index e158aadeb7d..2d884213327 100644 --- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx @@ -60,7 +60,7 @@ const logRetryToAnalytics = _.throttle((datasetName: string) => { type OwnProps = { datastores: Array; withoutCard?: boolean; - onUploaded: (arg0: string, arg1: string, arg2: boolean, arg3: boolean) => Promise | void; + onUploaded: (arg0: string, arg1: string, arg2: boolean) => Promise | void; }; type StateProps = { activeUser: APIUser | null | undefined; @@ -346,7 +346,6 @@ class DatasetUploadView extends React.Component { this.props.onUploaded( activeUser.organization, formValues.name, - false, this.state.needsConversion, ); } diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index 8b9775bc0ed..92e5c7c1a57 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -84,6 +84,14 @@ export type APISegmentationLayer = APIDataLayerBase & { readonly tracingId?: string; }; export type APIDataLayer = APIColorLayer | APISegmentationLayer; + +export type LayerLink = { + datasetId: APIDatasetId; + sourceName: string; + newName: string; + transformations: CoordinateTransformation[]; +}; + export type APIHistogramData = HistogramDatum[]; export type HistogramDatum = { numberOfElements: number; From 75471b271ace988ebdfbb9ad00838a2439a2faee Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 24 Nov 2023 14:56:12 +0100 Subject: [PATCH 08/46] temporarily disable most ci checks --- .circleci/not-on-master.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.circleci/not-on-master.sh b/.circleci/not-on-master.sh index 581393ebead..e3078cdb9ce 100755 --- a/.circleci/not-on-master.sh +++ b/.circleci/not-on-master.sh @@ -1,8 +1,8 @@ #!/usr/bin/env bash set -Eeuo pipefail -if [ "${CIRCLE_BRANCH}" == "master" ]; then +# if [ "${CIRCLE_BRANCH}" == "master" ]; then echo "Skipping this step on master..." 
-else - exec "$@" -fi +# else +# exec "$@" +# fi From 47e4508f789b7feb04f519d5f6275e023d3632d7 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Fri, 24 Nov 2023 15:03:36 +0100 Subject: [PATCH 09/46] improve loading state --- .../dataset/dataset_add_compose_view.tsx | 23 +++++++++++++------ 1 file changed, 16 insertions(+), 7 deletions(-) diff --git a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx index 5a1b65205a9..143586c201c 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx @@ -51,6 +51,7 @@ type Props = { export default function DatasetAddComposeView(props: Props) { const formRef = React.useRef>(null); + const [isLoading, setIsLoading] = useState(false); const activeUser = useSelector((state: OxalisState) => state.activeUser); const isDatasetManagerOrAdmin = Utils.isUserAdminOrDatasetManager(activeUser); const [form] = Form.useForm(); @@ -188,13 +189,20 @@ export default function DatasetAddComposeView(props: Props) { } const newDatasetName = form.getFieldValue(["name"]); - await createDatasetComposition(datastoreToUse.url, { - newDatasetName, - targetFolderId: form.getFieldValue(["targetFolderId"]), - organizationName: "sample_organization", - scale: linkedDatasets[1].dataSource.scale, - layers, - }); + setIsLoading(true); + try { + await createDatasetComposition(datastoreToUse.url, { + newDatasetName, + targetFolderId: form.getFieldValue(["targetFolderId"]), + organizationName: "sample_organization", + scale: linkedDatasets[1].dataSource.scale, + layers, + }); + await Utils.sleep(3000); // wait for 3 seconds so the server can catch up / do its thing + } finally { + setIsLoading(false); + } + props.onAdded(activeUser.organization, newDatasetName, false); }; @@ -311,6 +319,7 @@ export default function DatasetAddComposeView(props: Props) { size="large" type="primary" htmlType="submit" + loading={isLoading} style={{ width: "100%", }} From 49cd2a65a97d0bed14d1a13cbdcdcc14bcadb3f9 Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 27 Nov 2023 09:31:06 +0100 Subject: [PATCH 10/46] Do not use datasource id for compose API --- .../controllers/DataSourceController.scala | 7 +++++-- .../datastore/services/ComposeService.scala | 15 +++++++++++---- 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 4eb09b534a0..5cead95444c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -537,8 +537,11 @@ Expects: val userToken = urlOrHeaderToken(token, request) accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organizationName), token) { for { - _ <- Fox.serialCombined(request.body.layers.map(_.id).toList)(id => - accessTokenService.assertUserAccess(UserAccessRequest.readDataSources(id), userToken)) + _ <- Fox.serialCombined(request.body.layers.map(_.id).toList)( + id => + accessTokenService.assertUserAccess( + UserAccessRequest.readDataSources(DataSourceId(id.name, id.owningOrganization)), + userToken)) _ <- composeService.composeDataset(request.body, userToken) } yield Ok } diff --git 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala index 971d05f60bc..e2c385c17c3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala @@ -38,7 +38,7 @@ object ComposeRequest { implicit val composeRequestFormat: OFormat[ComposeRequest] = Json.format[ComposeRequest] } case class ComposeLayer( - id: DataSourceId, + id: DataLayerId, sourceName: String, newName: String, transformations: Seq[CoordinateTransformation] @@ -48,6 +48,12 @@ object ComposeLayer { implicit val composeLayerFormat: OFormat[ComposeLayer] = Json.format[ComposeLayer] } +case class DataLayerId(name: String, owningOrganization: String) + +object DataLayerId { + implicit val dataLayerIdFormat: OFormat[DataLayerId] = Json.format[DataLayerId] +} + class SymlinkHelper(dataSourceService: DataSourceService)(implicit ec: ExecutionContext) extends FoxImplicits { val dataBaseDir: Path = dataSourceService.dataBaseDir @@ -100,7 +106,8 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, private def getLayerFromComposeLayer(composeLayer: ComposeLayer, uploadDir: Path): Fox[DataLayer] = for { - dataSource <- Fox.option2Fox(dataSourceRepository.find(composeLayer.id)) + dataSourceId <- Fox.successful(DataSourceId(composeLayer.id.name, composeLayer.id.owningOrganization)) + dataSource <- Fox.option2Fox(dataSourceRepository.find(dataSourceId)) ds <- Fox.option2Fox(dataSource.toUsable) layer <- Fox.option2Fox(ds.dataLayers.find(_.name == composeLayer.sourceName)) applyCoordinateTransformations = (cOpt: Option[List[CoordinateTransformation]]) => @@ -108,11 +115,11 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, case Some(c) => Some(c ++ composeLayer.transformations.toList) case None => Some(composeLayer.transformations.toList) } - linkedLayerIdentifier = LinkedLayerIdentifier(composeLayer.id.team, + linkedLayerIdentifier = LinkedLayerIdentifier(composeLayer.id.owningOrganization, composeLayer.id.name, composeLayer.sourceName, Some(composeLayer.newName)) - layerIsRemote = isLayerRemote(composeLayer.id, composeLayer.sourceName) + layerIsRemote = isLayerRemote(dataSourceId, composeLayer.sourceName) _ <- Fox.runIf(!layerIsRemote)(addSymlinksToOtherDatasetLayers(uploadDir, List(linkedLayerIdentifier))) editedLayer: DataLayer = layer match { case l: PrecomputedDataLayer => From bc2d2780135e963a7b6bfa44bde9938f10bf09df Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 27 Nov 2023 11:03:21 +0100 Subject: [PATCH 11/46] Refresh inbox after composing dataset --- .../webknossos/datastore/controllers/DataSourceController.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 5cead95444c..aa03d9aa9f3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -543,6 +543,7 @@ Expects: UserAccessRequest.readDataSources(DataSourceId(id.name, id.owningOrganization)), userToken)) _ <- composeService.composeDataset(request.body, userToken) + _ 
<- dataSourceService.checkInbox(verbose = false) } yield Ok } } From 034c1b48b22fe9187aa09af6e1f5e5ac7a951901 Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 27 Nov 2023 11:53:37 +0100 Subject: [PATCH 12/46] Rename id to datasetId --- .../datastore/controllers/DataSourceController.scala | 2 +- .../webknossos/datastore/services/ComposeService.scala | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index aa03d9aa9f3..7f019321719 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -537,7 +537,7 @@ Expects: val userToken = urlOrHeaderToken(token, request) accessTokenService.validateAccess(UserAccessRequest.administrateDataSources(request.body.organizationName), token) { for { - _ <- Fox.serialCombined(request.body.layers.map(_.id).toList)( + _ <- Fox.serialCombined(request.body.layers.map(_.datasetId).toList)( id => accessTokenService.assertUserAccess( UserAccessRequest.readDataSources(DataSourceId(id.name, id.owningOrganization)), diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala index e2c385c17c3..783c11cfdc3 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ComposeService.scala @@ -38,7 +38,7 @@ object ComposeRequest { implicit val composeRequestFormat: OFormat[ComposeRequest] = Json.format[ComposeRequest] } case class ComposeLayer( - id: DataLayerId, + datasetId: DataLayerId, sourceName: String, newName: String, transformations: Seq[CoordinateTransformation] @@ -106,7 +106,8 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, private def getLayerFromComposeLayer(composeLayer: ComposeLayer, uploadDir: Path): Fox[DataLayer] = for { - dataSourceId <- Fox.successful(DataSourceId(composeLayer.id.name, composeLayer.id.owningOrganization)) + dataSourceId <- Fox.successful( + DataSourceId(composeLayer.datasetId.name, composeLayer.datasetId.owningOrganization)) dataSource <- Fox.option2Fox(dataSourceRepository.find(dataSourceId)) ds <- Fox.option2Fox(dataSource.toUsable) layer <- Fox.option2Fox(ds.dataLayers.find(_.name == composeLayer.sourceName)) @@ -115,8 +116,8 @@ class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, case Some(c) => Some(c ++ composeLayer.transformations.toList) case None => Some(composeLayer.transformations.toList) } - linkedLayerIdentifier = LinkedLayerIdentifier(composeLayer.id.owningOrganization, - composeLayer.id.name, + linkedLayerIdentifier = LinkedLayerIdentifier(composeLayer.datasetId.owningOrganization, + composeLayer.datasetId.name, composeLayer.sourceName, Some(composeLayer.newName)) layerIsRemote = isLayerRemote(dataSourceId, composeLayer.sourceName) From a47e04a2858efdc51d42bb4590a23e46ca55699e Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Mon, 27 Nov 2023 11:58:26 +0100 Subject: [PATCH 13/46] remove sleep and id workaround; also improve formatting of jumping-to-position-toast --- 
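Note on the toast change below: the position is now floored per coordinate before it is printed, while the zoom value keeps two decimals. A minimal sketch of the resulting formatting (formatJumpMessage is a hypothetical helper for illustration, not part of the codebase):

    const formatJumpMessage = (position: [number, number, number], zoomValue: number): string =>
      `Jumping to position ${position.map((el) => Math.floor(el)).join(", ")} and zooming to ${zoomValue.toFixed(2)}`;

    // formatJumpMessage([10.7, 20.2, 30.9], 1.234)
    // => "Jumping to position 10, 20, 30 and zooming to 1.23"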
.../javascripts/admin/dataset/dataset_add_compose_view.tsx | 7 ------- .../oxalis/view/left-border-tabs/layer_settings_tab.tsx | 4 +++- 2 files changed, 3 insertions(+), 8 deletions(-) diff --git a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx index 143586c201c..39f6d0e544f 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx @@ -147,12 +147,6 @@ export default function DatasetAddComposeView(props: Props) { ) as [APIDataset, APIDataLayer][] ).map( ([dataset, dataLayer]): LayerLink => ({ - // todo: backend should expect datasetId - id: { - // todo: backend should expect owningOrganization, too - team: dataset.owningOrganization, - name: dataset.name, - }, datasetId: { owningOrganization: dataset.owningOrganization, name: dataset.name, @@ -198,7 +192,6 @@ export default function DatasetAddComposeView(props: Props) { scale: linkedDatasets[1].dataSource.scale, layers, }); - await Utils.sleep(3000); // wait for 3 seconds so the server can catch up / do its thing } finally { setIsLoading(false); } diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx index 050ec7f4ab7..c19c5ecbf49 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/layer_settings_tab.tsx @@ -978,7 +978,9 @@ class DatasetSettings extends React.PureComponent { this.props.onSetPosition(foundPosition); const zoomValue = this.props.onZoomToResolution(layerName, foundResolution); Toast.success( - `Jumping to position ${foundPosition.join(", ")} and zooming to ${zoomValue.toFixed(2)}`, + `Jumping to position ${foundPosition + .map((el) => Math.floor(el)) + .join(", ")} and zooming to ${zoomValue.toFixed(2)}`, ); }; From 92c3d6bbf82984c25e6c70a393e328cf9b978688 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Tue, 28 Nov 2023 14:18:14 +0100 Subject: [PATCH 14/46] better error handling and don't crash if no transformation is necessary (due to empty NMLs) --- .../dataset/dataset_add_compose_view.tsx | 184 +++++++++--------- 1 file changed, 94 insertions(+), 90 deletions(-) diff --git a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx index 39f6d0e544f..07b30466e9c 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx @@ -61,111 +61,115 @@ export default function DatasetAddComposeView(props: Props) { const onRemoveLayer = (layer: LayerLink) => { const oldLayers = form.getFieldValue(["layers"]); - const newLayers = oldLayers.filter( - (existingLayer: LayerLink) => - existingLayer.datasetId.owningOrganization !== layer.datasetId.owningOrganization || - existingLayer.datasetId.name !== layer.datasetId.name || - existingLayer.sourceName !== layer.sourceName, - ); + const newLayers = oldLayers.filter((existingLayer: LayerLink) => existingLayer !== layer); form.setFieldsValue({ layers: newLayers }); }; const handleChange = async (info: UploadChangeParam>) => { - const newFileList = info.fileList; - setFileList(newFileList); - - const sourcePoints = []; - const targetPoints = []; - if (newFileList.length === 1 && newFileList[0]?.originFileObj) { - const csv = await 
readFileAsText(newFileList[0]?.originFileObj); - console.log("csv", csv); - const lines = csv.split("\n"); - for (const line of lines) { - const fields = line.split(","); - const [_pointName, _enabled, x1, y1, z1, x2, y2, z2] = fields; - - const source = [x1, y1, z1].map((el) => parseInt(el.replaceAll('"', ""))) as Vector3; - const target = [x2, y2, z2].map((el) => parseInt(el.replaceAll('"', ""))) as Vector3; - sourcePoints.push(source); - targetPoints.push(target); - } - } + try { + const newFileList = info.fileList; + setFileList(newFileList); - if (newFileList.length === 2) { - const nmlString1 = await readFileAsText(newFileList[0]?.originFileObj!); - const nmlString2 = await readFileAsText(newFileList[1]?.originFileObj!); + const sourcePoints = []; + const targetPoints = []; + if (newFileList.length === 1 && newFileList[0]?.originFileObj) { + const csv = await readFileAsText(newFileList[0]?.originFileObj); + console.log("csv", csv); + const lines = csv.split("\n"); + for (const line of lines) { + const fields = line.split(","); + const [_pointName, _enabled, x1, y1, z1, x2, y2, z2] = fields; - if (nmlString1 === "" || nmlString2 === "") { - throw new Error("NML files are empty."); + const source = [x1, y1, z1].map((el) => parseInt(el.replaceAll('"', ""))) as Vector3; + const target = [x2, y2, z2].map((el) => parseInt(el.replaceAll('"', ""))) as Vector3; + sourcePoints.push(source); + targetPoints.push(target); + } } - const { trees: trees1, datasetName: datasetName1 } = await parseNml(nmlString1); - const { trees: trees2, datasetName: datasetName2 } = await parseNml(nmlString2); + if (newFileList.length === 2) { + const nmlString1 = await readFileAsText(newFileList[0]?.originFileObj!); + const nmlString2 = await readFileAsText(newFileList[1]?.originFileObj!); - if (!datasetName1 || !datasetName2) { - throw new Error("Could not extract dataset names."); - } + if (nmlString1 === "" || nmlString2 === "") { + throw new Error("NML files are empty."); + } - const [dataset1, dataset2] = await Promise.all([ - getDataset({ - owningOrganization: activeUser?.organization || "", - name: datasetName1, - }), - getDataset({ - owningOrganization: activeUser?.organization || "", - name: datasetName2, - }), - ]); - console.log("dataset1, dataset2", dataset1, dataset2); - - const nodes1 = Array.from( - values(trees1) - .map((tree) => Array.from(tree.nodes.values())[0]) - .values(), - ); - const nodes2 = Array.from( - values(trees2) - .map((tree) => Array.from(tree.nodes.values())[0]) - .values(), - ); + const { trees: trees1, datasetName: datasetName1 } = await parseNml(nmlString1); + const { trees: trees2, datasetName: datasetName2 } = await parseNml(nmlString2); - for (const [node1, node2] of _.zip(nodes1, nodes2)) { - if ((node1 == null) != (node2 == null)) { - throw new Error("A tree was empty while its corresponding tree wasn't."); + if (!datasetName1 || !datasetName2) { + throw new Error("Could not extract dataset names."); } - if (node1 != null && node2 != null) { - sourcePoints.push(node1.position); - targetPoints.push(node2.position); + + const [dataset1, dataset2] = await Promise.all([ + getDataset({ + owningOrganization: activeUser?.organization || "", + name: datasetName1, + }), + getDataset({ + owningOrganization: activeUser?.organization || "", + name: datasetName2, + }), + ]); + console.log("dataset1, dataset2", dataset1, dataset2); + + const nodes1 = Array.from( + values(trees1) + .map((tree) => Array.from(tree.nodes.values())[0]) + .values(), + ); + const nodes2 = Array.from( + 
values(trees2) + .map((tree) => Array.from(tree.nodes.values())[0]) + .values(), + ); + + for (const [node1, node2] of _.zip(nodes1, nodes2)) { + if ((node1 == null) != (node2 == null)) { + throw new Error("A tree was empty while its corresponding tree wasn't."); + } + if (node1 != null && node2 != null) { + sourcePoints.push(node1.position); + targetPoints.push(node2.position); + } } + const datasets = [dataset1, dataset2]; + setLinkedDatasets(datasets); + + const transformationArr = + sourcePoints.length > 0 && targetPoints.length > 0 + ? [ + { + type: "affine" as const, + matrix: flatToNestedMatrix(estimateAffineMatrix4x4(sourcePoints, targetPoints)), + }, + ] + : []; + console.log("transformationArr", transformationArr); + const newLinks: LayerLink[] = ( + _.flatMap(datasets, (dataset) => + dataset.dataSource.dataLayers.map((layer) => [dataset, layer]), + ) as [APIDataset, APIDataLayer][] + ).map( + ([dataset, dataLayer]): LayerLink => ({ + datasetId: { + owningOrganization: dataset.owningOrganization, + name: dataset.name, + }, + sourceName: dataLayer.name, + newName: dataLayer.name, + transformations: dataset === datasets[0] ? transformationArr : [], + }), + ); + + form.setFieldsValue({ layers: newLinks }); } - const datasets = [dataset1, dataset2]; - setLinkedDatasets(datasets); - const newMatrix = estimateAffineMatrix4x4(sourcePoints, targetPoints); - const newLinks: LayerLink[] = ( - _.flatMap(datasets, (dataset) => - dataset.dataSource.dataLayers.map((layer) => [dataset, layer]), - ) as [APIDataset, APIDataLayer][] - ).map( - ([dataset, dataLayer]): LayerLink => ({ - datasetId: { - owningOrganization: dataset.owningOrganization, - name: dataset.name, - }, - sourceName: dataLayer.name, - newName: dataLayer.name, - transformations: - dataset === datasets[0] - ? [ - { - type: "affine", - matrix: flatToNestedMatrix(newMatrix), - }, - ] - : [], - }), + } catch (exception) { + Toast.error( + "An error occurred while importing the uploaded files. 
See the Browser's console for more feedback.", ); - - form.setFieldsValue({ layers: newLinks }); + console.error(exception); } }; From 94fff831be388e74622254bcd53a16fb8876a996 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 14 Dec 2023 16:24:07 +0100 Subject: [PATCH 15/46] implement wizard for dataset composition --- .../dataset/dataset_add_compose_view.tsx | 654 +++++++++++++----- 1 file changed, 477 insertions(+), 177 deletions(-) diff --git a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx index 07b30466e9c..5e3a08d6c6e 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx @@ -5,10 +5,23 @@ import { DatasetNameFormItem, layerNameRules, } from "admin/dataset/dataset_components"; -import { Button, Col, Collapse, Form, FormInstance, Input, List, Radio, Row, Tooltip } from "antd"; +import { + Button, + Col, + Collapse, + Form, + FormInstance, + Input, + List, + Radio, + RadioChangeEvent, + Row, + Space, + Tooltip, +} from "antd"; import Upload, { UploadChangeParam, UploadFile } from "antd/lib/upload"; import { Vector3 } from "oxalis/constants"; -import React, { useState } from "react"; +import React, { useEffect, useState } from "react"; import { readFileAsText } from "libs/read_file"; import { estimateAffineMatrix4x4 } from "libs/estimate_affine"; import { parseNml } from "oxalis/model/helpers/nml_helpers"; @@ -32,8 +45,10 @@ import { LayerLink, } from "types/api_flow_types"; import { syncValidator } from "types/validation"; -import { createDatasetComposition, getDataset } from "admin/admin_rest_api"; +import { createDatasetComposition, getDataset, getDatasets } from "admin/admin_rest_api"; import Toast from "libs/toast"; +import AsyncSelect from "components/async_select"; +import { AsyncButton } from "components/async_clickables"; const FormItem = Form.Item; @@ -47,37 +62,158 @@ type Props = { ) => Promise; datastores: APIDataStore[]; }; +const EXPECTED_VALUE_COUNT_PER_LINE = 8; -export default function DatasetAddComposeView(props: Props) { - const formRef = React.useRef>(null); +// Usage of AsyncSelect +interface DatasetValue { + label: string; + value: string; +} - const [isLoading, setIsLoading] = useState(false); - const activeUser = useSelector((state: OxalisState) => state.activeUser); - const isDatasetManagerOrAdmin = Utils.isUserAdminOrDatasetManager(activeUser); - const [form] = Form.useForm(); - const [fileList, setFileList] = useState([]); - const [selectedTeams, setSelectedTeams] = useState>([]); - const [linkedDatasets, setLinkedDatasets] = useState([]); +async function fetchDatasets(query: string): Promise { + const datasets = await getDatasets(false, null, query, null, 20); - const onRemoveLayer = (layer: LayerLink) => { - const oldLayers = form.getFieldValue(["layers"]); - const newLayers = oldLayers.filter((existingLayer: LayerLink) => existingLayer !== layer); - form.setFieldsValue({ layers: newLayers }); + return datasets.map((d) => ({ + label: d.name, + value: d.name, + })); +} + +const DatasetSelect = ({ + datasetValues, + setDatasetValues, +}: { + datasetValues: DatasetValue[]; + setDatasetValues: (values: DatasetValue[]) => void; +}) => { + return ( + { + setDatasetValues(newValue as DatasetValue[]); + console.log("set value to", newValue); + }} + style={{ width: "100%" }} + /> + ); +}; + +const WIZARD_STEPS = [ + { + title: "Import type", + component: ImportTypeQuestion, + }, + { + 
title: "Upload file(s)", + component: UploadFiles, + }, + { + title: "Select Datasets", + component: SelectDatasets, + }, + { + title: "Configure New Datasets", + component: CompositionForm, + }, +]; + +type COMPOSE_MODE = "WITHOUT_TRANSFORMS" | "WK_ANNOTATIONS" | "BIG_WARP"; +type WizardContext = { + currentWizardStep: number; + fileList: FileList; + composeMode: COMPOSE_MODE; + datasets: APIDataset[]; + sourcePoints: Vector3[]; + targetPoints: Vector3[]; +}; + +type WizardComponentProps = { + wizardContext: WizardContext; + setWizardContext: React.Dispatch>; + onNext: () => void; + onPrev: (() => void) | null; + datastores: APIDataStore[]; + onAdded: Props["onAdded"]; +}; + +function ImportTypeQuestion({ wizardContext, setWizardContext }: WizardComponentProps) { + const { composeMode } = wizardContext; + + const onNext = () => { + setWizardContext((oldContext) => ({ + ...oldContext, + currentWizardStep: composeMode === "WITHOUT_TRANSFORMS" ? 2 : 1, + })); }; + const onChange = (e: RadioChangeEvent) => { + console.log("radio checked", e.target.value); + setWizardContext((oldContext) => ({ + ...oldContext, + composeMode: e.target.value, + })); + }; + + return ( +
+    <div>
+      <div>Select how you want to create a new dataset:</div>
+      <Radio.Group onChange={onChange} value={composeMode}>
+        <Space direction="vertical">
+          <Radio value="WITHOUT_TRANSFORMS">Combine datasets without any transforms</Radio>
+          <Radio value="WK_ANNOTATIONS">Combine datasets by using skeleton annotations</Radio>
+          <Radio value="BIG_WARP">Combine datasets by using a BigWarp CSV</Radio>
+        </Space>
+      </Radio.Group>
+      <Button type="primary" onClick={onNext}>
+        Next
+      </Button>
+    </div>
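+    /* The selected mode steers onNext above: "WITHOUT_TRANSFORMS" jumps straight to the
+       dataset-selection step, while the annotation and BigWarp modes first visit the
+       upload step to collect landmark files. */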
+ ); +} +function UploadFiles({ wizardContext, setWizardContext }: WizardComponentProps) { + const activeUser = useSelector((state: OxalisState) => state.activeUser); + const fileList = wizardContext.fileList; const handleChange = async (info: UploadChangeParam>) => { + setWizardContext((oldContext) => ({ + ...oldContext, + fileList: info.fileList, + })); + }; + + const onPrev = () => { + setWizardContext((oldContext) => ({ + ...oldContext, + currentWizardStep: 0, + })); + }; + const onNext = async () => { try { - const newFileList = info.fileList; - setFileList(newFileList); + const sourcePoints: Vector3[] = []; + const targetPoints: Vector3[] = []; + if (wizardContext.composeMode === "BIG_WARP") { + if (fileList.length != 1 || fileList[0]?.originFileObj == null) { + Toast.error("Expected exactly one CSV file."); + return; + } - const sourcePoints = []; - const targetPoints = []; - if (newFileList.length === 1 && newFileList[0]?.originFileObj) { - const csv = await readFileAsText(newFileList[0]?.originFileObj); + const csv = await readFileAsText(fileList[0]?.originFileObj); console.log("csv", csv); const lines = csv.split("\n"); for (const line of lines) { const fields = line.split(","); + if (fields.length != EXPECTED_VALUE_COUNT_PER_LINE) { + if (line.trim() != "") { + throw new Error( + `Cannot interpret line in CSV file. Expected ${EXPECTED_VALUE_COUNT_PER_LINE} values, got ${fields.length}.`, + ); + } + continue; + } const [_pointName, _enabled, x1, y1, z1, x2, y2, z2] = fields; const source = [x1, y1, z1].map((el) => parseInt(el.replaceAll('"', ""))) as Vector3; @@ -85,13 +221,25 @@ export default function DatasetAddComposeView(props: Props) { sourcePoints.push(source); targetPoints.push(target); } - } - if (newFileList.length === 2) { - const nmlString1 = await readFileAsText(newFileList[0]?.originFileObj!); - const nmlString2 = await readFileAsText(newFileList[1]?.originFileObj!); + setWizardContext((oldContext) => ({ + ...oldContext, + sourcePoints, + targetPoints, + datasets: [], + currentWizardStep: 2, + })); + } else if (wizardContext.composeMode == "WK_ANNOTATIONS") { + if (fileList.length != 2) { + Toast.error("Expected exactly two NML files."); + return; + } + + const nmlString1 = await readFileAsText(fileList[0]?.originFileObj!); + const nmlString2 = await readFileAsText(fileList[1]?.originFileObj!); if (nmlString1 === "" || nmlString2 === "") { + // todop unify error handling throw new Error("NML files are empty."); } @@ -102,18 +250,6 @@ export default function DatasetAddComposeView(props: Props) { throw new Error("Could not extract dataset names."); } - const [dataset1, dataset2] = await Promise.all([ - getDataset({ - owningOrganization: activeUser?.organization || "", - name: datasetName1, - }), - getDataset({ - owningOrganization: activeUser?.organization || "", - name: datasetName2, - }), - ]); - console.log("dataset1, dataset2", dataset1, dataset2); - const nodes1 = Array.from( values(trees1) .map((tree) => Array.from(tree.nodes.values())[0]) @@ -134,36 +270,33 @@ export default function DatasetAddComposeView(props: Props) { targetPoints.push(node2.position); } } - const datasets = [dataset1, dataset2]; - setLinkedDatasets(datasets); - - const transformationArr = - sourcePoints.length > 0 && targetPoints.length > 0 - ? 
[ - { - type: "affine" as const, - matrix: flatToNestedMatrix(estimateAffineMatrix4x4(sourcePoints, targetPoints)), - }, - ] - : []; - console.log("transformationArr", transformationArr); - const newLinks: LayerLink[] = ( - _.flatMap(datasets, (dataset) => - dataset.dataSource.dataLayers.map((layer) => [dataset, layer]), - ) as [APIDataset, APIDataLayer][] - ).map( - ([dataset, dataLayer]): LayerLink => ({ - datasetId: { - owningOrganization: dataset.owningOrganization, - name: dataset.name, - }, - sourceName: dataLayer.name, - newName: dataLayer.name, - transformations: dataset === datasets[0] ? transformationArr : [], - }), - ); - form.setFieldsValue({ layers: newLinks }); + const datasets: APIDataset[] = []; + try { + const [dataset1, dataset2] = await Promise.all([ + getDataset({ + owningOrganization: activeUser?.organization || "", + name: datasetName1, + }), + getDataset({ + owningOrganization: activeUser?.organization || "", + name: datasetName2, + }), + ]); + datasets.push(dataset1); + datasets.push(dataset2); + } catch (exception) { + console.warn(exception); + Toast.warning("Could not derive datasets from NML. Please specify these manally."); + } + + setWizardContext((oldContext) => ({ + ...oldContext, + datasets, + sourcePoints, + targetPoints, + currentWizardStep: 2, + })); } } catch (exception) { Toast.error( @@ -173,6 +306,161 @@ export default function DatasetAddComposeView(props: Props) { } }; + return ( +
+    <div>
+      <div>
+        Landmark files ({wizardContext.composeMode === "BIG_WARP" ? "1 CSV file" : "2 NML files"}
+        ):
+      </div>
+      <Upload.Dragger
+        fileList={fileList}
+        onChange={handleChange}
+        beforeUpload={() => false}
+        maxCount={2}
+        multiple
+      >
+        <p className="ant-upload-drag-icon">
+          <FileExcelOutlined />
+        </p>
+        <p className="ant-upload-text">Drag your landmark files to this area</p>
+        <p className="ant-upload-hint">...</p>
+      </Upload.Dragger>
+      <Button onClick={onPrev}>Back</Button>
+      <AsyncButton type="primary" onClick={onNext}>
+        Next
+      </AsyncButton>
+    </div>
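+    /* beforeUpload={() => false} keeps antd from POSTing the files anywhere; they are
+       only read client-side via readFileAsText once the user clicks Next. */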
+ ); +} + +function SelectDatasets({ wizardContext, setWizardContext }: WizardComponentProps) { + const activeUser = useSelector((state: OxalisState) => state.activeUser); + const [datasetValues, setDatasetValues] = useState([]); + + const onPrev = () => { + setWizardContext((oldContext) => ({ + ...oldContext, + currentWizardStep: wizardContext.composeMode === "WITHOUT_TRANSFORMS" ? 0 : 1, + })); + }; + const onNext = async () => { + const datasets: APIDataset[] = []; + try { + const [dataset1, dataset2] = await Promise.all([ + getDataset({ + owningOrganization: activeUser?.organization || "", + name: datasetValues[0].value, + }), + getDataset({ + owningOrganization: activeUser?.organization || "", + name: datasetValues[1].value, + }), + ]); + datasets.push(dataset1); + datasets.push(dataset2); + } catch (exception) { + console.warn(exception); + Toast.warning("Could not derive datasets from NML. Please specify these manally."); + return; + } + + setWizardContext((oldContext) => ({ + ...oldContext, + currentWizardStep: 3, + datasets, + })); + }; + + useEffect(() => { + setDatasetValues(wizardContext.datasets.map((ds) => ({ value: ds.name, label: ds.name }))); + }, []); + + return ( +
+    <div>
+      <DatasetSelect datasetValues={datasetValues} setDatasetValues={setDatasetValues} />
+      <Button onClick={onPrev}>Back</Button>
+      <AsyncButton type="primary" onClick={onNext}>
+        Next
+      </AsyncButton>
+    </div>
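+    /* Exactly two dataset names are expected; onNext resolves both via getDataset so the
+       following step can enumerate their dataSource.dataLayers. */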
+ ); +} + +export function CompositionForm(props: WizardComponentProps) { + const formRef = React.useRef>(null); + + const onPrev = () => { + props.setWizardContext((oldContext) => ({ + ...oldContext, + currentWizardStep: 2, + })); + }; + + const [isLoading, setIsLoading] = useState(false); + const activeUser = useSelector((state: OxalisState) => state.activeUser); + const isDatasetManagerOrAdmin = Utils.isUserAdminOrDatasetManager(activeUser); + const [form] = Form.useForm(); + const [selectedTeams, setSelectedTeams] = useState>([]); + + const { wizardContext } = props; + const linkedDatasets = wizardContext.datasets; + + const onRemoveLayer = (layer: LayerLink) => { + const oldLayers = form.getFieldValue(["layers"]); + const newLayers = oldLayers.filter((existingLayer: LayerLink) => existingLayer !== layer); + form.setFieldsValue({ layers: newLayers }); + }; + + const handleTransformImport = async (sourcePoints: Vector3[], targetPoints: Vector3[]) => { + const datasets = linkedDatasets; + const transformationArr = + sourcePoints.length > 0 && targetPoints.length > 0 + ? [ + { + type: "affine" as const, + matrix: flatToNestedMatrix(estimateAffineMatrix4x4(sourcePoints, targetPoints)), + }, + ] + : []; + + const newLinks: LayerLink[] = ( + _.flatMap(datasets, (dataset) => + dataset.dataSource.dataLayers.map((layer) => [dataset, layer]), + ) as [APIDataset, APIDataLayer][] + ).map( + ([dataset, dataLayer]): LayerLink => ({ + datasetId: { + owningOrganization: dataset.owningOrganization, + name: dataset.name, + }, + sourceName: dataLayer.name, + newName: dataLayer.name, + transformations: dataset === datasets[0] ? transformationArr : [], + }), + ); + form.setFieldsValue({ layers: newLinks }); + }; + + useEffect(() => { + handleTransformImport(wizardContext.sourcePoints, wizardContext.targetPoints); + }, []); + const handleSubmit = async () => { if (activeUser == null) { throw new Error("Cannot upload dataset without being logged in."); @@ -205,6 +493,120 @@ export default function DatasetAddComposeView(props: Props) { return ( // Using Forms here only to validate fields and for easy layout +
+    <Form form={form} layout="vertical" onFinish={handleSubmit}>
+      <Row gutter={8}>
+        <Col span={12}>
+          <DatasetNameFormItem activeUser={activeUser} />
+        </Col>
+        <Col span={12}>
+          <AllowedTeamsFormItem
+            isDatasetManagerOrAdmin={isDatasetManagerOrAdmin}
+            selectedTeams={selectedTeams}
+            setSelectedTeams={(selectedTeams) => setSelectedTeams(selectedTeams)}
+            formRef={formRef}
+          />
+        </Col>
+      </Row>
+      <FormItem name="targetFolderId" label="Target Folder">
+        <FolderSelection />
+      </FormItem>
+      <Form.Item noStyle shouldUpdate={(prevValues, curValues) => prevValues.layers !== curValues.layers}>
+        {({ getFieldValue }) => {
+          const layers = getFieldValue("layers") || [];
+          return (
+            <List
+              header={
+                <div>
+                  Layers
+                </div>
+              }
+            >
+              {layers.map((layer: LayerLink, idx: number) => (
+                // the layer name may change in this view, the order does not, so idx is the right key choice here
+                <List.Item key={idx}>
+                  <LinkedLayerForm
+                    datasetId={layer.datasetId}
+                    layer={layer}
+                    index={idx}
+                    onRemoveLayer={onRemoveLayer}
+                    form={form}
+                  />
+                </List.Item>
+              ))}
+            </List>
+          );
+        }}
+      </Form.Item>
+      <FormItem>
+        <Button type="primary" htmlType="submit" loading={isLoading}>
+          Create Dataset
+        </Button>
+      </FormItem>
+    </Form>
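+      /* handleTransformImport above attaches the estimated affine matrix only to the
+         layers of the first dataset; the second dataset keeps an empty transformations
+         list and acts as the fixed reference frame. */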
+ ); +} + +export default function DatasetAddComposeView(props: Props) { + const [wizardContext, setWizardContext] = useState({ + currentWizardStep: 0, + fileList: [], + composeMode: "WITHOUT_TRANSFORMS", + datasets: [], + sourcePoints: [], + targetPoints: [], + }); + const { currentWizardStep } = wizardContext; + const CurrentWizardComponent = WIZARD_STEPS[currentWizardStep].component; + const onNext = () => { + setWizardContext(({ currentWizardStep, ...rest }) => ({ + ...rest, + currentWizardStep: Math.min(currentWizardStep + 1, WIZARD_STEPS.length - 1), + })); + }; + const onPrev = + wizardContext.currentWizardStep > 0 + ? () => { + setWizardContext(({ currentWizardStep, ...rest }) => ({ + ...rest, + currentWizardStep: currentWizardStep - 1, + })); + } + : null; + return (

@@ -215,116 +617,14 @@ export default function DatasetAddComposeView(props: Props) {
           find the datasets that are referenced in these files and will create transformations
           using these landmarks.
-        </div>
-        <Form form={form} layout="vertical" onFinish={handleSubmit}>
-          <FormItemWithInfo
-            label={<div>Landmark files (NML pairs or CSV)</div>}
-            hasFeedback
-          >
-            <Upload.Dragger
-              fileList={fileList}
-              onChange={handleChange}
-              beforeUpload={() => false}
-              maxCount={2}
-              multiple
-            >
-              <p className="ant-upload-drag-icon">
-                <FileExcelOutlined />
-              </p>
-              <p className="ant-upload-text">Drag your landmark files to this area</p>
-              <p className="ant-upload-hint">...</p>
-            </Upload.Dragger>
-          </FormItemWithInfo>
-          <Row gutter={8}>
-            <Col span={12}>
-              <DatasetNameFormItem activeUser={activeUser} />
-            </Col>
-            <Col span={12}>
-              <AllowedTeamsFormItem
-                isDatasetManagerOrAdmin={isDatasetManagerOrAdmin}
-                selectedTeams={selectedTeams}
-                setSelectedTeams={(selectedTeams) => setSelectedTeams(selectedTeams)}
-                formRef={formRef}
-              />
-            </Col>
-          </Row>
-          <FormItem name="targetFolderId" label="Target Folder">
-            <FolderSelection />
-          </FormItem>
-          <Form.Item
-            noStyle
-            shouldUpdate={(prevValues, curValues) => prevValues.layers !== curValues.layers}
-          >
-            {({ getFieldValue }) => {
-              const layers = getFieldValue("layers") || [];
-              return (
-                <List
-                  header={
-                    <div>
-                      Layers
-                    </div>
- } - > - {layers.map((layer: LayerLink, idx: number) => ( - // the layer name may change in this view, the order does not, so idx is the right key choice here - - - - ))} - - ); - }} - - - - - - + ); From 3cbc54ead9805ad878031536e3b4b11704344b4d Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 14 Dec 2023 16:44:05 +0100 Subject: [PATCH 16/46] clean up a bit --- frontend/javascripts/admin/admin_rest_api.ts | 2 + .../dataset/dataset_add_compose_view.tsx | 94 +++++++++---------- .../admin/dataset/dataset_upload_view.tsx | 1 - 3 files changed, 47 insertions(+), 50 deletions(-) diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 3daaf757bd3..6cd8323cd5e 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -1430,10 +1430,12 @@ export async function getActiveDatasetsOfMyOrganization(): Promise { const sharingTokenSuffix = sharingToken != null ? `?sharingToken=${sharingToken}` : ""; return Request.receiveJSON( `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}${sharingTokenSuffix}`, + options, ); } diff --git a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx index 5e3a08d6c6e..513511b85eb 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx @@ -8,7 +8,6 @@ import { import { Button, Col, - Collapse, Form, FormInstance, Input, @@ -27,8 +26,7 @@ import { estimateAffineMatrix4x4 } from "libs/estimate_affine"; import { parseNml } from "oxalis/model/helpers/nml_helpers"; import { values } from "libs/utils"; import _ from "lodash"; -import { flatToNestedMatrix, formatNestedMatrix } from "oxalis/model/accessors/dataset_accessor"; -import { Matrix4x4 } from "libs/mjs"; +import { flatToNestedMatrix } from "oxalis/model/accessors/dataset_accessor"; import { FormItemWithInfo } from "dashboard/dataset/helper_components"; import messages from "messages"; import FolderSelection from "dashboard/folders/folder_selection"; @@ -40,7 +38,6 @@ import { APIDataLayer, APIDatasetId, APITeam, - CoordinateTransformation, APIDataStore, LayerLink, } from "types/api_flow_types"; @@ -49,6 +46,7 @@ import { createDatasetComposition, getDataset, getDatasets } from "admin/admin_r import Toast from "libs/toast"; import AsyncSelect from "components/async_select"; import { AsyncButton } from "components/async_clickables"; +import { Store } from "oxalis/singletons"; const FormItem = Form.Item; @@ -176,7 +174,6 @@ function ImportTypeQuestion({ wizardContext, setWizardContext }: WizardComponent } function UploadFiles({ wizardContext, setWizardContext }: WizardComponentProps) { - const activeUser = useSelector((state: OxalisState) => state.activeUser); const fileList = wizardContext.fileList; const handleChange = async (info: UploadChangeParam>) => { setWizardContext((oldContext) => ({ @@ -196,7 +193,7 @@ function UploadFiles({ wizardContext, setWizardContext }: WizardComponentProps) const sourcePoints: Vector3[] = []; const targetPoints: Vector3[] = []; if (wizardContext.composeMode === "BIG_WARP") { - if (fileList.length != 1 || fileList[0]?.originFileObj == null) { + if (fileList.length !== 1 || fileList[0]?.originFileObj == null) { Toast.error("Expected exactly one CSV file."); return; } @@ -206,8 +203,8 @@ function UploadFiles({ wizardContext, setWizardContext }: WizardComponentProps) const lines = csv.split("\n"); for 
(const line of lines) { const fields = line.split(","); - if (fields.length != EXPECTED_VALUE_COUNT_PER_LINE) { - if (line.trim() != "") { + if (fields.length !== EXPECTED_VALUE_COUNT_PER_LINE) { + if (line.trim() !== "") { throw new Error( `Cannot interpret line in CSV file. Expected ${EXPECTED_VALUE_COUNT_PER_LINE} values, got ${fields.length}.`, ); @@ -229,8 +226,8 @@ function UploadFiles({ wizardContext, setWizardContext }: WizardComponentProps) datasets: [], currentWizardStep: 2, })); - } else if (wizardContext.composeMode == "WK_ANNOTATIONS") { - if (fileList.length != 2) { + } else if (wizardContext.composeMode === "WK_ANNOTATIONS") { + if (fileList.length !== 2) { Toast.error("Expected exactly two NML files."); return; } @@ -262,7 +259,7 @@ function UploadFiles({ wizardContext, setWizardContext }: WizardComponentProps) ); for (const [node1, node2] of _.zip(nodes1, nodes2)) { - if ((node1 == null) != (node2 == null)) { + if ((node1 == null) !== (node2 == null)) { throw new Error("A tree was empty while its corresponding tree wasn't."); } if (node1 != null && node2 != null) { @@ -271,28 +268,11 @@ function UploadFiles({ wizardContext, setWizardContext }: WizardComponentProps) } } - const datasets: APIDataset[] = []; - try { - const [dataset1, dataset2] = await Promise.all([ - getDataset({ - owningOrganization: activeUser?.organization || "", - name: datasetName1, - }), - getDataset({ - owningOrganization: activeUser?.organization || "", - name: datasetName2, - }), - ]); - datasets.push(dataset1); - datasets.push(dataset2); - } catch (exception) { - console.warn(exception); - Toast.warning("Could not derive datasets from NML. Please specify these manally."); - } + const datasets = await tryToFetchDatasetsByName(datasetName1, datasetName2); setWizardContext((oldContext) => ({ ...oldContext, - datasets, + datasets: datasets || [], sourcePoints, targetPoints, currentWizardStep: 2, @@ -345,8 +325,39 @@ function UploadFiles({ wizardContext, setWizardContext }: WizardComponentProps) ); } +async function tryToFetchDatasetsByName( + name1: string, + name2: string, +): Promise { + const { activeUser } = Store.getState(); + try { + const [dataset1, dataset2] = await Promise.all([ + getDataset( + { + owningOrganization: activeUser?.organization || "", + name: name1, + }, + null, + { showErrorToast: false }, + ), + getDataset( + { + owningOrganization: activeUser?.organization || "", + name: name2, + }, + null, + { showErrorToast: false }, + ), + ]); + return [dataset1, dataset2]; + } catch (exception) { + console.warn(exception); + Toast.warning("Could not derive datasets from NML. Please specify these manally."); + return null; + } +} + function SelectDatasets({ wizardContext, setWizardContext }: WizardComponentProps) { - const activeUser = useSelector((state: OxalisState) => state.activeUser); const [datasetValues, setDatasetValues] = useState([]); const onPrev = () => { @@ -356,23 +367,9 @@ function SelectDatasets({ wizardContext, setWizardContext }: WizardComponentProp })); }; const onNext = async () => { - const datasets: APIDataset[] = []; - try { - const [dataset1, dataset2] = await Promise.all([ - getDataset({ - owningOrganization: activeUser?.organization || "", - name: datasetValues[0].value, - }), - getDataset({ - owningOrganization: activeUser?.organization || "", - name: datasetValues[1].value, - }), - ]); - datasets.push(dataset1); - datasets.push(dataset2); - } catch (exception) { - console.warn(exception); - Toast.warning("Could not derive datasets from NML. 
Please specify these manally."); + const datasets = await tryToFetchDatasetsByName(datasetValues[0].value, datasetValues[1].value); + if (datasets == null) { + // An error message was already shown in tryToFetchDatasetsByName return; } @@ -643,7 +640,6 @@ function LinkedLayerForm({ form: FormInstance; datasetId: APIDatasetId; }) { - const activeUser = useSelector((state: OxalisState) => state.activeUser); const layers = Form.useWatch(["layers"]); React.useEffect(() => { diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx index 64ae9c4c6e6..64a94fa4afc 100644 --- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx @@ -40,7 +40,6 @@ import { DatastoreFormItem, } from "admin/dataset/dataset_components"; import { Vector3Input } from "libs/vector_input"; -import TeamSelectionComponent from "dashboard/dataset/team_selection_component"; import features from "features"; import { syncValidator } from "types/validation"; import { FormInstance } from "antd/lib/form"; From 911ebff2eeb5eccc806ea2ccaf1087d2c8da6377 Mon Sep 17 00:00:00 2001 From: Philipp Otto Date: Thu, 14 Dec 2023 16:50:30 +0100 Subject: [PATCH 17/46] refactor dataset selection component into own module --- .../dataset/dataset_add_compose_view.tsx | 57 ++++--------------- .../dataset/dataset_selection_component.tsx | 40 +++++++++++++ 2 files changed, 52 insertions(+), 45 deletions(-) create mode 100644 frontend/javascripts/dashboard/dataset/dataset_selection_component.tsx diff --git a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx index 513511b85eb..f1fcaf88c79 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx @@ -42,11 +42,13 @@ import { LayerLink, } from "types/api_flow_types"; import { syncValidator } from "types/validation"; -import { createDatasetComposition, getDataset, getDatasets } from "admin/admin_rest_api"; +import { createDatasetComposition, getDataset } from "admin/admin_rest_api"; import Toast from "libs/toast"; -import AsyncSelect from "components/async_select"; import { AsyncButton } from "components/async_clickables"; import { Store } from "oxalis/singletons"; +import DatasetSelectionComponent, { + DatasetSelectionValue, +} from "dashboard/dataset/dataset_selection_component"; const FormItem = Form.Item; @@ -62,43 +64,6 @@ type Props = { }; const EXPECTED_VALUE_COUNT_PER_LINE = 8; -// Usage of AsyncSelect -interface DatasetValue { - label: string; - value: string; -} - -async function fetchDatasets(query: string): Promise { - const datasets = await getDatasets(false, null, query, null, 20); - - return datasets.map((d) => ({ - label: d.name, - value: d.name, - })); -} - -const DatasetSelect = ({ - datasetValues, - setDatasetValues, -}: { - datasetValues: DatasetValue[]; - setDatasetValues: (values: DatasetValue[]) => void; -}) => { - return ( - { - setDatasetValues(newValue as DatasetValue[]); - console.log("set value to", newValue); - }} - style={{ width: "100%" }} - /> - ); -}; - const WIZARD_STEPS = [ { title: "Import type", @@ -147,7 +112,6 @@ function ImportTypeQuestion({ wizardContext, setWizardContext }: WizardComponent })); }; const onChange = (e: RadioChangeEvent) => { - console.log("radio checked", e.target.value); setWizardContext((oldContext) => ({ ...oldContext, composeMode: 
e.target.value, @@ -199,7 +163,6 @@ function UploadFiles({ wizardContext, setWizardContext }: WizardComponentProps) } const csv = await readFileAsText(fileList[0]?.originFileObj); - console.log("csv", csv); const lines = csv.split("\n"); for (const line of lines) { const fields = line.split(","); @@ -228,7 +191,7 @@ function UploadFiles({ wizardContext, setWizardContext }: WizardComponentProps) })); } else if (wizardContext.composeMode === "WK_ANNOTATIONS") { if (fileList.length !== 2) { - Toast.error("Expected exactly two NML files."); + Toast.warning("Expected exactly two NML files."); return; } @@ -237,7 +200,8 @@ function UploadFiles({ wizardContext, setWizardContext }: WizardComponentProps) if (nmlString1 === "" || nmlString2 === "") { // todop unify error handling - throw new Error("NML files are empty."); + Toast.warning("NML files should not be empty."); + return; } const { trees: trees1, datasetName: datasetName1 } = await parseNml(nmlString1); @@ -358,7 +322,7 @@ async function tryToFetchDatasetsByName( } function SelectDatasets({ wizardContext, setWizardContext }: WizardComponentProps) { - const [datasetValues, setDatasetValues] = useState([]); + const [datasetValues, setDatasetValues] = useState([]); const onPrev = () => { setWizardContext((oldContext) => ({ @@ -386,7 +350,10 @@ function SelectDatasets({ wizardContext, setWizardContext }: WizardComponentProp return (
- + +
+ ); +} diff --git a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx new file mode 100644 index 00000000000..4d660aac35f --- /dev/null +++ b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx @@ -0,0 +1,165 @@ +import { FileExcelOutlined } from "@ant-design/icons"; +import { Button } from "antd"; +import Upload, { UploadChangeParam, UploadFile } from "antd/lib/upload"; +import { AsyncButton } from "components/async_clickables"; +import { readFileAsText } from "libs/read_file"; +import Toast from "libs/toast"; +import { values } from "libs/utils"; +import _ from "lodash"; +import { Vector3 } from "oxalis/constants"; +import { parseNml } from "oxalis/model/helpers/nml_helpers"; +import React from "react"; +import { tryToFetchDatasetsByName, WizardComponentProps } from "./common"; +const EXPECTED_VALUE_COUNT_PER_LINE = 8; + +export default function UploadFiles({ wizardContext, setWizardContext }: WizardComponentProps) { + const fileList = wizardContext.fileList; + const handleChange = async (info: UploadChangeParam>) => { + setWizardContext((oldContext) => ({ + ...oldContext, + fileList: info.fileList, + })); + }; + + const onPrev = () => { + setWizardContext((oldContext) => ({ + ...oldContext, + currentWizardStep: "SelectImportType", + })); + }; + const onNext = async () => { + try { + const sourcePoints: Vector3[] = []; + const targetPoints: Vector3[] = []; + if (wizardContext.composeMode === "BIG_WARP") { + if (fileList.length !== 1 || fileList[0]?.originFileObj == null) { + Toast.error("Expected exactly one CSV file."); + return; + } + + const csv = await readFileAsText(fileList[0]?.originFileObj); + const lines = csv.split("\n"); + for (const line of lines) { + const fields = line.split(","); + if (fields.length !== EXPECTED_VALUE_COUNT_PER_LINE) { + if (line.trim() !== "") { + throw new Error( + `Cannot interpret line in CSV file. 
Expected ${EXPECTED_VALUE_COUNT_PER_LINE} values, got ${fields.length}.`, + ); + } + continue; + } + const [_pointName, _enabled, x1, y1, z1, x2, y2, z2] = fields; + + const source = [x1, y1, z1].map((el) => parseInt(el.replaceAll('"', ""))) as Vector3; + const target = [x2, y2, z2].map((el) => parseInt(el.replaceAll('"', ""))) as Vector3; + sourcePoints.push(source); + targetPoints.push(target); + } + + setWizardContext((oldContext) => ({ + ...oldContext, + sourcePoints, + targetPoints, + datasets: [], + currentWizardStep: "SelectDatasets", + })); + } else if (wizardContext.composeMode === "WK_ANNOTATIONS") { + if (fileList.length !== 2) { + Toast.warning("Expected exactly two NML files."); + return; + } + + const nmlString1 = await readFileAsText(fileList[0]?.originFileObj!); + const nmlString2 = await readFileAsText(fileList[1]?.originFileObj!); + + if (nmlString1 === "" || nmlString2 === "") { + // todop unify error handling + Toast.warning("NML files should not be empty."); + return; + } + + const { trees: trees1, datasetName: datasetName1 } = await parseNml(nmlString1); + const { trees: trees2, datasetName: datasetName2 } = await parseNml(nmlString2); + + if (!datasetName1 || !datasetName2) { + throw new Error("Could not extract dataset names."); + } + + const nodes1 = Array.from( + values(trees1) + .map((tree) => Array.from(tree.nodes.values())[0]) + .values(), + ); + const nodes2 = Array.from( + values(trees2) + .map((tree) => Array.from(tree.nodes.values())[0]) + .values(), + ); + + for (const [node1, node2] of _.zip(nodes1, nodes2)) { + if ((node1 == null) !== (node2 == null)) { + throw new Error("A tree was empty while its corresponding tree wasn't."); + } + if (node1 != null && node2 != null) { + sourcePoints.push(node1.position); + targetPoints.push(node2.position); + } + } + + const datasets = await tryToFetchDatasetsByName(datasetName1, datasetName2); + + setWizardContext((oldContext) => ({ + ...oldContext, + datasets: datasets || [], + sourcePoints, + targetPoints, + currentWizardStep: "SelectDatasets", + })); + } + } catch (exception) { + Toast.error( + "An error occurred while importing the uploaded files. See the Browser's console for more feedback.", + ); + console.error(exception); + } + }; + + return ( +
+    <div>
+      <div>
+        Landmark files ({wizardContext.composeMode === "BIG_WARP" ? "1 CSV file" : "2 NML files"}
+        ):
+      </div>
+      <Upload.Dragger
+        fileList={fileList}
+        onChange={handleChange}
+        beforeUpload={() => false}
+        maxCount={2}
+        multiple
+      >
+        <p className="ant-upload-drag-icon">
+          <FileExcelOutlined />
+        </p>
+        <p className="ant-upload-text">Drag your landmark files to this area</p>
+        <p className="ant-upload-hint">...</p>
+      </Upload.Dragger>
+      <Button onClick={onPrev}>Back</Button>
+      <AsyncButton type="primary" onClick={onNext}>
+        Next
+      </AsyncButton>
+    </div>
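+    /* A BigWarp landmark CSV holds one row per landmark: name, enabled flag, source
+       point (x1, y1, z1) and target point (x2, y2, z2), hence
+       EXPECTED_VALUE_COUNT_PER_LINE = 8. */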
+ ); +} diff --git a/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx b/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx new file mode 100644 index 00000000000..4d008cd8634 --- /dev/null +++ b/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx @@ -0,0 +1,53 @@ +import { Button } from "antd"; +import { AsyncButton } from "components/async_clickables"; +import DatasetSelectionComponent, { + DatasetSelectionValue, +} from "dashboard/dataset/dataset_selection_component"; +import React, { useEffect, useState } from "react"; +import { tryToFetchDatasetsByName, WizardComponentProps } from "./common"; + +export default function SelectDatasets({ wizardContext, setWizardContext }: WizardComponentProps) { + const [datasetValues, setDatasetValues] = useState([]); + + const onPrev = () => { + setWizardContext((oldContext) => ({ + ...oldContext, + currentWizardStep: + wizardContext.composeMode === "WITHOUT_TRANSFORMS" ? "SelectImportType" : "UploadFiles", + })); + }; + const onNext = async () => { + const datasets = await tryToFetchDatasetsByName(datasetValues[0].value, datasetValues[1].value); + if (datasets == null) { + // An error message was already shown in tryToFetchDatasetsByName + return; + } + + setWizardContext((oldContext) => ({ + ...oldContext, + currentWizardStep: "ConfigureNewDataset", + datasets, + })); + }; + + useEffect(() => { + setDatasetValues(wizardContext.datasets.map((ds) => ({ value: ds.name, label: ds.name }))); + }, []); + + return ( +
+    <div>
+      <DatasetSelectionComponent datasetValues={datasetValues} setDatasetValues={setDatasetValues} />
+      <Button onClick={onPrev}>Back</Button>
+      <AsyncButton type="primary" onClick={onNext}>
+        Next
+      </AsyncButton>
+    </div>
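+    /* DatasetSelectionComponent wraps an async search select that queries matching
+       dataset names server-side, so the full dataset list never needs to be loaded. */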
+ ); +} diff --git a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx new file mode 100644 index 00000000000..cb196309c42 --- /dev/null +++ b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx @@ -0,0 +1,291 @@ +import { DeleteOutlined } from "@ant-design/icons"; +import { createDatasetComposition } from "admin/admin_rest_api"; +import { + AllowedTeamsFormItem, + DatasetNameFormItem, + layerNameRules, +} from "admin/dataset/dataset_components"; +import { Button, Col, Form, FormInstance, Input, List, Row, Tooltip } from "antd"; +import { FormItemWithInfo } from "dashboard/dataset/helper_components"; +import FolderSelection from "dashboard/folders/folder_selection"; +import { estimateAffineMatrix4x4 } from "libs/estimate_affine"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; +import _ from "lodash"; +import messages from "messages"; +import { Vector3 } from "oxalis/constants"; +import { flatToNestedMatrix } from "oxalis/model/accessors/dataset_accessor"; +import { OxalisState } from "oxalis/store"; +import React, { useEffect, useState } from "react"; +import { useSelector } from "react-redux"; +import { APIDataLayer, APIDataset, APIDatasetId, APITeam, LayerLink } from "types/api_flow_types"; +import { syncValidator } from "types/validation"; +import { WizardComponentProps } from "./common"; + +const FormItem = Form.Item; + +export function ConfigureNewDataset(props: WizardComponentProps) { + const formRef = React.useRef>(null); + + const onPrev = () => { + props.setWizardContext((oldContext) => ({ + ...oldContext, + currentWizardStep: "SelectDatasets", + })); + }; + + const [isLoading, setIsLoading] = useState(false); + const activeUser = useSelector((state: OxalisState) => state.activeUser); + const isDatasetManagerOrAdmin = Utils.isUserAdminOrDatasetManager(activeUser); + const [form] = Form.useForm(); + const [selectedTeams, setSelectedTeams] = useState>([]); + + const { wizardContext } = props; + const linkedDatasets = wizardContext.datasets; + + const onRemoveLayer = (layer: LayerLink) => { + const oldLayers = form.getFieldValue(["layers"]); + const newLayers = oldLayers.filter((existingLayer: LayerLink) => existingLayer !== layer); + form.setFieldsValue({ layers: newLayers }); + }; + + const handleTransformImport = async (sourcePoints: Vector3[], targetPoints: Vector3[]) => { + const datasets = linkedDatasets; + const transformationArr = + sourcePoints.length > 0 && targetPoints.length > 0 + ? [ + { + type: "affine" as const, + matrix: flatToNestedMatrix(estimateAffineMatrix4x4(sourcePoints, targetPoints)), + }, + ] + : []; + + const newLinks: LayerLink[] = ( + _.flatMap(datasets, (dataset) => + dataset.dataSource.dataLayers.map((layer) => [dataset, layer]), + ) as [APIDataset, APIDataLayer][] + ).map( + ([dataset, dataLayer]): LayerLink => ({ + datasetId: { + owningOrganization: dataset.owningOrganization, + name: dataset.name, + }, + sourceName: dataLayer.name, + newName: dataLayer.name, + transformations: dataset === datasets[0] ? 
transformationArr : [], + }), + ); + form.setFieldsValue({ layers: newLinks }); + }; + + useEffect(() => { + handleTransformImport(wizardContext.sourcePoints, wizardContext.targetPoints); + }, []); + + const handleSubmit = async () => { + if (activeUser == null) { + throw new Error("Cannot upload dataset without being logged in."); + } + const layers = form.getFieldValue(["layers"]); + + const uploadableDatastores = props.datastores.filter((datastore) => datastore.allowsUpload); + const datastoreToUse = uploadableDatastores[0]; + if (!datastoreToUse) { + Toast.error("Could not find datastore that allows uploading."); + return; + } + + const newDatasetName = form.getFieldValue(["name"]); + setIsLoading(true); + try { + await createDatasetComposition(datastoreToUse.url, { + newDatasetName, + targetFolderId: form.getFieldValue(["targetFolderId"]), + organizationName: "sample_organization", + scale: linkedDatasets[1].dataSource.scale, + layers, + }); + } finally { + setIsLoading(false); + } + + props.onAdded(activeUser.organization, newDatasetName, false); + }; + + return ( + // Using Forms here only to validate fields and for easy layout +
+    <Form form={form} layout="vertical" onFinish={handleSubmit}>
+      <Row gutter={8}>
+        <Col span={12}>
+          <DatasetNameFormItem activeUser={activeUser} />
+        </Col>
+        <Col span={12}>
+          <AllowedTeamsFormItem
+            isDatasetManagerOrAdmin={isDatasetManagerOrAdmin}
+            selectedTeams={selectedTeams}
+            setSelectedTeams={(selectedTeams) => setSelectedTeams(selectedTeams)}
+            formRef={formRef}
+          />
+        </Col>
+      </Row>
+      <FormItem name="targetFolderId" label="Target Folder">
+        <FolderSelection />
+      </FormItem>
+      <Form.Item noStyle shouldUpdate={(prevValues, curValues) => prevValues.layers !== curValues.layers}>
+        {({ getFieldValue }) => {
+          const layers = getFieldValue("layers") || [];
+          return (
+            <List
+              header={
+                <div>
+                  Layers
+                </div>
+              }
+            >
+              {layers.map((layer: LayerLink, idx: number) => (
+                // the layer name may change in this view, the order does not, so idx is the right key choice here
+                <List.Item key={idx}>
+                  <LinkedLayerForm
+                    datasetId={layer.datasetId}
+                    layer={layer}
+                    index={idx}
+                    onRemoveLayer={onRemoveLayer}
+                    form={form}
+                  />
+                </List.Item>
+              ))}
+            </List>
+          );
+        }}
+      </Form.Item>
+      <FormItem>
+        <Button type="primary" htmlType="submit" loading={isLoading}>
+          Create Dataset
+        </Button>
+      </FormItem>
+    </Form>
+  );
+}
+
+function LinkedLayerForm({
+  layer,
+  index,
+  onRemoveLayer,
+  form,
+  datasetId,
+}: {
+  layer: LayerLink;
+  index: number;
+  onRemoveLayer: (layer: LayerLink) => void;
+  form: FormInstance;
+  datasetId: APIDatasetId;
+}) {
+  const layers = Form.useWatch(["layers"]);
+
+  React.useEffect(() => {
+    // Always validate all fields so that in the case of duplicate layer
+    // names all relevant fields are properly validated.
+    // This is a workaround, since shouldUpdate=true on a
+    // FormItemWithInfo doesn't work for some reason.
+    form.validateFields();
+  }, [layers]);
+
+  return (
+    <Row gutter={48}>
+      <Col span={10}>
+        <FormItemWithInfo
+          name={["layers", index, "newName"]}
+          label="Name"
+          rules={[
+            ...layerNameRules,
+            {
+              validator: syncValidator(
+                (value: string) =>
+                  layers.filter((someLayer: LayerLink) => someLayer.newName === value).length <=
+                  1,
+                "Layer names must be unique.",
+              ),
+            },
+          ]}
+        >
+          <Input />
+        </FormItemWithInfo>
+      </Col>
+      <Col span={10}>
+        <div>
+          {datasetId.name}
+          {" "}
+          / {layer.sourceName}
+        </div>
+      </Col>
+      <Col span={4}>
+        <Tooltip title="Remove Layer">
+          <Button onClick={() => onRemoveLayer(layer)} icon={<DeleteOutlined />} />
+        </Tooltip>
+      </Col>
+    </Row>
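+      /* The unique-name rule reads the watched "layers" value; the useEffect above
+         re-runs form.validateFields() so that all rows are re-checked whenever any
+         newName changes. */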
+ ); +} diff --git a/frontend/javascripts/admin/dataset/composition_wizard/common.ts b/frontend/javascripts/admin/dataset/composition_wizard/common.ts new file mode 100644 index 00000000000..fdd8be87191 --- /dev/null +++ b/frontend/javascripts/admin/dataset/composition_wizard/common.ts @@ -0,0 +1,67 @@ +import { getDataset } from "admin/admin_rest_api"; +import { UploadFile } from "antd"; +import Toast from "libs/toast"; +import { Vector3 } from "oxalis/constants"; +import { Store } from "oxalis/singletons"; +import { APIDataset, APIDataStore } from "types/api_flow_types"; + +type FileList = UploadFile[]; + +export type WizardStep = + | "SelectImportType" + | "UploadFiles" + | "SelectDatasets" + | "ConfigureNewDataset"; + +export type COMPOSE_MODE = "WITHOUT_TRANSFORMS" | "WK_ANNOTATIONS" | "BIG_WARP"; +export type WizardContext = { + currentWizardStep: WizardStep; + fileList: FileList; + composeMode: COMPOSE_MODE; + datasets: APIDataset[]; + sourcePoints: Vector3[]; + targetPoints: Vector3[]; +}; + +export type WizardComponentProps = { + wizardContext: WizardContext; + setWizardContext: React.Dispatch>; + datastores: APIDataStore[]; + onAdded: ( + datasetOrganization: string, + uploadedDatasetName: string, + needsConversion?: boolean | null | undefined, + ) => Promise; +}; + +export async function tryToFetchDatasetsByName( + name1: string, + name2: string, +): Promise { + const { activeUser } = Store.getState(); + try { + const [dataset1, dataset2] = await Promise.all([ + getDataset( + { + owningOrganization: activeUser?.organization || "", + name: name1, + }, + null, + { showErrorToast: false }, + ), + getDataset( + { + owningOrganization: activeUser?.organization || "", + name: name2, + }, + null, + { showErrorToast: false }, + ), + ]); + return [dataset1, dataset2]; + } catch (exception) { + console.warn(exception); + Toast.warning("Could not derive datasets from NML. 
Please specify these manually.");
+    return null;
+  }
+}
diff --git a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx
index d39617bc2e0..2c6ba49cb0f 100644
--- a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx
+++ b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx
@@ -1,550 +1,39 @@
-import { DeleteOutlined, FileExcelOutlined } from "@ant-design/icons";
-import {
-  AllowedTeamsFormItem,
-  CardContainer,
-  DatasetNameFormItem,
-  layerNameRules,
-} from "admin/dataset/dataset_components";
-import {
-  Button,
-  Col,
-  Form,
-  FormInstance,
-  Input,
-  List,
-  Radio,
-  RadioChangeEvent,
-  Row,
-  Space,
-  Tooltip,
-} from "antd";
-import Upload, { UploadChangeParam, UploadFile } from "antd/lib/upload";
-import { Vector3 } from "oxalis/constants";
-import React, { useEffect, useState } from "react";
-import { readFileAsText } from "libs/read_file";
-import { estimateAffineMatrix4x4 } from "libs/estimate_affine";
-import { parseNml } from "oxalis/model/helpers/nml_helpers";
-import { values } from "libs/utils";
-import _ from "lodash";
-import { flatToNestedMatrix } from "oxalis/model/accessors/dataset_accessor";
-import { FormItemWithInfo } from "dashboard/dataset/helper_components";
-import messages from "messages";
-import FolderSelection from "dashboard/folders/folder_selection";
-import { useSelector } from "react-redux";
-import { OxalisState } from "oxalis/store";
-import * as Utils from "libs/utils";
-import {
-  APIDataset,
-  APIDataLayer,
-  APIDatasetId,
-  APITeam,
-  APIDataStore,
-  LayerLink,
-} from "types/api_flow_types";
-import { syncValidator } from "types/validation";
-import { createDatasetComposition, getDataset } from "admin/admin_rest_api";
-import Toast from "libs/toast";
-import { AsyncButton } from "components/async_clickables";
-import { Store } from "oxalis/singletons";
-import DatasetSelectionComponent, {
-  DatasetSelectionValue,
-} from "dashboard/dataset/dataset_selection_component";
-
-const FormItem = Form.Item;
-
-type FileList = UploadFile[];
+import { CardContainer } from "admin/dataset/dataset_components";
+import React, { useState } from "react";
+import { APIDataStore } from "types/api_flow_types";
+import SelectImportType from "./composition_wizard/01_select_import_type";
+import UploadFiles from "./composition_wizard/02_upload_files";
+import SelectDatasets from "./composition_wizard/03_select_datasets";
+import { ConfigureNewDataset } from "./composition_wizard/04_configure_new_dataset";
+import { WizardComponentProps, WizardContext } from "./composition_wizard/common";
 
 type Props = {
-  onAdded: (
-    datasetOrganization: string,
-    uploadedDatasetName: string,
-    needsConversion?: boolean | null | undefined,
-  ) => Promise<void>;
+  onAdded: WizardComponentProps["onAdded"];
   datastores: APIDataStore[];
 };
-const EXPECTED_VALUE_COUNT_PER_LINE = 8;
 
-const WIZARD_STEPS = [
-  {
+const WIZARD_STEPS = {
+  SelectImportType: {
     title: "Import type",
-    component: ImportTypeQuestion,
+    component: SelectImportType,
   },
-  {
+  UploadFiles: {
     title: "Upload file(s)",
     component: UploadFiles,
   },
-  {
+  SelectDatasets: {
     title: "Select Datasets",
     component: SelectDatasets,
   },
-  {
+  ConfigureNewDataset: {
     title: "Configure New Datasets",
-    component: CompositionForm,
+    component: ConfigureNewDataset,
   },
-];
+};
-
-type COMPOSE_MODE = "WITHOUT_TRANSFORMS" | "WK_ANNOTATIONS" | "BIG_WARP";
-type WizardContext = {
-  currentWizardStep: number;
-  fileList: FileList;
-  composeMode:
COMPOSE_MODE; - datasets: APIDataset[]; - sourcePoints: Vector3[]; - targetPoints: Vector3[]; -}; - -type WizardComponentProps = { - wizardContext: WizardContext; - setWizardContext: React.Dispatch>; - datastores: APIDataStore[]; - onAdded: Props["onAdded"]; -}; - -function ImportTypeQuestion({ wizardContext, setWizardContext }: WizardComponentProps) { - const { composeMode } = wizardContext; - - const onNext = () => { - setWizardContext((oldContext) => ({ - ...oldContext, - currentWizardStep: composeMode === "WITHOUT_TRANSFORMS" ? 2 : 1, - })); - }; - const onChange = (e: RadioChangeEvent) => { - setWizardContext((oldContext) => ({ - ...oldContext, - composeMode: e.target.value, - })); - }; - - return ( -
-      <div style={{ marginBottom: 8 }}>
-        Select how you want to create a new dataset:
-      </div>
-      <Radio.Group onChange={onChange} value={composeMode}>
-        <Space direction="vertical">
-          <Radio value="WITHOUT_TRANSFORMS">Combine datasets without any transforms</Radio>
-          <Radio value="WK_ANNOTATIONS">Combine datasets by using skeleton annotations</Radio>
-          <Radio value="BIG_WARP">Combine datasets by using a BigWarp CSV</Radio>
-        </Space>
-      </Radio.Group>
-      <Button type="primary" onClick={onNext} style={{ marginTop: 16 }}>Next</Button>
-    </div>
-  );
-}
-
-function UploadFiles({ wizardContext, setWizardContext }: WizardComponentProps) {
-  const fileList = wizardContext.fileList;
-  const handleChange = async (info: UploadChangeParam<UploadFile<any>>) => {
-    setWizardContext((oldContext) => ({
-      ...oldContext,
-      fileList: info.fileList,
-    }));
-  };
-
-  const onPrev = () => {
-    setWizardContext((oldContext) => ({
-      ...oldContext,
-      currentWizardStep: 0,
-    }));
-  };
-  const onNext = async () => {
-    try {
-      const sourcePoints: Vector3[] = [];
-      const targetPoints: Vector3[] = [];
-      if (wizardContext.composeMode === "BIG_WARP") {
-        if (fileList.length !== 1 || fileList[0]?.originFileObj == null) {
-          Toast.error("Expected exactly one CSV file.");
-          return;
-        }
-
-        const csv = await readFileAsText(fileList[0]?.originFileObj);
-        const lines = csv.split("\n");
-        for (const line of lines) {
-          const fields = line.split(",");
-          if (fields.length !== EXPECTED_VALUE_COUNT_PER_LINE) {
-            if (line.trim() !== "") {
-              throw new Error(
-                `Cannot interpret line in CSV file. Expected ${EXPECTED_VALUE_COUNT_PER_LINE} values, got ${fields.length}.`,
-              );
-            }
-            continue;
-          }
-          const [_pointName, _enabled, x1, y1, z1, x2, y2, z2] = fields;
-
-          const source = [x1, y1, z1].map((el) => parseInt(el.replaceAll('"', ""))) as Vector3;
-          const target = [x2, y2, z2].map((el) => parseInt(el.replaceAll('"', ""))) as Vector3;
-          sourcePoints.push(source);
-          targetPoints.push(target);
-        }
-
-        setWizardContext((oldContext) => ({
-          ...oldContext,
-          sourcePoints,
-          targetPoints,
-          datasets: [],
-          currentWizardStep: 2,
-        }));
-      } else if (wizardContext.composeMode === "WK_ANNOTATIONS") {
-        if (fileList.length !== 2) {
-          Toast.warning("Expected exactly two NML files.");
-          return;
-        }
-
-        const nmlString1 = await readFileAsText(fileList[0]?.originFileObj!);
-        const nmlString2 = await readFileAsText(fileList[1]?.originFileObj!);
-
-        if (nmlString1 === "" || nmlString2 === "") {
-          // TODO: unify error handling
-          Toast.warning("NML files should not be empty.");
-          return;
-        }
-
-        const { trees: trees1, datasetName: datasetName1 } = await parseNml(nmlString1);
-        const { trees: trees2, datasetName: datasetName2 } = await parseNml(nmlString2);
-
-        if (!datasetName1 || !datasetName2) {
-          throw new Error("Could not extract dataset names.");
-        }
-
-        const nodes1 = Array.from(
-          values(trees1)
-            .map((tree) => Array.from(tree.nodes.values())[0])
-            .values(),
-        );
-        const nodes2 = Array.from(
-          values(trees2)
-            .map((tree) => Array.from(tree.nodes.values())[0])
-            .values(),
-        );
-
-        for (const [node1, node2] of _.zip(nodes1, nodes2)) {
-          if ((node1 == null) !== (node2 == null)) {
-            throw new Error("A tree was empty while its corresponding tree wasn't.");
-          }
-          if (node1 != null && node2 != null) {
-            sourcePoints.push(node1.position);
-            targetPoints.push(node2.position);
-          }
-        }
-
-        const datasets = await tryToFetchDatasetsByName(datasetName1, datasetName2);
-
-        setWizardContext((oldContext) => ({
-          ...oldContext,
-          datasets: datasets || [],
-          sourcePoints,
-          targetPoints,
-          currentWizardStep: 2,
-        }));
-      }
-    } catch (exception) {
-      Toast.error(
-        "An error occurred while importing the uploaded files. See the Browser's console for more feedback.",
-      );
-      console.error(exception);
-    }
-  };
-
-  return (
-    <div>
-      <div style={{ marginBottom: 8 }}>
-        Landmark files ({wizardContext.composeMode === "BIG_WARP" ? "1 CSV file" : "2 NML files"}
-        ):
-      </div>
-      <Upload.Dragger
-        name="files"
-        fileList={fileList}
-        onChange={handleChange}
-        beforeUpload={() => false}
-        maxCount={2}
-        multiple
-      >
-        <p className="ant-upload-drag-icon">
-          <FileExcelOutlined style={{ fontSize: 35 }} />
-        </p>
-        <p className="ant-upload-text">
-          Drag your landmark files to this area
-        </p>
-        <p className="ant-upload-hint">
-          ...
-        </p>
-      </Upload.Dragger>
-
-      <Button onClick={onPrev}>Prev</Button>
-      <Button type="primary" onClick={onNext} style={{ marginTop: 16 }}>
-        Next
-      </Button>
-    </div>
-  );
-}
-
-async function tryToFetchDatasetsByName(
-  name1: string,
-  name2: string,
-): Promise<[APIDataset, APIDataset] | null> {
-  const { activeUser } = Store.getState();
-  try {
-    const [dataset1, dataset2] = await Promise.all([
-      getDataset(
-        {
-          owningOrganization: activeUser?.organization || "",
-          name: name1,
-        },
-        null,
-        { showErrorToast: false },
-      ),
-      getDataset(
-        {
-          owningOrganization: activeUser?.organization || "",
-          name: name2,
-        },
-        null,
-        { showErrorToast: false },
-      ),
-    ]);
-    return [dataset1, dataset2];
-  } catch (exception) {
-    console.warn(exception);
-    Toast.warning("Could not derive datasets from NML. Please specify these manually.");
-    return null;
-  }
-}
-
-function SelectDatasets({ wizardContext, setWizardContext }: WizardComponentProps) {
-  const [datasetValues, setDatasetValues] = useState<DatasetSelectionValue[]>([]);
-
-  const onPrev = () => {
-    setWizardContext((oldContext) => ({
-      ...oldContext,
-      currentWizardStep: wizardContext.composeMode === "WITHOUT_TRANSFORMS" ? 0 : 1,
-    }));
-  };
-  const onNext = async () => {
-    const datasets = await tryToFetchDatasetsByName(datasetValues[0].value, datasetValues[1].value);
-    if (datasets == null) {
-      // An error message was already shown in tryToFetchDatasetsByName
-      return;
-    }
-
-    setWizardContext((oldContext) => ({
-      ...oldContext,
-      currentWizardStep: 3,
-      datasets,
-    }));
-  };
-
-  useEffect(() => {
-    setDatasetValues(wizardContext.datasets.map((ds) => ({ value: ds.name, label: ds.name })));
-  }, []);
-
-  return (
-    <div>
-      <DatasetSelectionComponent
-        datasetValues={datasetValues}
-        setDatasetValues={setDatasetValues}
-      />
-      <Button onClick={onPrev}>Prev</Button>
-      <Button type="primary" onClick={onNext}>
-        Next
-      </Button>
-    </div>
-  );
-}
-
-export function CompositionForm(props: WizardComponentProps) {
-  const formRef = React.useRef<FormInstance>(null);
-
-  const onPrev = () => {
-    props.setWizardContext((oldContext) => ({
-      ...oldContext,
-      currentWizardStep: 2,
-    }));
-  };
-
-  const [isLoading, setIsLoading] = useState(false);
-  const activeUser = useSelector((state: OxalisState) => state.activeUser);
-  const isDatasetManagerOrAdmin = Utils.isUserAdminOrDatasetManager(activeUser);
-  const [form] = Form.useForm();
-  const [selectedTeams, setSelectedTeams] = useState<Array<APITeam>>([]);
-
-  const { wizardContext } = props;
-  const linkedDatasets = wizardContext.datasets;
-
-  const onRemoveLayer = (layer: LayerLink) => {
-    const oldLayers = form.getFieldValue(["layers"]);
-    const newLayers = oldLayers.filter((existingLayer: LayerLink) => existingLayer !== layer);
-    form.setFieldsValue({ layers: newLayers });
-  };
-
-  const handleTransformImport = async (sourcePoints: Vector3[], targetPoints: Vector3[]) => {
-    const datasets = linkedDatasets;
-    const transformationArr =
-      sourcePoints.length > 0 && targetPoints.length > 0
-        ? [
-            {
-              type: "affine" as const,
-              matrix: flatToNestedMatrix(estimateAffineMatrix4x4(sourcePoints, targetPoints)),
-            },
-          ]
-        : [];
-
-    const newLinks: LayerLink[] = (
-      _.flatMap(datasets, (dataset) =>
-        dataset.dataSource.dataLayers.map((layer) => [dataset, layer]),
-      ) as [APIDataset, APIDataLayer][]
-    ).map(
-      ([dataset, dataLayer]): LayerLink => ({
-        datasetId: {
-          owningOrganization: dataset.owningOrganization,
-          name: dataset.name,
-        },
-        sourceName: dataLayer.name,
-        newName: dataLayer.name,
-        transformations: dataset === datasets[0] ? transformationArr : [],
-      }),
-    );
-    form.setFieldsValue({ layers: newLinks });
-  };
-
-  useEffect(() => {
-    handleTransformImport(wizardContext.sourcePoints, wizardContext.targetPoints);
-  }, []);
-
-  const handleSubmit = async () => {
-    if (activeUser == null) {
-      throw new Error("Cannot upload dataset without being logged in.");
-    }
-    const layers = form.getFieldValue(["layers"]);
-
-    const uploadableDatastores = props.datastores.filter((datastore) => datastore.allowsUpload);
-    const datastoreToUse = uploadableDatastores[0];
-    if (!datastoreToUse) {
-      Toast.error("Could not find datastore that allows uploading.");
-      return;
-    }
-
-    const newDatasetName = form.getFieldValue(["name"]);
-    setIsLoading(true);
-    try {
-      await createDatasetComposition(datastoreToUse.url, {
-        newDatasetName,
-        targetFolderId: form.getFieldValue(["targetFolderId"]),
-        organizationName: "sample_organization",
-        scale: linkedDatasets[1].dataSource.scale,
-        layers,
-      });
-    } finally {
-      setIsLoading(false);
-    }
-
-    props.onAdded(activeUser.organization, newDatasetName, false);
-  };
-
-  return (
-    // Using Forms here only to validate fields and for easy layout
-    <Form form={form} layout="vertical" onFinish={handleSubmit}>
-      <Row gutter={8}>
-        <Col span={12}>
-          <DatasetNameFormItem activeUser={activeUser} />
-        </Col>
-        <Col span={12}>
-          <AllowedTeamsFormItem
-            isDatasetManagerOrAdmin={isDatasetManagerOrAdmin}
-            selectedTeams={selectedTeams}
-            setSelectedTeams={(selectedTeams) => setSelectedTeams(selectedTeams)}
-            formRef={formRef}
-          />
-        </Col>
-      </Row>
-      <FormItemWithInfo
-        name="targetFolderId"
-        label="Target Folder"
-        info="The folder in which the composed dataset will be created."
-        valuePropName="folderId"
-      >
-        <FolderSelection width="50%" disableNotEditableFolders />
-      </FormItemWithInfo>
-      <Form.Item
-        noStyle
-        shouldUpdate={(prevValues, curValues) => prevValues.layers !== curValues.layers}>
-        {({ getFieldValue }) => {
-          const layers = getFieldValue("layers") || [];
-          return (
-            <List
-              header={
-                <div style={{ fontWeight: "bold" }}>
-                  Layers
-                </div>
-              }
-            >
-              {layers.map((layer: LayerLink, idx: number) => (
-                // the layer name may change in this view, the order does not, so idx is the right key choice here
-                <List.Item key={idx}>
-                  <LinkedLayerForm layer={layer} index={idx} onRemoveLayer={onRemoveLayer} form={form} datasetId={layer.datasetId} />
-                </List.Item>
-              ))}
-            </List>
-          );
-        }}
-      </Form.Item>
-
-      <FormItem>
-        <Button onClick={onPrev}>Prev</Button>
-        <AsyncButton type="primary" htmlType="submit" loading={isLoading}>Create Dataset</AsyncButton>
-      </FormItem>
-    </Form>
-  );
-}
+} as const;
 
 export default function DatasetAddComposeView(props: Props) {
   const [wizardContext, setWizardContext] = useState<WizardContext>({
-    currentWizardStep: 0,
+    currentWizardStep: "SelectImportType",
     fileList: [],
     composeMode: "WITHOUT_TRANSFORMS",
     datasets: [],
@@ -575,91 +64,3 @@
     </CardContainer>
   );
 }
-
-function LinkedLayerForm({
-  layer,
-  index,
-  onRemoveLayer,
-  form,
-  datasetId,
-}: {
-  layer: LayerLink;
-  index: number;
-  onRemoveLayer: (layer: LayerLink) => void;
-  form: FormInstance;
-  datasetId: APIDatasetId;
-}) {
-  const layers = Form.useWatch(["layers"]);
-
-  React.useEffect(() => {
-    // Always validate all fields so that in the case of duplicate layer
-    // names all relevant fields are properly validated.
-    // This is a workaround, since shouldUpdate=true on a
-    // FormItemWithInfo doesn't work for some reason.
-    form.validateFields();
-  }, [layers]);
-
-  return (
-    <Row gutter={48}>
-      <Col span={24}>
-        <FormItemWithInfo
-          name={["layers", index, "newName"]}
-          label="Name"
-          rules={[
-            ...layerNameRules,
-            {
-              validator: syncValidator(
-                (value: string) =>
-                  layers.filter((someLayer: LayerLink) => someLayer.newName === value).length <=
-                  1,
-                "Layer names must be unique.",
-              ),
-            },
-          ]}
-        >
-          <Input />
-        </FormItemWithInfo>
-        <Tooltip title="Remove Layer">
-          <Button shape="circle" icon={<DeleteOutlined />} onClick={() => onRemoveLayer(layer)} />
-        </Tooltip>{" "}
-        <span>
-          {datasetId.name}
-          {" "}
-          / {layer.sourceName}
-        </span>
-      </Col>
-    </Row>
-  );
-}
diff --git a/frontend/javascripts/components/async_select.tsx b/frontend/javascripts/components/async_select.tsx
new file mode 100644
index 00000000000..72c1b479d05
--- /dev/null
+++ b/frontend/javascripts/components/async_select.tsx
@@ -0,0 +1,55 @@
+import { Select, Spin } from "antd";
+import type { SelectProps } from "antd/es/select";
+import debounce from "lodash/debounce";
+import React, { useMemo, useRef, useState } from "react";
+
+// This module is inspired by the "Search and Select Users" example
+// in the antd documentation (for version 4).
+// Quote:
+// A complete multiple select sample with remote search, debounce fetch, ajax callback order flow, and loading state.
+
+export interface AsyncSelectProps<ValueType = any>
+  extends Omit<SelectProps<ValueType | ValueType[]>, "options" | "children"> {
+  fetchOptions: (search: string) => Promise<ValueType[]>;
+  debounceTimeout?: number;
+}
+
+export default function AsyncSelect<
+  ValueType extends { key?: string; label: React.ReactNode; value: string | number } = any,
+>({ fetchOptions, debounceTimeout = 800, ...props }: AsyncSelectProps<ValueType>) {
+  const [fetching, setFetching] = useState(false);
+  const [options, setOptions] = useState<ValueType[]>([]);
+  const fetchRef = useRef(0);
+
+  const debounceFetcher = useMemo(() => {
+    const loadOptions = (value: string) => {
+      fetchRef.current += 1;
+      const fetchId = fetchRef.current;
+      setOptions([]);
+      setFetching(true);
+
+      fetchOptions(value).then((newOptions) => {
+        if (fetchId !== fetchRef.current) {
+          // for fetch callback order: a newer fetch was started in the
+          // meantime, so this stale response must be dropped.
+          return;
+        }
+
+        setOptions(newOptions);
+        setFetching(false);
+      });
+    };
+
+    return debounce(loadOptions, debounceTimeout);
+  }, [fetchOptions, debounceTimeout]);
+
+  return (
+    <Select
+      filterOption={false}
+      onSearch={debounceFetcher}
+      notFoundContent={fetching ? <Spin size="small" /> : null}
+      {...props}
+      options={options}
+    />
+  );
+}
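+
+// Example usage (a minimal sketch; `UserOption` and `fetchUserList` are
+// hypothetical and only illustrate the expected option shape):
+//
+//   type UserOption = { key?: string; label: string; value: string };
+//
+//   async function fetchUserList(search: string): Promise<UserOption[]> {
+//     const response = await fetch(`/api/users?query=${search}`);
+//     const users = await response.json();
+//     return users.map((user: { id: string; name: string }) => ({
+//       label: user.name,
+//       value: user.id,
+//     }));
+//   }
+//
+//   <AsyncSelect mode="multiple" placeholder="Select users" fetchOptions={fetchUserList} />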