diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 342f4881279..9fe943692bd 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -17,11 +17,11 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Added route for triggering the compute segment index worker job. [#7471](https://github.com/scalableminds/webknossos/pull/7471) - Added thumbnails to the dashboard dataset list. [#7479](https://github.com/scalableminds/webknossos/pull/7479) - Adhoc mesh rendering is now available for ND datasets.[#7394](https://github.com/scalableminds/webknossos/pull/7394) +- Added the ability to compose a new dataset from existing dataset layers. This can be done with or without transforms (transforms will be derived from landmarks given via BigWarp CSV or WK NMLs). [#7395](https://github.com/scalableminds/webknossos/pull/7395) - When setting up WEBKNOSSOS from the git repository for development, the organization directory for storing datasets is now automatically created on startup. [#7517](https://github.com/scalableminds/webknossos/pull/7517) - Multiple segments can be dragged and dropped in the segments tab. [#7536](https://github.com/scalableminds/webknossos/pull/7536) - Added the option to convert agglomerate skeletons to freely modifiable skeletons in the context menu of the Skeleton tab. [#7537](https://github.com/scalableminds/webknossos/pull/7537) - ### Changed - Improved loading speed of the annotation list. [#7410](https://github.com/scalableminds/webknossos/pull/7410) - Admins and Team Managers can now also download job exports for jobs of other users, if they have the link. 
[#7462](https://github.com/scalableminds/webknossos/pull/7462) diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index cf7a6ba2338..ba98be24107 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -5,11 +5,8 @@ import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.controllers.JobExportProperties import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.datasource.inbox.{InboxDataSourceLike => InboxDataSource} -import com.scalableminds.webknossos.datastore.services.{ - DataStoreStatus, - LinkedLayerIdentifier, - ReserveUploadInformation -} +import com.scalableminds.webknossos.datastore.services.uploading.{LinkedLayerIdentifier, ReserveUploadInformation} +import com.scalableminds.webknossos.datastore.services.DataStoreStatus import com.typesafe.scalalogging.LazyLogging import mail.{MailchimpClient, MailchimpTag} diff --git a/docs/datasets.md b/docs/datasets.md index b71b6605892..03bc468fa18 100644 --- a/docs/datasets.md +++ b/docs/datasets.md @@ -126,6 +126,18 @@ For manual conversion, we provide the following software tools and libraries: - The [WEBKNOSSOS CLI](https://docs.webknossos.org/cli) is a CLI tool that can convert many formats to WKW. - For other file formats, the [WEBKNOSSOS Python library](https://docs.webknossos.org/webknossos-py/index.html) can be an option for custom scripting. +### Composing Datasets +New datasets can also be composed from existing ones. +This feature allows to combine layers from previously added datasets to create a new dataset. +During compositions, transforms can optionally be defined in case the datasets are not in the same coordinate system. +There are three different ways to compose a new dataset: + +1) Combine datasets by selecting from existing datasets. 
No transforms between these datasets will be added. +2) Create landmark annotations (using the skeleton tool) for each dataset. Then, these datasets can be combined while transforming one dataset to match the other. +3) Similar to (2), two datasets can be combined while respecting landmarks that were generated with BigWarp. + +See the "Compose from existing datasets" tab in the "Add Dataset" screen for more details. + ## Configuring Datasets You can configure the metadata, permission, and other properties of a dataset at any time. diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index 02bc6153e18..57a2f2e08c4 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -66,6 +66,7 @@ import type { MaintenanceInfo, AdditionalCoordinate, RenderAnimationOptions, + LayerLink, } from "types/api_flow_types"; import { APIAnnotationTypeEnum } from "types/api_flow_types"; import type { LOG_LEVELS, Vector2, Vector3, Vector6 } from "oxalis/constants"; @@ -1422,10 +1423,12 @@ export async function getActiveDatasetsOfMyOrganization(): Promise { const sharingTokenSuffix = sharingToken != null ? `?sharingToken=${sharingToken}` : ""; return Request.receiveJSON( `/api/datasets/${datasetId.owningOrganization}/${datasetId.name}${sharingTokenSuffix}`, + options, ); } @@ -1506,6 +1509,22 @@ export function getDatasetAccessList(datasetId: APIDatasetId): Promise + Request.sendJSONReceiveJSON(`${datastoreUrl}/data/datasets/compose?token=${token}`, { + data: payload, + }), + ); +} + export function createResumableUpload(datastoreUrl: string, uploadId: string): Promise { // @ts-expect-error ts-migrate(7006) FIXME: Parameter 'file' implicitly has an 'any' type. 
const generateUniqueIdentifier = (file) => { diff --git a/frontend/javascripts/admin/dataset/composition_wizard/01_select_import_type.tsx b/frontend/javascripts/admin/dataset/composition_wizard/01_select_import_type.tsx new file mode 100644 index 00000000000..b7950cf3110 --- /dev/null +++ b/frontend/javascripts/admin/dataset/composition_wizard/01_select_import_type.tsx @@ -0,0 +1,47 @@ +import { Button, Radio, RadioChangeEvent, Space } from "antd"; +import React from "react"; +import { WizardComponentProps } from "./common"; + +export default function SelectImportType({ + wizardContext, + setWizardContext, +}: WizardComponentProps) { + const { composeMode } = wizardContext; + + const onNext = () => { + setWizardContext((oldContext) => ({ + ...oldContext, + currentWizardStep: composeMode === "WITHOUT_TRANSFORMS" ? "SelectDatasets" : "UploadFiles", + })); + }; + const onChange = (e: RadioChangeEvent) => { + setWizardContext((oldContext) => ({ + ...oldContext, + composeMode: e.target.value, + })); + }; + + return ( +
+
+ You can create a new dataset by composing existing datasets together. There are three + different ways to accomplish this: +
+ + + Combine datasets without any transforms + + Combine datasets by using skeleton annotations (NML) + + Combine datasets by using BigWarp landmarks (CSV) + + +
+ In all three cases, you can tweak which layers should be used later. +
+ +
+ ); +} diff --git a/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx new file mode 100644 index 00000000000..b59e452961c --- /dev/null +++ b/frontend/javascripts/admin/dataset/composition_wizard/02_upload_files.tsx @@ -0,0 +1,208 @@ +import { FileExcelOutlined } from "@ant-design/icons"; +import { Button } from "antd"; +import Upload, { UploadChangeParam, UploadFile } from "antd/lib/upload"; +import { AsyncButton } from "components/async_clickables"; +import { readFileAsText } from "libs/read_file"; +import Toast from "libs/toast"; +import { SoftError } from "libs/utils"; +import _ from "lodash"; +import { Vector3 } from "oxalis/constants"; +import { parseNml } from "oxalis/model/helpers/nml_helpers"; +import React from "react"; +import { tryToFetchDatasetsByName, WizardComponentProps, WizardContext, FileList } from "./common"; +import ErrorHandling from "libs/error_handling"; +import * as Utils from "libs/utils"; + +const EXPECTED_VALUE_COUNT_PER_CSV_LINE = 8; + +export default function UploadFiles({ wizardContext, setWizardContext }: WizardComponentProps) { + const fileList = wizardContext.fileList; + const handleChange = async (info: UploadChangeParam>) => { + setWizardContext((oldContext) => ({ + ...oldContext, + fileList: info.fileList, + })); + }; + + const onPrev = () => { + setWizardContext((oldContext) => ({ + ...oldContext, + currentWizardStep: "SelectImportType", + })); + }; + const onNext = async () => { + try { + let newContextPartial: Partial | null = null; + if (wizardContext.composeMode === "BIG_WARP") { + newContextPartial = await parseBigWarpFile(fileList); + } else if (wizardContext.composeMode === "WK_ANNOTATIONS") { + newContextPartial = await parseNmlFiles(fileList); + } else { + throw new Error("Unexpected compose mode: " + wizardContext.composeMode); + } + setWizardContext((oldContext) => ({ + ...oldContext, + ...newContextPartial, + })); + 
} catch (exception) { + if (exception instanceof SoftError) { + Toast.warning(exception.message); + } else { + Toast.error( + "An error occurred while importing the uploaded files. See the Browser's console for more details.", + ); + ErrorHandling.notify(exception as Error); + console.error(exception); + } + } + }; + + return ( +
+ {wizardContext.composeMode === "BIG_WARP" ? ( +

+ Please upload one CSV file that was exported by BigWarp. Note that the first dataset + referenced by the CSV file will be transformed to the second referenced dataset. +

+ ) : ( +

+ Please upload two NML files that contain landmarks that you created with WEBKNOSSOS. Note + that the dataset that belongs to the first NML will be transformed to the dataset that + belongs to the second NML file. The skeletons in the NML files should match each other + exactly. This means that both NMLs should contain the same number of trees and that the + n-th tree of the first and second NML should have the same number of nodes, as these will + be aligned with each other.

+ )} + +
+

+ Landmark files ({wizardContext.composeMode === "BIG_WARP" ? "1 CSV file" : "2 NML files"} + ): +

+ false} + maxCount={wizardContext.composeMode === "BIG_WARP" ? 1 : 2} + multiple + > +

+ +

+

Drag your landmark file(s) to this area

+
+
+ + + + + Next + +
+ ); +} + +async function parseBigWarpFile(fileList: FileList): Promise> { + const sourcePoints: Vector3[] = []; + const targetPoints: Vector3[] = []; + if (fileList.length !== 1 || fileList[0]?.originFileObj == null) { + throw new SoftError("Expected exactly one CSV file."); + } + + const csv = await readFileAsText(fileList[0]?.originFileObj); + const lines = csv.split("\n"); + for (const line of lines) { + const fields = line.split(","); + if (fields.length !== EXPECTED_VALUE_COUNT_PER_CSV_LINE) { + if (line.trim() !== "") { + throw new SoftError( + `Cannot interpret line in CSV file. Expected ${EXPECTED_VALUE_COUNT_PER_CSV_LINE} values, got ${fields.length}.`, + ); + } + continue; + } + const [_pointName, enabled, x1, y1, z1, x2, y2, z2] = fields; + + if (enabled) { + const source = [x1, y1, z1].map((el) => parseInt(el.replaceAll('"', ""))) as Vector3; + const target = [x2, y2, z2].map((el) => parseInt(el.replaceAll('"', ""))) as Vector3; + sourcePoints.push(source); + targetPoints.push(target); + } + } + + return { + sourcePoints, + targetPoints, + datasets: [], + currentWizardStep: "SelectDatasets", + }; +} + +async function parseNmlFiles(fileList: FileList): Promise | null> { + const sourcePoints: Vector3[] = []; + const targetPoints: Vector3[] = []; + if (fileList.length !== 2) { + throw new SoftError("Expected exactly two NML files."); + } + + const nmlString1 = await readFileAsText(fileList[0]?.originFileObj!); + const nmlString2 = await readFileAsText(fileList[1]?.originFileObj!); + + if (nmlString1 === "" || nmlString2 === "") { + throw new SoftError("NML files should not be empty."); + } + + const { trees: trees1, datasetName: datasetName1 } = await parseNml(nmlString1); + const { trees: trees2, datasetName: datasetName2 } = await parseNml(nmlString2); + + if (!datasetName1 || !datasetName2) { + throw new SoftError("Could not extract dataset names."); + } + + if (Object.keys(trees1).length !== Object.keys(trees2).length) { + throw new SoftError("The two 
NML files should have the same tree count."); + } + + for (const [tree1, tree2] of _.zip(Utils.values(trees1), Utils.values(trees2))) { + if (tree1 == null || tree2 == null) { + // Satisfy TS. This should not happen, as we checked before that both tree collections + // have the same size. + throw new SoftError("A tree was unexpectedly parsed as null. Please try again"); + } + const nodes1 = Array.from(tree1.nodes.values()); + const nodes2 = Array.from(tree2.nodes.values()); + for (const [node1, node2] of _.zip(nodes1, nodes2)) { + if ((node1 == null) !== (node2 == null)) { + throw new SoftError( + `Tree ${tree1.treeId} and tree ${tree2.treeId} don't have the same amount of trees. Ensure that the NML structures match each other.`, + ); + } + if (node1 != null && node2 != null) { + sourcePoints.push(node1.position); + targetPoints.push(node2.position); + } + } + } + + const datasets = await tryToFetchDatasetsByName( + [datasetName1, datasetName2], + "Could not derive datasets from NML. Please specify these manually.", + ); + + return { + datasets: datasets || [], + sourcePoints, + targetPoints, + currentWizardStep: "SelectDatasets", + }; +} diff --git a/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx b/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx new file mode 100644 index 00000000000..85526775d67 --- /dev/null +++ b/frontend/javascripts/admin/dataset/composition_wizard/03_select_datasets.tsx @@ -0,0 +1,70 @@ +import { Button } from "antd"; +import { AsyncButton } from "components/async_clickables"; +import DatasetSelectionComponent, { + DatasetSelectionValue, +} from "dashboard/dataset/dataset_selection_component"; +import React, { useEffect, useState } from "react"; +import { tryToFetchDatasetsByName, WizardComponentProps } from "./common"; + +export default function SelectDatasets({ wizardContext, setWizardContext }: WizardComponentProps) { + const [datasetValues, setDatasetValues] = useState([]); + + 
const onPrev = () => { + setWizardContext((oldContext) => ({ + ...oldContext, + currentWizardStep: + wizardContext.composeMode === "WITHOUT_TRANSFORMS" ? "SelectImportType" : "UploadFiles", + })); + }; + const onNext = async () => { + const datasets = await tryToFetchDatasetsByName( + datasetValues.map((el) => el.value), + "Could not find datasets. Please doublecheck your selection.", + ); + if (datasets == null) { + // An error message was already shown in tryToFetchDatasetsByName + return; + } + + setWizardContext((oldContext) => ({ + ...oldContext, + currentWizardStep: "ConfigureNewDataset", + datasets, + })); + }; + + useEffect(() => { + setDatasetValues(wizardContext.datasets.map((ds) => ({ value: ds.name, label: ds.name }))); + }, []); + + // When not using any transforms, + let isDatasetCountValid = true; + if (wizardContext.composeMode === "WITHOUT_TRANSFORMS") { + isDatasetCountValid = datasetValues.length > 0; + } else { + isDatasetCountValid = datasetValues.length === 2; + } + + return ( +
+

Select the datasets that you want to combine or double-check the pre-selected datasets.

+ + + + + + Next + +
+ ); +} diff --git a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx new file mode 100644 index 00000000000..9cc587174e5 --- /dev/null +++ b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx @@ -0,0 +1,285 @@ +import { DeleteOutlined } from "@ant-design/icons"; +import { createDatasetComposition } from "admin/admin_rest_api"; +import { + AllowedTeamsFormItem, + DatasetNameFormItem, + layerNameRules, +} from "admin/dataset/dataset_components"; +import { Button, Col, Form, FormInstance, Input, List, Row, Tooltip } from "antd"; +import { FormItemWithInfo } from "dashboard/dataset/helper_components"; +import FolderSelection from "dashboard/folders/folder_selection"; +import { estimateAffineMatrix4x4 } from "libs/estimate_affine"; +import Toast from "libs/toast"; +import * as Utils from "libs/utils"; +import _ from "lodash"; +import messages from "messages"; +import { Vector3 } from "oxalis/constants"; +import { flatToNestedMatrix } from "oxalis/model/accessors/dataset_accessor"; +import { OxalisState } from "oxalis/store"; +import React, { useEffect, useState } from "react"; +import { useSelector } from "react-redux"; +import { APIDataLayer, APIDataset, APIDatasetId, APITeam, LayerLink } from "types/api_flow_types"; +import { syncValidator } from "types/validation"; +import { WizardComponentProps } from "./common"; + +const FormItem = Form.Item; + +export function ConfigureNewDataset(props: WizardComponentProps) { + const formRef = React.useRef>(null); + + const onPrev = () => { + props.setWizardContext((oldContext) => ({ + ...oldContext, + currentWizardStep: "SelectDatasets", + })); + }; + + const [isLoading, setIsLoading] = useState(false); + const activeUser = useSelector((state: OxalisState) => state.activeUser); + const isDatasetManagerOrAdmin = Utils.isUserAdminOrDatasetManager(activeUser); + const [form] = 
Form.useForm(); + const [selectedTeams, setSelectedTeams] = useState>([]); + + const { wizardContext } = props; + const linkedDatasets = wizardContext.datasets; + + const onRemoveLayer = (layer: LayerLink) => { + const oldLayers = form.getFieldValue(["layers"]); + const newLayers = oldLayers.filter((existingLayer: LayerLink) => existingLayer !== layer); + form.setFieldsValue({ layers: newLayers }); + }; + + const handleTransformImport = async (sourcePoints: Vector3[], targetPoints: Vector3[]) => { + const datasets = linkedDatasets; + const transformationArr = + sourcePoints.length > 0 && targetPoints.length > 0 + ? [ + { + type: "affine" as const, + matrix: flatToNestedMatrix(estimateAffineMatrix4x4(sourcePoints, targetPoints)), + }, + ] + : []; + + const newLinks: LayerLink[] = ( + _.flatMap(datasets, (dataset) => + dataset.dataSource.dataLayers.map((layer) => [dataset, layer]), + ) as [APIDataset, APIDataLayer][] + ).map( + ([dataset, dataLayer]): LayerLink => ({ + datasetId: { + owningOrganization: dataset.owningOrganization, + name: dataset.name, + }, + sourceName: dataLayer.name, + newName: dataLayer.name, + transformations: dataset === datasets[0] ? 
transformationArr : [], + }), + ); + form.setFieldsValue({ layers: newLinks }); + }; + + useEffect(() => { + handleTransformImport(wizardContext.sourcePoints, wizardContext.targetPoints); + }, []); + + const handleSubmit = async () => { + if (activeUser == null) { + throw new Error("Cannot create dataset without being logged in."); + } + const layers = form.getFieldValue(["layers"]); + + const uploadableDatastores = props.datastores.filter((datastore) => datastore.allowsUpload); + const datastoreToUse = uploadableDatastores[0]; + if (!datastoreToUse) { + Toast.error("Could not find datastore that allows uploading."); + return; + } + + const newDatasetName = form.getFieldValue(["name"]); + setIsLoading(true); + try { + await createDatasetComposition(datastoreToUse.url, { + newDatasetName, + targetFolderId: form.getFieldValue(["targetFolderId"]), + organizationName: activeUser.organization, + scale: linkedDatasets[1].dataSource.scale, + layers, + }); + } finally { + setIsLoading(false); + } + + props.onAdded(activeUser.organization, newDatasetName, false); + }; + + return ( + // Using Forms here only to validate fields and for easy layout +
+

Please configure the dataset that is about to be created.

+
+ + + + + + + + + + + + + + prevValues.layers !== curValues.layers}> + {({ getFieldValue }) => { + const layers = getFieldValue("layers") || []; + return ( + + Layers +
+ } + > + {layers.map((layer: LayerLink, idx: number) => ( + // the layer name may change in this view, the order does not, so idx is the right key choice here + + + + ))} + + ); + }} + + + + + + + + + + ); +} + +function LinkedLayerForm({ + layer, + index, + onRemoveLayer, + form, + datasetId, +}: { + layer: LayerLink; + index: number; + onRemoveLayer: (layer: LayerLink) => void; + form: FormInstance; + datasetId: APIDatasetId; +}) { + const layers = Form.useWatch(["layers"]); + + React.useEffect(() => { + // Always validate all fields so that in the case of duplicate layer + // names all relevant fields are properly validated. + // This is a workaround, since shouldUpdate=true on a + // FormItemWithInfo doesn't work for some reason. + form.validateFields(); + }, [layers]); + + return ( +
+
+ +
+ + + + + layers.filter((someLayer: LayerLink) => someLayer.newName === value).length <= + 1, + "Layer names must be unique.", + ), + }, + ]} + > + + + + + + + {datasetId.name} + {" "} + / {layer.sourceName} + + + +
+ ); +} diff --git a/frontend/javascripts/admin/dataset/composition_wizard/common.ts b/frontend/javascripts/admin/dataset/composition_wizard/common.ts new file mode 100644 index 00000000000..6036d5f9d04 --- /dev/null +++ b/frontend/javascripts/admin/dataset/composition_wizard/common.ts @@ -0,0 +1,61 @@ +import { getDataset } from "admin/admin_rest_api"; +import { UploadFile } from "antd"; +import Toast from "libs/toast"; +import { Vector3 } from "oxalis/constants"; +import { Store } from "oxalis/singletons"; +import { APIDataset, APIDataStore } from "types/api_flow_types"; + +export type FileList = UploadFile[]; + +export type WizardStep = + | "SelectImportType" + | "UploadFiles" + | "SelectDatasets" + | "ConfigureNewDataset"; + +export type COMPOSE_MODE = "WITHOUT_TRANSFORMS" | "WK_ANNOTATIONS" | "BIG_WARP"; +export type WizardContext = { + currentWizardStep: WizardStep; + fileList: FileList; + composeMode: COMPOSE_MODE; + datasets: APIDataset[]; + sourcePoints: Vector3[]; + targetPoints: Vector3[]; +}; + +export type WizardComponentProps = { + wizardContext: WizardContext; + setWizardContext: React.Dispatch>; + datastores: APIDataStore[]; + onAdded: ( + datasetOrganization: string, + uploadedDatasetName: string, + needsConversion?: boolean | null | undefined, + ) => Promise; +}; + +export async function tryToFetchDatasetsByName( + names: string[], + userErrorMessage: string, +): Promise { + const { activeUser } = Store.getState(); + try { + const datasets = await Promise.all( + names.map((name) => + getDataset( + { + owningOrganization: activeUser?.organization || "", + name: name, + }, + null, + { showErrorToast: false }, + ), + ), + ); + return datasets; + } catch (exception) { + console.warn(exception); + Toast.warning(userErrorMessage); + return null; + } +} diff --git a/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx new file mode 100644 index 00000000000..37384ea6624 --- 
/dev/null +++ b/frontend/javascripts/admin/dataset/dataset_add_compose_view.tsx @@ -0,0 +1,58 @@ +import { CardContainer } from "admin/dataset/dataset_components"; +import React, { useState } from "react"; +import { APIDataStore } from "types/api_flow_types"; +import SelectImportType from "./composition_wizard/01_select_import_type"; +import UploadFiles from "./composition_wizard/02_upload_files"; +import SelectDatasets from "./composition_wizard/03_select_datasets"; +import { ConfigureNewDataset } from "./composition_wizard/04_configure_new_dataset"; +import { WizardComponentProps, WizardContext } from "./composition_wizard/common"; + +type Props = { + onAdded: WizardComponentProps["onAdded"]; + datastores: APIDataStore[]; +}; + +const WIZARD_STEPS = { + SelectImportType: { + title: "Import type", + component: SelectImportType, + }, + UploadFiles: { + title: "Upload file(s)", + component: UploadFiles, + }, + SelectDatasets: { + title: "Select Datasets", + component: SelectDatasets, + }, + ConfigureNewDataset: { + title: "Configure New Datasets", + component: ConfigureNewDataset, + }, +} as const; + +export default function DatasetAddComposeView(props: Props) { + const [wizardContext, setWizardContext] = useState({ + currentWizardStep: "SelectImportType", + fileList: [], + composeMode: "WITHOUT_TRANSFORMS", + datasets: [], + sourcePoints: [], + targetPoints: [], + }); + const { currentWizardStep } = wizardContext; + const CurrentWizardComponent = WIZARD_STEPS[currentWizardStep].component; + + return ( +
+ + + +
+ ); +} diff --git a/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx index e06e81ca944..36a187d806c 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_remote_view.tsx @@ -48,7 +48,6 @@ type OwnProps = { onAdded: ( datasetOrganization: string, uploadedDatasetName: string, - isRemoteDataset: boolean, needsConversion?: boolean | null | undefined, ) => Promise; datastores: APIDataStore[]; @@ -228,7 +227,7 @@ function DatasetAddRemoteView(props: Props) { Toast.error(`The datasource config could not be stored. ${e}`); return; } - onAdded(activeUser.organization, configJSON.id.name, true); + onAdded(activeUser.organization, configJSON.id.name); } } diff --git a/frontend/javascripts/admin/dataset/dataset_add_view.tsx b/frontend/javascripts/admin/dataset/dataset_add_view.tsx index e7cf364042b..a5105cea949 100644 --- a/frontend/javascripts/admin/dataset/dataset_add_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_add_view.tsx @@ -1,7 +1,7 @@ import type { RouteComponentProps } from "react-router-dom"; import { withRouter } from "react-router-dom"; import { Tabs, Modal, Button, Layout, TabsProps } from "antd"; -import { DatabaseOutlined, UploadOutlined } from "@ant-design/icons"; +import { CopyOutlined, DatabaseOutlined, UploadOutlined } from "@ant-design/icons"; import React, { useState } from "react"; import { connect } from "react-redux"; import type { APIDataStore } from "types/api_flow_types"; @@ -12,30 +12,40 @@ import DatasetUploadView from "admin/dataset/dataset_upload_view"; import features from "features"; import { getDatastores } from "admin/admin_rest_api"; import { useFetch } from "libs/react_helpers"; +import DatasetAddComposeView from "./dataset_add_compose_view"; const { Content, Sider } = Layout; -enum DatasetAddViewTabs { +// Used for the tab keys as well as for +// distinguishing between the 
add type after +// successful import. +enum DatasetAddType { UPLOAD = "upload", REMOTE = "remote", + COMPOSE = "compose", } +const addTypeToVerb: Record = { + upload: "uploaded", + remote: "added", + compose: "created", +}; function DatasetAddView({ history }: RouteComponentProps) { const datastores = useFetch(getDatastores, [], []); const [datasetName, setDatasetName] = useState(""); const [organization, setOrganization] = useState(""); const [datasetNeedsConversion, setDatasetNeedsConversion] = useState(false); - const [isRemoteDataset, setIsRemoteDataset] = useState(false); + const [datasetAddType, setImportType] = useState(DatasetAddType.UPLOAD); const handleDatasetAdded = async ( + datasetAddType: DatasetAddType, datasetOrganization: string, uploadedDatasetName: string, - isRemoteDataset: boolean, needsConversion: boolean | null | undefined, ): Promise => { setOrganization(datasetOrganization); setDatasetName(uploadedDatasetName); - setIsRemoteDataset(isRemoteDataset); + setImportType(datasetAddType); // @ts-expect-error ts-migrate(2345) FIXME: Argument of type 'boolean | null | undefined' is n... Remove this comment to see the full error message setDatasetNeedsConversion(needsConversion); }; @@ -55,7 +65,7 @@ function DatasetAddView({ history }: RouteComponentProps) { textAlign: "center", }} > - The dataset was {isRemoteDataset ? "imported" : "uploaded"} successfully + The dataset was {addTypeToVerb[datasetAddType]} successfully {datasetNeedsConversion ? " and a conversion job was started." : null}.
), - key: DatasetAddViewTabs.UPLOAD, - children: , + key: DatasetAddType.UPLOAD, + children: ( + + ), }, { label: ( @@ -124,8 +139,28 @@ function DatasetAddView({ history }: RouteComponentProps) { Add Remote Dataset ), - key: DatasetAddViewTabs.REMOTE, - children: , + key: DatasetAddType.REMOTE, + children: ( + + ), + }, + { + label: ( + + + Compose From Existing Datasets + + ), + key: DatasetAddType.COMPOSE, + children: ( + + ), }, ]; diff --git a/frontend/javascripts/admin/dataset/dataset_components.tsx b/frontend/javascripts/admin/dataset/dataset_components.tsx index d2bab29e5cc..2b6e35341c8 100644 --- a/frontend/javascripts/admin/dataset/dataset_components.tsx +++ b/frontend/javascripts/admin/dataset/dataset_components.tsx @@ -1,9 +1,13 @@ import * as React from "react"; -import { Form, Input, Select, Card } from "antd"; +import { Form, Input, Select, Card, FormInstance } from "antd"; import messages from "messages"; import { isDatasetNameValid } from "admin/admin_rest_api"; -import type { APIDataStore, APIUser } from "types/api_flow_types"; +import type { APIDataStore, APITeam, APIUser } from "types/api_flow_types"; import { syncValidator } from "types/validation"; +import { FormItemWithInfo } from "dashboard/dataset/helper_components"; +import TeamSelectionComponent from "dashboard/dataset/team_selection_component"; +import features from "features"; + const FormItem = Form.Item; export function CardContainer({ children, @@ -43,7 +47,7 @@ export const layerNameRules = [ }, { validator: syncValidator( - (value: string) => !value.startsWith("."), + (value: string | null) => !value || !value.startsWith("."), "The name must not start with a dot.", ), }, @@ -138,3 +142,60 @@ export function DatastoreFormItem({ ); } + +export function AllowedTeamsFormItem({ + isDatasetManagerOrAdmin, + selectedTeams, + setSelectedTeams, + formRef, +}: { + isDatasetManagerOrAdmin: boolean; + selectedTeams: APITeam | Array; + setSelectedTeams: (teams: APITeam | Array) => void; + 
formRef: React.RefObject>; +}) { + return ( + + { + if (formRef.current == null) return; + + if (!Array.isArray(selectedTeams)) { + // Making sure that we always have an array even when only one team is selected. + selectedTeams = [selectedTeams]; + } + + formRef.current.setFieldsValue({ + initialTeams: selectedTeams, + }); + setSelectedTeams(selectedTeams); + }} + afterFetchedTeams={(fetchedTeams) => { + if (!features().isWkorgInstance) { + return; + } + + const teamOfOrganization = fetchedTeams.find((team) => team.name === team.organization); + + if (teamOfOrganization == null) { + return; + } + + if (formRef.current == null) return; + formRef.current.setFieldsValue({ + initialTeams: [teamOfOrganization], + }); + setSelectedTeams([teamOfOrganization]); + }} + /> + + ); +} diff --git a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx index 8da723c8589..0fc49920079 100644 --- a/frontend/javascripts/admin/dataset/dataset_upload_view.tsx +++ b/frontend/javascripts/admin/dataset/dataset_upload_view.tsx @@ -34,12 +34,12 @@ import messages from "messages"; import { trackAction } from "oxalis/model/helpers/analytics"; import Zip from "libs/zipjs_wrapper"; import { + AllowedTeamsFormItem, CardContainer, DatasetNameFormItem, DatastoreFormItem, } from "admin/dataset/dataset_components"; import { Vector3Input } from "libs/vector_input"; -import TeamSelectionComponent from "dashboard/dataset/team_selection_component"; import features from "features"; import { syncValidator } from "types/validation"; import { FormInstance } from "antd/lib/form"; @@ -59,7 +59,7 @@ const logRetryToAnalytics = _.throttle((datasetName: string) => { type OwnProps = { datastores: Array; withoutCard?: boolean; - onUploaded: (arg0: string, arg1: string, arg2: boolean, arg3: boolean) => Promise | void; + onUploaded: (arg0: string, arg1: string, arg2: boolean) => Promise | void; }; type StateProps = { activeUser: APIUser | null | 
undefined; @@ -155,7 +155,7 @@ class DatasetUploadView extends React.Component { unblock: ((...args: Array) => any) | null | undefined; blockTimeoutId: number | null = null; - formRef = React.createRef(); + formRef: React.RefObject> = React.createRef(); componentDidMount() { sendAnalyticsEvent("open_upload_view"); @@ -291,8 +291,6 @@ class DatasetUploadView extends React.Component { finishDatasetUpload(datastoreUrl, uploadInfo).then( async () => { trackAction("Upload dataset"); - await Utils.sleep(3000); // wait for 3 seconds so the server can catch up / do its thing - Toast.success(messages["dataset.upload_success"]); let maybeError; @@ -345,7 +343,6 @@ class DatasetUploadView extends React.Component { this.props.onUploaded( activeUser.organization, formValues.name, - false, this.state.needsConversion, ); } @@ -674,54 +671,12 @@ class DatasetUploadView extends React.Component { - - { - if (this.formRef.current == null) return; - - if (!Array.isArray(selectedTeams)) { - // Making sure that we always have an array even when only one team is selected. 
- selectedTeams = [selectedTeams]; - } - - this.formRef.current.setFieldsValue({ - initialTeams: selectedTeams, - }); - this.setState({ - selectedTeams, - }); - }} - afterFetchedTeams={(fetchedTeams) => { - if (!features().isWkorgInstance) { - return; - } - - const teamOfOrganization = fetchedTeams.find( - (team) => team.name === team.organization, - ); - - if (teamOfOrganization == null) { - return; - } - - if (this.formRef.current == null) return; - this.formRef.current.setFieldsValue({ - initialTeams: [teamOfOrganization], - }); - this.setState({ - selectedTeams: [teamOfOrganization], - }); - }} - /> - + this.setState({ selectedTeams })} + formRef={this.formRef} + /> diff --git a/frontend/javascripts/components/async_select.tsx b/frontend/javascripts/components/async_select.tsx new file mode 100644 index 00000000000..f9e4db68d7b --- /dev/null +++ b/frontend/javascripts/components/async_select.tsx @@ -0,0 +1,61 @@ +import { Select, Spin } from "antd"; +import type { SelectProps } from "antd/es/select"; +import debounce from "lodash/debounce"; +import React, { useMemo, useRef, useState } from "react"; + +// This module is inspired by the "Search and Select Users" example +// in the antd documentation (for version 4). +// Quote: +// A complete multiple select sample with remote search, debounce fetch, ajax callback order flow, and loading state. 
+ +export interface AsyncSelectProps + extends Omit, "options" | "children"> { + fetchOptions: (search: string) => Promise; + debounceTimeout?: number; +} + +export default function AsyncSelect< + ValueType extends { key?: string; label: React.ReactNode; value: string | number } = any, +>({ fetchOptions, debounceTimeout = 400, ...props }: AsyncSelectProps) { + const [fetching, setFetching] = useState(false); + const [options, setOptions] = useState([]); + const fetchRef = useRef(0); + + const debounceFetcher = useMemo(() => { + const loadOptions = (value: string) => { + fetchRef.current += 1; + const fetchId = fetchRef.current; + setOptions([]); + setFetching(true); + + fetchOptions(value).then((newOptions) => { + if (fetchId !== fetchRef.current) { + // for fetch callback order + return; + } + + setOptions(newOptions); + setFetching(false); + }); + }; + + return debounce(loadOptions, debounceTimeout); + }, [fetchOptions, debounceTimeout]); + + return ( +