Skip to content

Commit

Permalink
Add frontend and backend part for job to materialize a volume annotat…
Browse files Browse the repository at this point in the history
…ion (#6086)

* add frontend and backend part for job to apply merger mode tracing

* renaming variable

* add annotation type as job parameter and make layer names more readable

* fix layer selection for merger mode job modal

* add job output dataset name to start job modals
make start job modals a form

* add output dataset name to nuclei & neuron & merger mode job params
Add potential volume annotation layer to the params of the merger mode job for all potential annotation cases

* refactor starting modal for flood fills to use the same root component as the other job starting modals
- Add volume annotation layer name to flood fill job to make it work when there are multiple annotation layers

* Apply suggestions from code review

Co-authored-by: Philipp Otto <[email protected]>

* apply pr feedback

* disable jobs

* update docs

* apply pr feedback

* add changelog entry

* Update docs/volume_annotation.md

Co-authored-by: Philipp Otto <[email protected]>

* only show merger mode job button when jobs are enabled
- and apply some feedback

* add output segmentation layer name as user input field to apply merger mode job modal

* add segmentation output layer name to apply merger mode job api call

* make merger job available for volume annotation without skeletons

* fix returnLink of materialize job

* make materialize volume annotation job better compatible with a volume layer without a fallback layer
- also incorporate other feedback

* update doc images and add merge with fallback layer section to docs

* Apply suggestions from code review

Co-authored-by: Daniel <[email protected]>

* apply review feedback
 - pass unallowed layer names directly to form item

* Update docs/volume_annotation.md

Co-authored-by: Philipp Otto <[email protected]>

* apply pr feedback

* rephrase comment to make it clearer

Co-authored-by: Philipp Otto <[email protected]>
Co-authored-by: Daniel <[email protected]>
Co-authored-by: Florian M <[email protected]>
  • Loading branch information
4 people authored May 9, 2022
1 parent b832f98 commit 95ec9e1
Show file tree
Hide file tree
Showing 20 changed files with 888 additions and 550 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
[Commits](https://github.com/scalableminds/webknossos/compare/22.05.1...HEAD)

### Added
- Added a long-running job that applies the merging done via a merger mode tracing to a new output dataset. The job is accessible via a button next to the merger mode button once the merger mode is active. [#6086](https://github.com/scalableminds/webknossos/pull/6086)
- Added support to stream zarr files using the corresponding [zarr spec](https://zarr.readthedocs.io/en/stable/spec/v2.html#storage). [#6144](https://github.com/scalableminds/webknossos/pull/6144)

### Changed
Expand Down
67 changes: 58 additions & 9 deletions app/controllers/JobsController.scala
Original file line number Diff line number Diff line change
Expand Up @@ -126,7 +126,10 @@ class JobsController @Inject()(jobDAO: JobDAO,
} yield Ok(js)
}

def runInferNucleiJob(organizationName: String, dataSetName: String, layerName: Option[String]): Action[AnyContent] =
def runInferNucleiJob(organizationName: String,
dataSetName: String,
layerName: String,
newDatasetName: String): Action[AnyContent] =
sil.SecuredAction.async { implicit request =>
log(Some(slackNotificationService.noticeFailedJobRequest)) {
for {
Expand All @@ -142,6 +145,7 @@ class JobsController @Inject()(jobDAO: JobDAO,
"organization_name" -> organizationName,
"dataset_name" -> dataSetName,
"layer_name" -> layerName,
"new_dataset_name" -> newDatasetName,
"webknossos_token" -> RpcTokenHolder.webKnossosToken,
)
job <- jobService.submitJob(command, commandArgs, request.identity, dataSet._dataStore) ?~> "job.couldNotRunNucleiInferral"
Expand All @@ -153,7 +157,8 @@ class JobsController @Inject()(jobDAO: JobDAO,
def runInferNeuronsJob(organizationName: String,
dataSetName: String,
layerName: String,
bbox: String): Action[AnyContent] =
bbox: String,
newDatasetName: String): Action[AnyContent] =
sil.SecuredAction.async { implicit request =>
log(Some(slackNotificationService.noticeFailedJobRequest)) {
for {
Expand All @@ -167,6 +172,7 @@ class JobsController @Inject()(jobDAO: JobDAO,
commandArgs = Json.obj(
"organization_name" -> organizationName,
"dataset_name" -> dataSetName,
"new_dataset_name" -> newDatasetName,
"layer_name" -> layerName,
"webknossos_token" -> RpcTokenHolder.webKnossosToken,
"bbox" -> bbox,
Expand All @@ -180,10 +186,11 @@ class JobsController @Inject()(jobDAO: JobDAO,
def runGlobalizeFloodfills(
organizationName: String,
dataSetName: String,
newDataSetName: Option[String],
layerName: Option[String],
annotationId: Option[String],
annotationType: Option[String],
fallbackLayerName: String,
annotationId: String,
annotationType: String,
newDatasetName: String,
volumeLayerName: Option[String]
): Action[AnyContent] =
sil.SecuredAction.async { implicit request =>
log(Some(slackNotificationService.noticeFailedJobRequest)) {
Expand All @@ -201,11 +208,13 @@ class JobsController @Inject()(jobDAO: JobDAO,
commandArgs = Json.obj(
"organization_name" -> organizationName,
"dataset_name" -> dataSetName,
"new_dataset_name" -> newDataSetName,
"layer_name" -> layerName,
"fallback_layer_name" -> fallbackLayerName,
"webknossos_token" -> RpcTokenHolder.webKnossosToken,
"user_auth_token" -> userAuthToken.id,
"annotation_id" -> annotationId,
"annotation_type" -> annotationType,
"user_auth_token" -> userAuthToken.id,
"new_dataset_name" -> newDatasetName,
"volume_layer_name" -> volumeLayerName
)
job <- jobService.submitJob(command, commandArgs, request.identity, dataSet._dataStore) ?~> "job.couldNotRunGlobalizeFloodfills"
js <- jobService.publicWrites(job)
Expand Down Expand Up @@ -258,4 +267,44 @@ class JobsController @Inject()(jobDAO: JobDAO,
}
}

/** Submits a long-running worker job that materializes a volume annotation
  * (e.g. a merger-mode tracing) into a new output dataset.
  *
  * Access checks performed before submission:
  *  - the organization must exist and match the requesting user's organization,
  *  - the dataset must exist within that organization.
  *
  * @param fallbackLayerName            segmentation layer used as fallback data source
  * @param newDatasetName               name of the dataset the worker will create
  * @param outputSegmentationLayerName  name of the materialized segmentation layer
  * @param mergeSegments                whether the worker should apply segment merges
  * @param volumeLayerName              optional volume layer, for multi-layer annotations
  * @return JSON representation of the created job, or a localized error
  */
def runMaterializeVolumeAnnotationJob(organizationName: String,
                                      dataSetName: String,
                                      fallbackLayerName: String,
                                      annotationId: String,
                                      annotationType: String,
                                      newDatasetName: String,
                                      outputSegmentationLayerName: String,
                                      mergeSegments: Boolean,
                                      volumeLayerName: Option[String]): Action[AnyContent] =
  sil.SecuredAction.async { implicit request =>
    log(Some(slackNotificationService.noticeFailedJobRequest)) {
      for {
        // Resolve the organization and make sure the caller belongs to it.
        org <- organizationDAO.findOneByName(organizationName) ?~> Messages("organization.notFound",
                                                                            organizationName)
        _ <- bool2Fox(request.identity._organization == org._id) ?~> "job.applyMergerMode.notAllowed.organization" ~> FORBIDDEN
        dataset <- dataSetDAO.findOneByNameAndOrganization(dataSetName, org._id) ?~> Messages(
          "dataSet.notFound",
          dataSetName) ~> NOT_FOUND
        // The worker needs a user-scoped auth token to read the annotation data.
        authToken <- wkSilhouetteEnvironment.combinedAuthenticatorService.findOrCreateToken(
          request.identity.loginInfo)
        workerCommand = "materialize_volume_annotation"
        // Argument keys and ordering are part of the worker protocol — keep as-is.
        workerArgs = Json.obj(
          "organization_name" -> organizationName,
          "dataset_name" -> dataSetName,
          "fallback_layer_name" -> fallbackLayerName,
          "webknossos_token" -> RpcTokenHolder.webKnossosToken,
          "user_auth_token" -> authToken.id,
          "annotation_id" -> annotationId,
          "output_segmentation_layer_name" -> outputSegmentationLayerName,
          "annotation_type" -> annotationType,
          "new_dataset_name" -> newDatasetName,
          "merge_segments" -> mergeSegments,
          "volume_layer_name" -> volumeLayerName
        )
        submittedJob <- jobService.submitJob(workerCommand, workerArgs, request.identity, dataset._dataStore) ?~> "job.couldNotRunApplyMergerMode"
        js <- jobService.publicWrites(submittedJob)
      } yield Ok(js)
    }
  }


}
2 changes: 1 addition & 1 deletion app/models/job/Job.scala
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ case class Job(
}
case "export_tiff" =>
Some(s"$dataStorePublicUrl/data/exports/${_id.id}/download")
case "infer_nuclei" | "infer_neurons" =>
case "infer_nuclei" | "infer_neurons" | "materialize_volume_annotation" =>
returnValue.map { resultDatasetName =>
s"/datasets/$organizationName/$resultDatasetName/view"
}
Expand Down
2 changes: 2 additions & 0 deletions conf/messages
Original file line number Diff line number Diff line change
Expand Up @@ -346,6 +346,7 @@ job.couldNotRunComputeMeshFile = Failed to start mesh file computation job.
job.couldNotRunNucleiInferral = Failed to start nuclei inferral job.
job.couldNotRunNeuronInferral = Failed to start neuron inferral job.
job.couldNotRunGlobalizeFloodfills = Failed to start job for globalizing floodfills.
job.couldNotRunApplyMergerMode = Failed to start job to apply merger mode tracing.
job.disabled = Long-running jobs are not enabled for this webKnossos instance.
jobs.worker.notFound = Could not find this worker in the database.
job.export.fileNotFound = Exported file not found. The link may be expired.
Expand All @@ -357,6 +358,7 @@ job.inferNuclei.notAllowed.organization = Currently nuclei inferral is only allo
job.inferNeurons.notAllowed.organization = Currently neuron inferral is only allowed for datasets of your own organization.
job.meshFile.notAllowed.organization = Calculating mesh files is only allowed for datasets of your own organization.
job.globalizeFloodfill.notAllowed.organization = Globalizing floodfills is only allowed for datasets of your own organization.
job.applyMergerMode.notAllowed.organization = Applying merger mode tracings is only allowed for datasets of your own organization.

agglomerateSkeleton.failed=Could not generate agglomerate skeleton.

Expand Down
Loading

0 comments on commit 95ec9e1

Please sign in to comment.