
Commit

Merge branch 'master' of github.com:scalableminds/webknossos into pricing

* 'master' of github.com:scalableminds/webknossos:
  Remove superfluous datastoreName param for convertToWkw job (#6606)
  When downloading with skipVolumeData, keep volume tag (#6566)
hotzenklotz committed Nov 8, 2022
2 parents af61892 + 3abb21a commit 85da260
Showing 12 changed files with 42 additions and 48 deletions.
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
@@ -21,6 +21,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- Node positions are always handled as integers. They have always been persisted as integers by the server, anyway, but the session in which a node was created handled the position as floating point in earlier versions. [#6589](https://github.com/scalableminds/webknossos/pull/6589)
- When merging annotations, bounding boxes are no longer duplicated. [#6576](https://github.com/scalableminds/webknossos/pull/6576)
- Jobs can no longer be started on datastores without workers. [#6595](https://github.com/scalableminds/webknossos/pull/6595)
- When downloading volume annotations with volume data skipped, the nml volume tag is now included anyway (but has no location attribute in this case). [#6566](https://github.com/scalableminds/webknossos/pull/6566)

### Fixed

19 changes: 6 additions & 13 deletions app/controllers/JobsController.scala
@@ -75,26 +75,25 @@ class JobsController @Inject()(jobDAO: JobDAO,
} yield Ok(js)
}

def runConvertToWkwJob(organizationName: String,
dataSetName: String,
scale: String,
dataStoreName: String): Action[AnyContent] =
// Note that the dataset has to be registered by reserveUpload via the datastore first.
def runConvertToWkwJob(organizationName: String, dataSetName: String, scale: String): Action[AnyContent] =
sil.SecuredAction.async { implicit request =>
log(Some(slackNotificationService.noticeFailedJobRequest)) {
for {
_ <- workerService.assertDataStoreHasWorkers(dataStoreName)
organization <- organizationDAO.findOneByName(organizationName) ?~> Messages("organization.notFound",
organizationName)
_ <- bool2Fox(request.identity._organization == organization._id) ~> FORBIDDEN
dataSet <- dataSetDAO.findOneByNameAndOrganization(dataSetName, organization._id) ?~> Messages(
"dataSet.notFound",
dataSetName) ~> NOT_FOUND
command = "convert_to_wkw"
commandArgs = Json.obj(
"organization_name" -> organizationName,
"dataset_name" -> dataSetName,
"scale" -> scale,
"webknossos_token" -> RpcTokenHolder.webKnossosToken
)

job <- jobService.submitJob(command, commandArgs, request.identity, dataStoreName) ?~> "job.couldNotRunCubing"
job <- jobService.submitJob(command, commandArgs, request.identity, dataSet._dataStore) ?~> "job.couldNotRunCubing"
js <- jobService.publicWrites(job)
} yield Ok(js)
}
@@ -114,7 +113,6 @@ class JobsController @Inject()(jobDAO: JobDAO,
dataSet <- dataSetDAO.findOneByNameAndOrganization(dataSetName, organization._id) ?~> Messages(
"dataSet.notFound",
dataSetName) ~> NOT_FOUND
_ <- workerService.assertDataStoreHasWorkers(dataSet._dataStore)
command = "compute_mesh_file"
commandArgs = Json.obj(
"organization_name" -> organizationName,
@@ -142,7 +140,6 @@ class JobsController @Inject()(jobDAO: JobDAO,
dataSet <- dataSetDAO.findOneByNameAndOrganization(dataSetName, organization._id) ?~> Messages(
"dataSet.notFound",
dataSetName) ~> NOT_FOUND
_ <- workerService.assertDataStoreHasWorkers(dataSet._dataStore)
command = "infer_nuclei"
commandArgs = Json.obj(
"organization_name" -> organizationName,
@@ -171,7 +168,6 @@ class JobsController @Inject()(jobDAO: JobDAO,
dataSet <- dataSetDAO.findOneByNameAndOrganization(dataSetName, organization._id) ?~> Messages(
"dataSet.notFound",
dataSetName) ~> NOT_FOUND
_ <- workerService.assertDataStoreHasWorkers(dataSet._dataStore)
command = "infer_neurons"
commandArgs = Json.obj(
"organization_name" -> organizationName,
@@ -208,7 +204,6 @@ class JobsController @Inject()(jobDAO: JobDAO,
dataSet <- dataSetDAO.findOneByNameAndOrganization(dataSetName, organization._id) ?~> Messages(
"dataSet.notFound",
dataSetName) ~> NOT_FOUND
_ <- workerService.assertDataStoreHasWorkers(dataSet._dataStore)
command = "globalize_floodfills"
commandArgs = Json.obj(
"organization_name" -> organizationName,
@@ -244,7 +239,6 @@ class JobsController @Inject()(jobDAO: JobDAO,
"dataSet.notFound",
dataSetName) ~> NOT_FOUND
_ <- jobService.assertTiffExportBoundingBoxLimits(bbox)
_ <- workerService.assertDataStoreHasWorkers(dataSet._dataStore)
userAuthToken <- wkSilhouetteEnvironment.combinedAuthenticatorService.findOrCreateToken(
request.identity.loginInfo)
command = "export_tiff"
@@ -287,7 +281,6 @@ class JobsController @Inject()(jobDAO: JobDAO,
dataSet <- dataSetDAO.findOneByNameAndOrganization(dataSetName, organization._id) ?~> Messages(
"dataSet.notFound",
dataSetName) ~> NOT_FOUND
_ <- workerService.assertDataStoreHasWorkers(dataSet._dataStore)
userAuthToken <- wkSilhouetteEnvironment.combinedAuthenticatorService.findOrCreateToken(
request.identity.loginInfo)
command = "materialize_volume_annotation"
2 changes: 1 addition & 1 deletion app/controllers/WKRemoteDataStoreController.scala
@@ -45,7 +45,7 @@ class WKRemoteDataStoreController @Inject()(
val bearerTokenService: WebknossosBearerTokenAuthenticatorService =
wkSilhouetteEnvironment.combinedAuthenticatorService.tokenAuthenticatorService

def validateDataSetUpload(name: String, key: String, token: String): Action[ReserveUploadInformation] =
def reserveDataSetUpload(name: String, key: String, token: String): Action[ReserveUploadInformation] =
Action.async(validateJson[ReserveUploadInformation]) { implicit request =>
dataStoreService.validateAccess(name, key) { dataStore =>
val uploadInfo = request.body
28 changes: 14 additions & 14 deletions app/models/annotation/nml/NmlWriter.scala
@@ -206,21 +206,21 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits {
isSingle: Boolean,
volumeFilename: Option[String],
skipVolumeData: Boolean)(implicit writer: XMLStreamWriter): Unit =
if (skipVolumeData) {
writer.writeComment(
f"A volume layer named ${volumeLayer.name} (id = $index) was omitted here while downloading this annotation without volume data.")
} else {
Xml.withinElementSync("volume") {
writer.writeAttribute("id", index.toString)
Xml.withinElementSync("volume") {
writer.writeAttribute("id", index.toString)
writer.writeAttribute("name", volumeLayer.name)
if (!skipVolumeData) {
writer.writeAttribute("location", volumeFilename.getOrElse(volumeLayer.volumeDataZipName(index, isSingle)))
writer.writeAttribute("name", volumeLayer.name)
volumeLayer.tracing match {
case Right(volumeTracing) =>
volumeTracing.fallbackLayer.foreach(writer.writeAttribute("fallbackLayer", _))
volumeTracing.largestSegmentId.foreach(id => writer.writeAttribute("largestSegmentId", id.toString))
writeVolumeSegmentMetadata(volumeTracing.segments)
case _ => ()
}
}
volumeLayer.tracing match {
case Right(volumeTracing) =>
volumeTracing.fallbackLayer.foreach(writer.writeAttribute("fallbackLayer", _))
volumeTracing.largestSegmentId.foreach(id => writer.writeAttribute("largestSegmentId", id.toString))
if (skipVolumeData) {
writer.writeComment(f"Note that volume data was omitted when downloading this annotation.")
}
writeVolumeSegmentMetadata(volumeTracing.segments)
case _ => ()
}
}

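The rewritten writer above always emits the volume tag and only drops the location attribute when volume data is skipped. As a rough illustration (attribute values below are hypothetical, not taken from the diff), the resulting NML fragments look approximately like this:

```xml
<!-- download with skipVolumeData = true: volume tag is kept, but without a location attribute -->
<volume id="1" name="Volume" fallbackLayer="segmentation" largestSegmentId="2504">
  <!-- Note that volume data was omitted when downloading this annotation. -->
  <!-- segment metadata follows here -->
</volume>

<!-- regular download: location points to the zipped volume data inside the downloaded archive -->
<volume id="1" name="Volume" location="data_1_Volume.zip" fallbackLayer="segmentation" largestSegmentId="2504">
  <!-- segment metadata follows here -->
</volume>
```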
11 changes: 10 additions & 1 deletion app/models/job/Job.scala
@@ -264,6 +264,7 @@ class JobService @Inject()(wkConf: WkConf,
userDAO: UserDAO,
multiUserDAO: MultiUserDAO,
jobDAO: JobDAO,
workerDAO: WorkerDAO,
dataStoreDAO: DataStoreDAO,
organizationDAO: OrganizationDAO,
dataSetDAO: DataSetDAO,
@@ -360,14 +361,22 @@ class JobService @Inject()(wkConf: WkConf,
"job_kwargs" -> job.commandArgs
)

def submitJob(command: String, commandArgs: JsObject, owner: User, dataStoreName: String): Fox[Job] =
def submitJob(command: String, commandArgs: JsObject, owner: User, dataStoreName: String)(
implicit ctx: DBAccessContext): Fox[Job] =
for {
_ <- bool2Fox(wkConf.Features.jobsEnabled) ?~> "job.disabled"
_ <- assertDataStoreHasWorkers(dataStoreName) ?~> "job.noWorkerForDatastore"
job = Job(ObjectId.generate, owner._id, dataStoreName, command, commandArgs)
_ <- jobDAO.insertOne(job)
_ = analyticsService.track(RunJobEvent(owner, command))
} yield job

private def assertDataStoreHasWorkers(dataStoreName: String)(implicit ctx: DBAccessContext): Fox[Unit] =
for {
_ <- dataStoreDAO.findOneByName(dataStoreName)
_ <- workerDAO.findOneByDataStore(dataStoreName)
} yield ()

def assertTiffExportBoundingBoxLimits(bbox: String): Fox[Unit] =
for {
boundingBox <- BoundingBox.fromLiteral(bbox).toFox ?~> "job.export.tiff.invalidBoundingBox"
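Together with the JobsController changes above, the per-endpoint workerService.assertDataStoreHasWorkers calls are gone and submitJob now performs the worker check itself against the dataset's datastore. A minimal sketch of the resulting call pattern in a job-starting action (schematic only: the action name, command string, and error key are made up, and the injected services are assumed to be wired as in JobsController above):

```scala
// Schematic fragment, not a complete controller: jobService, dataSetDAO, organizationDAO
// and sil are assumed to be injected as in JobsController above.
def runSomeJob(organizationName: String, dataSetName: String): Action[AnyContent] =
  sil.SecuredAction.async { implicit request =>
    for {
      organization <- organizationDAO.findOneByName(organizationName) ?~> "organization.notFound"
      dataSet <- dataSetDAO.findOneByNameAndOrganization(dataSetName, organization._id) ?~> "dataSet.notFound"
      commandArgs = Json.obj("organization_name" -> organizationName, "dataset_name" -> dataSetName)
      // No explicit worker check here anymore: submitJob asserts that the dataset's
      // datastore has a registered worker and fails with job.noWorkerForDatastore otherwise.
      job <- jobService.submitJob("some_command", commandArgs, request.identity, dataSet._dataStore) ?~> "job.couldNotRunSomeJob"
      js <- jobService.publicWrites(job)
    } yield Ok(js)
  }
```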
11 changes: 2 additions & 9 deletions app/models/job/Worker.scala
@@ -1,22 +1,21 @@
package models.job

import akka.actor.ActorSystem
import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext}
import com.scalableminds.util.accesscontext.GlobalAccessContext
import com.scalableminds.util.mvc.Formatter
import com.scalableminds.util.tools.Fox
import com.scalableminds.webknossos.datastore.helpers.IntervalScheduler
import com.scalableminds.webknossos.schema.Tables._
import com.typesafe.scalalogging.LazyLogging
import models.binary.DataStoreDAO

import javax.inject.Inject
import oxalis.telemetry.SlackNotificationService
import play.api.inject.ApplicationLifecycle
import play.api.libs.json.{JsObject, Json}
import slick.jdbc.PostgresProfile.api._
import slick.lifted.Rep
import utils.{ObjectId, SQLClient, SQLDAO, WkConf}

import javax.inject.Inject
import scala.concurrent.ExecutionContext
import scala.concurrent.duration._

@@ -83,12 +82,6 @@ class WorkerService @Inject()(conf: WkConf, dataStoreDAO: DataStoreDAO, workerDA
"lastHeartBeatIsRecent" -> lastHeartBeatIsRecent(worker)
)

def assertDataStoreHasWorkers(dataStoreName: String)(implicit ctx: DBAccessContext): Fox[Unit] =
for {
_ <- dataStoreDAO.findOneByName(dataStoreName)
_ <- workerDAO.findOneByDataStore(dataStoreName) ?~> "jobs.noWorkerForDatastore"
} yield ()

}

class WorkerLivenessService @Inject()(workerService: WorkerService,
4 changes: 2 additions & 2 deletions conf/messages
@@ -355,7 +355,7 @@ job.couldNotRunNeuronInferral = Failed to start neuron inferral job.
job.couldNotRunGlobalizeFloodfills = Failed to start job for globalizing floodfills.
job.couldNotRunApplyMergerMode = Failed to start job to apply merger mode tracing.
job.disabled = Long-running jobs are not enabled for this webKnossos instance.
jobs.worker.notFound = Could not find this worker in the database.
job.worker.notFound = Could not find this worker in the database.
job.export.fileNotFound = Exported file not found. The link may be expired.
job.export.tiff.invalidBoundingBox = The selected bounding box could not be parsed, must be x,y,z,width,height,depth
job.export.tiff.volumeExceeded = The volume of the selected bounding box is too large.
@@ -366,7 +366,7 @@ job.inferNeurons.notAllowed.organization = Currently neuron inferral is only all
job.meshFile.notAllowed.organization = Calculating mesh files is only allowed for datasets of your own organization.
job.globalizeFloodfill.notAllowed.organization = Globalizing floodfills is only allowed for datasets of your own organization.
job.applyMergerMode.notAllowed.organization = Applying merger mode tracings is only allowed for datasets of your own organization.
jobs.noWorkerFound = Processing jobs are not available for the datastore this dataset belongs to.
job.noWorkerForDatastore = Processing jobs are not available for the datastore this dataset belongs to.

voxelytics.disabled = Voxelytics workflow reporting and logging are not enabled for this webKnossos instance.
voxelytics.runNotFound = Workflow runs not found
4 changes: 2 additions & 2 deletions conf/webknossos.latest.routes
@@ -86,7 +86,7 @@ GET /datastores
PUT /datastores/:name/datasource controllers.WKRemoteDataStoreController.updateOne(name: String, key: String)
PUT /datastores/:name/datasources controllers.WKRemoteDataStoreController.updateAll(name: String, key: String)
PATCH /datastores/:name/status controllers.WKRemoteDataStoreController.statusUpdate(name: String, key: String)
POST /datastores/:name/verifyUpload controllers.WKRemoteDataStoreController.validateDataSetUpload(name: String, key: String, token: String)
POST /datastores/:name/reserveUpload controllers.WKRemoteDataStoreController.reserveDataSetUpload(name: String, key: String, token: String)
POST /datastores/:name/reportDatasetUpload controllers.WKRemoteDataStoreController.reportDatasetUpload(name: String, key: String, token: String, dataSetName: String, dataSetSizeBytes: Long)
POST /datastores/:name/deleteDataset controllers.WKRemoteDataStoreController.deleteDataset(name: String, key: String)
GET /datastores/:name/jobExportProperties controllers.WKRemoteDataStoreController.jobExportProperties(name: String, key: String, jobId: String)
@@ -226,7 +226,7 @@ GET /time/user/:userId
GET /jobs/request controllers.WKRemoteWorkerController.requestJobs(key: String)
GET /jobs controllers.JobsController.list
GET /jobs/status controllers.JobsController.status
POST /jobs/run/convertToWkw/:organizationName/:dataSetName controllers.JobsController.runConvertToWkwJob(organizationName: String, dataSetName: String, scale: String, dataStoreName: String)
POST /jobs/run/convertToWkw/:organizationName/:dataSetName controllers.JobsController.runConvertToWkwJob(organizationName: String, dataSetName: String, scale: String)
POST /jobs/run/computeMeshFile/:organizationName/:dataSetName controllers.JobsController.runComputeMeshFileJob(organizationName: String, dataSetName: String, layerName: String, mag: String, agglomerateView: Option[String])
POST /jobs/run/exportTiff/:organizationName/:dataSetName controllers.JobsController.runExportTiffJob(organizationName: String, dataSetName: String, bbox: String, layerName: Option[String], annotationLayerName: Option[String], annotationId: Option[String], annotationType: Option[String], hideUnmappedIds: Option[Boolean], mappingName: Option[String], mappingType: Option[String])
POST /jobs/run/inferNuclei/:organizationName/:dataSetName controllers.JobsController.runInferNucleiJob(organizationName: String, dataSetName: String, layerName: String, newDatasetName: String)
3 changes: 1 addition & 2 deletions frontend/javascripts/admin/admin_rest_api.ts
@@ -1073,10 +1073,9 @@ export async function startConvertToWkwJob(
datasetName: string,
organizationName: string,
scale: Vector3,
datastoreName: string,
): Promise<APIJob> {
return Request.receiveJSON(
`/api/jobs/run/convertToWkw/${organizationName}/${datasetName}?scale=${scale.toString()}&dataStoreName=${datastoreName}`,
`/api/jobs/run/convertToWkw/${organizationName}/${datasetName}?scale=${scale.toString()}`,
{
method: "POST",
},
1 change: 0 additions & 1 deletion frontend/javascripts/admin/dataset/dataset_upload_view.tsx
@@ -310,7 +310,6 @@ class DatasetUploadView extends React.Component<PropsWithFormAndRouter, State> {
formValues.name,
activeUser.organization,
formValues.scale,
datastore.name,
);
} catch (error) {
maybeError = error;
@@ -97,7 +97,7 @@ Expects:
for {
isKnownUpload <- uploadService.isKnownUpload(request.body.uploadId)
_ <- if (!isKnownUpload) {
(remoteWebKnossosClient.validateDataSourceUpload(request.body, urlOrHeaderToken(token, request)) ?~> "dataSet.upload.validation.failed")
(remoteWebKnossosClient.reserveDataSourceUpload(request.body, urlOrHeaderToken(token, request)) ?~> "dataSet.upload.validation.failed")
.flatMap(_ => uploadService.reserveUpload(request.body))
} else Fox.successful(())
} yield Ok
@@ -82,10 +82,10 @@ class DSRemoteWebKnossosClient @Inject()(
.silent
.put(dataSources)

def validateDataSourceUpload(info: ReserveUploadInformation, userTokenOpt: Option[String]): Fox[Unit] =
def reserveDataSourceUpload(info: ReserveUploadInformation, userTokenOpt: Option[String]): Fox[Unit] =
for {
userToken <- option2Fox(userTokenOpt) ?~> "validateDataSourceUpload.noUserToken"
_ <- rpc(s"$webKnossosUri/api/datastores/$dataStoreName/verifyUpload")
_ <- rpc(s"$webKnossosUri/api/datastores/$dataStoreName/reserveUpload")
.addQueryString("key" -> dataStoreKey)
.addQueryString("token" -> userToken)
.post(info)
