diff --git a/app/controllers/AnnotationIOController.scala b/app/controllers/AnnotationIOController.scala index 930d95950f2..5a45b8b648d 100755 --- a/app/controllers/AnnotationIOController.scala +++ b/app/controllers/AnnotationIOController.scala @@ -337,8 +337,7 @@ class AnnotationIOController @Inject()( else volumeTracing.boundingBox for { - tracingCanHaveSegmentIndex <- canHaveSegmentIndex(organizationId, - dataset.name, + tracingCanHaveSegmentIndex <- canHaveSegmentIndex(dataset._id, fallbackLayerOpt.map(_.name), remoteDataStoreClient) elementClassProto <- fallbackLayerOpt @@ -358,13 +357,12 @@ class AnnotationIOController @Inject()( } private def canHaveSegmentIndex( - organizationId: String, - datasetName: String, + datasetId: ObjectId, fallbackLayerName: Option[String], remoteDataStoreClient: WKRemoteDataStoreClient)(implicit ec: ExecutionContext): Fox[Boolean] = fallbackLayerName match { case Some(layerName) => - remoteDataStoreClient.hasSegmentIndexFile(organizationId, datasetName, layerName) + remoteDataStoreClient.hasSegmentIndexFile(datasetId, layerName) case None => Fox.successful(true) } diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index 04e39253849..e340de4310d 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -89,6 +89,7 @@ class DatasetController @Inject()(userService: UserService, analyticsService: AnalyticsService, mailchimpClient: MailchimpClient, wkExploreRemoteLayerService: WKExploreRemoteLayerService, + composeService: ComposeService, sil: Silhouette[WkEnv])(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) extends Controller with MetadataAssertions { @@ -145,10 +146,10 @@ class DatasetController @Inject()(userService: UserService, _ <- Fox.fromBool(dataSource.dataLayers.nonEmpty) ?~> "dataset.explore.zeroLayers" folderIdOpt <- Fox.runOptional(request.body.folderPath)(folderPath => folderService.getOrCreateFromPathLiteral(folderPath, request.identity._organization)) ?~> "dataset.explore.autoAdd.getFolder.failed" - _ <- wkExploreRemoteLayerService.addRemoteDatasource(dataSource, - request.body.datasetName, - request.identity, - folderIdOpt) ?~> "dataset.explore.autoAdd.failed" + _ <- wkExploreRemoteLayerService.addRemoteDatasourceToDatabase(dataSource, + request.body.datasetName, + request.identity, + folderIdOpt) ?~> "dataset.explore.autoAdd.failed" } yield Ok } @@ -490,4 +491,11 @@ class DatasetController @Inject()(userService: UserService, } } + def compose(): Action[ComposeRequest] = + sil.SecuredAction.async(validateJson[ComposeRequest]) { implicit request => + for { + (dataSource, newDatasetId) <- composeService.composeDataset(request.body, request.identity) ?~> "dataset.compose.failed" + } yield Ok(Json.obj("newDatasetId" -> newDatasetId)) + } + } diff --git a/app/controllers/UserTokenController.scala b/app/controllers/UserTokenController.scala index f5a68288e18..9d9bb975214 100644 --- a/app/controllers/UserTokenController.scala +++ b/app/controllers/UserTokenController.scala @@ -182,10 +182,19 @@ class UserTokenController @Inject()(datasetDAO: DatasetDAO, isAllowed <- datasetService.isEditableBy(dataset, Some(user)) } yield UserAccessAnswer(isAllowed) + def tryDelete: Fox[UserAccessAnswer] = + for { + _ <- Fox.fromBool(conf.Features.allowDeleteDatasets) ?~> "dataset.delete.disabled" + datasetId <- ObjectId.fromString(id) + dataset <- datasetDAO.findOne(datasetId)(GlobalAccessContext) ?~> "dataset.notFound" + user <- userBox.toFox ?~> 
"auth.token.noUser" + } yield UserAccessAnswer(user._organization == dataset._organization && user.isAdmin) + mode match { - case AccessMode.read => tryRead - case AccessMode.write => tryWrite - case _ => Fox.successful(UserAccessAnswer(granted = false, Some("invalid access token"))) + case AccessMode.read => tryRead + case AccessMode.write => tryWrite + case AccessMode.delete => tryDelete + case _ => Fox.successful(UserAccessAnswer(granted = false, Some("invalid access token"))) } } diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index 736eb23b3a7..c75859a8d33 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -7,9 +7,9 @@ import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.controllers.JobExportProperties import com.scalableminds.webknossos.datastore.helpers.{LayerMagLinkInfo, MagLinkInfo} import com.scalableminds.webknossos.datastore.models.UnfinishedUpload -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.webknossos.datastore.models.datasource.{AbstractDataLayer, DataSource, DataSourceId} import com.scalableminds.webknossos.datastore.models.datasource.inbox.{InboxDataSourceLike => InboxDataSource} -import com.scalableminds.webknossos.datastore.services.{DataSourcePathInfo, DataStoreStatus} +import com.scalableminds.webknossos.datastore.services.{DataSourcePathInfo, DataSourceRegistrationInfo, DataStoreStatus} import com.scalableminds.webknossos.datastore.services.uploading.{ LinkedLayerIdentifier, ReserveAdditionalInformation, @@ -242,13 +242,22 @@ class WKRemoteDataStoreController @Inject()( } } - def getPaths(name: String, key: String, organizationId: String, directoryName: String): Action[AnyContent] = + def deleteVirtualDataset(name: String, key: String): Action[ObjectId] = + Action.async(validateJson[ObjectId]) { implicit request => + dataStoreService.validateAccess(name, key) { _ => + for { + dataset <- datasetDAO.findOne(request.body)(GlobalAccessContext) ~> NOT_FOUND + _ <- Fox.fromBool(dataset.isVirtual) ?~> "dataset.delete.notVirtual" ~> FORBIDDEN + _ <- datasetDAO.deleteDataset(dataset._id, onlyMarkAsDeleted = true) + } yield Ok + } + } + + def getPaths(name: String, key: String, datasetId: ObjectId): Action[AnyContent] = Action.async { implicit request => dataStoreService.validateAccess(name, key) { _ => for { - organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) - dataset <- datasetDAO.findOneByDirectoryNameAndOrganization(directoryName, organization._id)( - GlobalAccessContext) + dataset <- datasetDAO.findOne(datasetId)(GlobalAccessContext) ~> NOT_FOUND layers <- datasetLayerDAO.findAllForDataset(dataset._id) magsAndLinkedMags <- Fox.serialCombined(layers)(l => datasetService.getPathsForDataLayer(dataset._id, l.name)) magLinkInfos = magsAndLinkedMags.map(_.map { case (mag, linkedMags) => MagLinkInfo(mag, linkedMags) }) @@ -270,6 +279,49 @@ class WKRemoteDataStoreController @Inject()( } + // Register a datasource from the datastore as a dataset in the database. + // This is called when adding remote virtual datasets (that should only exist in the database) + // by the data store after exploration. 
+ def registerDataSource(name: String, + key: String, + organizationId: String, + directoryName: String, + token: String): Action[DataSourceRegistrationInfo] = + Action.async(validateJson[DataSourceRegistrationInfo]) { implicit request => + dataStoreService.validateAccess(name, key) { dataStore => + for { + user <- bearerTokenService.userForToken(token) + organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( + "organization.notFound", + organizationId) ~> NOT_FOUND + _ <- Fox.fromBool(organization._id == user._organization) ?~> "notAllowed" ~> FORBIDDEN + dataset <- datasetService.createVirtualDataset( + directoryName, + organizationId, + dataStore, + request.body.dataSource, + request.body.folderId, + user + ) + } yield Ok(dataset._id.toString) + } + } + + def updateDataSource(name: String, key: String, datasetId: ObjectId): Action[DataSource] = + Action.async(validateJson[DataSource]) { implicit request => + dataStoreService.validateAccess(name, key) { _ => + for { + _ <- datasetDAO.findOne(datasetId)(GlobalAccessContext) ~> NOT_FOUND + abstractDataSource = request.body.copy(dataLayers = request.body.dataLayers.map(AbstractDataLayer.from)) + _ <- datasetDAO.updateDataSourceByDatasetId(datasetId, + name, + abstractDataSource.hashCode(), + abstractDataSource, + isUsable = true)(GlobalAccessContext) + } yield Ok + } + } + def jobExportProperties(name: String, key: String, jobId: ObjectId): Action[AnyContent] = Action.async { implicit request => dataStoreService.validateAccess(name, key) { _ => diff --git a/app/controllers/WKRemoteTracingStoreController.scala b/app/controllers/WKRemoteTracingStoreController.scala index 11ab1792668..895a5c08aa1 100644 --- a/app/controllers/WKRemoteTracingStoreController.scala +++ b/app/controllers/WKRemoteTracingStoreController.scala @@ -8,7 +8,6 @@ import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayer -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.tracingstore.AnnotationUpdatesReport import com.scalableminds.webknossos.tracingstore.annotation.AnnotationLayerParameters import com.scalableminds.webknossos.tracingstore.tracings.TracingId @@ -16,10 +15,8 @@ import models.analytics.{AnalyticsService, UpdateAnnotationEvent, UpdateAnnotati import models.annotation.AnnotationState._ import models.annotation._ import models.dataset.{DatasetDAO, DatasetService} -import models.organization.OrganizationDAO import models.user.UserDAO import models.user.time.TimeSpanService -import play.api.i18n.Messages import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, PlayBodyParsers} import scalapb.GeneratedMessage @@ -33,7 +30,6 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore wkSilhouetteEnvironment: WkSilhouetteEnvironment, timeSpanService: TimeSpanService, datasetService: DatasetService, - organizationDAO: OrganizationDAO, userDAO: UserDAO, annotationInformationProvider: AnnotationInformationProvider, analyticsService: AnalyticsService, @@ -125,15 +121,14 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore } } - def dataSourceIdForAnnotation(name: String, key: String, annotationId: ObjectId): Action[AnyContent] = + def 
datasetIdForAnnotation(name: String, key: String, annotationId: ObjectId): Action[AnyContent] = Action.async { implicit request => tracingStoreService.validateAccess(name, key) { _ => implicit val ctx: DBAccessContext = GlobalAccessContext for { annotation <- annotationDAO.findOne(annotationId) ?~> "annotation.notFound" - dataset <- datasetDAO.findOne(annotation._dataset) - organization <- organizationDAO.findOne(dataset._organization) - } yield Ok(Json.toJson(DataSourceId(dataset.directoryName, organization._id))) + dataset <- datasetDAO.findOne(annotation._dataset) ?~> "dataset.notFound" + } yield Ok(Json.toJson(dataset._id)) } } @@ -151,20 +146,12 @@ class WKRemoteTracingStoreController @Inject()(tracingStoreService: TracingStore } } - def dataStoreUriForDataset(name: String, - key: String, - organizationId: Option[String], - datasetDirectory: String): Action[AnyContent] = + def dataStoreUriForDataset(name: String, key: String, datasetId: ObjectId): Action[AnyContent] = Action.async { implicit request => tracingStoreService.validateAccess(name, key) { _ => implicit val ctx: DBAccessContext = GlobalAccessContext for { - organizationIdWithFallback <- Fox.fillOption(organizationId) { - datasetDAO.getOrganizationIdForDataset(datasetDirectory)(GlobalAccessContext) - } ?~> Messages("dataset.noAccess", datasetDirectory) ~> FORBIDDEN - dataset <- datasetDAO.findOneByDirectoryNameAndOrganization(datasetDirectory, organizationIdWithFallback) ?~> Messages( - "dataset.noAccess", - datasetDirectory) ~> FORBIDDEN + dataset <- datasetDAO.findOne(datasetId) ?~> "dataset.notFound" ~> NOT_FOUND dataStore <- datasetService.dataStoreFor(dataset) } yield Ok(Json.toJson(dataStore.url)) } diff --git a/app/models/annotation/AnnotationService.scala b/app/models/annotation/AnnotationService.scala index b8fd30a12ea..66d1695e55b 100755 --- a/app/models/annotation/AnnotationService.scala +++ b/app/models/annotation/AnnotationService.scala @@ -121,6 +121,7 @@ class AnnotationService @Inject()( private def createVolumeTracing( dataSource: DataSource, datasetOrganizationId: String, + datasetId: ObjectId, datasetDataStore: DataStore, fallbackLayer: Option[SegmentationLayer], boundingBox: Option[BoundingBox] = None, @@ -138,7 +139,7 @@ class AnnotationService @Inject()( remoteDatastoreClient = new WKRemoteDataStoreClient(datasetDataStore, rpc) fallbackLayerHasSegmentIndex <- fallbackLayer match { case Some(layer) => - remoteDatastoreClient.hasSegmentIndexFile(datasetOrganizationId, dataSource.id.directoryName, layer.name) + remoteDatastoreClient.hasSegmentIndexFile(datasetId, layer.name) case None => Fox.successful(false) } elementClassProto <- ElementClass @@ -237,6 +238,7 @@ class AnnotationService @Inject()( volumeTracing <- createVolumeTracing( dataSource, dataset._organization, + dataset._id, dataStore, fallbackLayer, magRestrictions = params.magRestrictions.getOrElse(MagRestrictions.empty), @@ -430,6 +432,7 @@ class AnnotationService @Inject()( volumeTracing <- createVolumeTracing( dataSource, dataset._organization, + datasetId, dataStore, fallbackLayer = fallbackLayer, boundingBox = boundingBox.flatMap { box => diff --git a/app/models/dataset/ComposeService.scala b/app/models/dataset/ComposeService.scala new file mode 100644 index 00000000000..4301d9ebafc --- /dev/null +++ b/app/models/dataset/ComposeService.scala @@ -0,0 +1,102 @@ +package models.dataset + +import com.scalableminds.util.accesscontext.DBAccessContext +import com.scalableminds.util.objectid.ObjectId +import com.scalableminds.util.tools.{Fox, 
FoxImplicits} +import com.scalableminds.webknossos.datastore.models.VoxelSize +import com.scalableminds.webknossos.datastore.models.datasource._ +import models.user.User +import play.api.libs.json.{Json, OFormat} + +import javax.inject.Inject +import scala.concurrent.ExecutionContext + +case class ComposeRequest( + newDatasetName: String, + targetFolderId: ObjectId, + organizationId: String, + voxelSize: VoxelSize, + layers: Seq[ComposeRequestLayer] +) + +object ComposeRequest { + implicit val composeRequestFormat: OFormat[ComposeRequest] = Json.format[ComposeRequest] +} +case class ComposeRequestLayer( + datasetId: ObjectId, + sourceName: String, + newName: String, + transformations: Seq[CoordinateTransformation] +) + +object ComposeRequestLayer { + implicit val composeLayerFormat: OFormat[ComposeRequestLayer] = Json.format[ComposeRequestLayer] +} + +class ComposeService @Inject()(datasetDAO: DatasetDAO, dataStoreDAO: DataStoreDAO, datasetService: DatasetService)( + implicit ec: ExecutionContext) + extends FoxImplicits { + + def composeDataset(composeRequest: ComposeRequest, user: User)( + implicit ctx: DBAccessContext): Fox[(DataSource, ObjectId)] = + for { + _ <- Fox.assertTrue(isComposable(composeRequest)) ?~> "Datasets are not composable, they are not on the same data store" + dataSource <- createDatasource(composeRequest, composeRequest.newDatasetName, composeRequest.organizationId) + dataStore <- dataStoreDAO.findOneWithUploadsAllowed + dataset <- datasetService.createVirtualDataset(composeRequest.newDatasetName, + composeRequest.organizationId, + dataStore, + dataSource, + Some(composeRequest.targetFolderId.toString), + user) + + } yield (dataSource, dataset._id) + + private def getLayerFromComposeLayer(composeLayer: ComposeRequestLayer)( + implicit ctx: DBAccessContext): Fox[DataLayer] = + for { + dataset <- datasetDAO.findOne(composeLayer.datasetId) ?~> "Dataset not found" + dataSource <- datasetService.fullDataSourceFor(dataset) + usableDataSource <- dataSource.toUsable.toFox ?~> "Dataset not usable" + layer <- usableDataSource.dataLayers.find(_.name == composeLayer.sourceName).toFox + applyCoordinateTransformations = (cOpt: Option[List[CoordinateTransformation]]) => + cOpt match { + case Some(c) => Some(c ++ composeLayer.transformations.toList) + case None => Some(composeLayer.transformations.toList) + } + editedLayer: DataLayer <- layer match { + case l: DataLayerWithMagLocators => + Fox.successful( + l.mapped(name = composeLayer.newName, + coordinateTransformations = applyCoordinateTransformations(l.coordinateTransformations))) + case _ => Fox.failure("Unsupported layer type for composition: " + layer.getClass.getSimpleName) + } + } yield editedLayer + + private def isComposable(composeRequest: ComposeRequest)(implicit ctx: DBAccessContext): Fox[Boolean] = + // Check that all datasets are on the same data store + // Using virtual datasets, we should also be able to compose datasets using non-file paths from different data + // stores, however, the data store is only stored for each dataset and not per mag. 
+ for { + _ <- Fox.fromBool(composeRequest.layers.nonEmpty) ?~> "Cannot compose dataset with no layers" + datasetIds = composeRequest.layers.map(_.datasetId).distinct + datasets <- Fox.serialCombined(datasetIds)(datasetDAO.findOne(_)) + dataStores = datasets.map(_._dataStore) + } yield { + dataStores.distinct.size == 1 + } + + private def createDatasource(composeRequest: ComposeRequest, datasetDirectoryName: String, organizationId: String)( + implicit ctx: DBAccessContext): Fox[DataSource] = + for { + layers <- Fox.serialCombined(composeRequest.layers.toList)(getLayerFromComposeLayer(_)) + dataSource = GenericDataSource( + DataSourceId(datasetDirectoryName, organizationId), + layers, + composeRequest.voxelSize, + None + ) + + } yield dataSource + +} diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index dc2269cb6ae..2fb34121954 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -62,6 +62,7 @@ case class Dataset(_id: ObjectId, directoryName: String, isPublic: Boolean, isUsable: Boolean, + isVirtual: Boolean, name: String, voxelSize: Option[VoxelSize], sharingToken: Option[String], @@ -145,6 +146,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA r.directoryname, r.ispublic, r.isusable, + r.isvirtual, r.name, voxelSize, r.sharingtoken, @@ -611,7 +613,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA _id, _dataStore, _organization, _publication, _uploader, _folder, inboxSourceHash, defaultViewConfiguration, adminViewConfiguration, - description, directoryName, isPublic, isUsable, + description, directoryName, isPublic, isUsable, isVirtual, name, voxelSizeFactor, voxelSizeUnit, status, sharingToken, sortingKey, metadata, tags, created, isDeleted @@ -620,7 +622,7 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA ${d._id}, ${d._dataStore}, ${d._organization}, ${d._publication}, ${d._uploader}, ${d._folder}, ${d.inboxSourceHash}, $defaultViewConfiguration, $adminViewConfiguration, - ${d.description}, ${d.directoryName}, ${d.isPublic}, ${d.isUsable}, + ${d.description}, ${d.directoryName}, ${d.isPublic}, ${d.isUsable}, ${d.isVirtual}, ${d.name}, ${d.voxelSize.map(_.factor)}, ${d.voxelSize.map(_.unit)}, ${d.status.take(1024)}, ${d.sharingToken}, ${d.sortingKey}, ${d.metadata}, ${d.tags}, ${d.created}, ${d.isDeleted} @@ -655,8 +657,8 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA unreportedStatus: String, inactiveStatusList: List[String]): Fox[Unit] = { val inclusionPredicate = - if (existingDatasetIds.isEmpty) q"TRUE" - else q"_id NOT IN ${SqlToken.tupleFromList(existingDatasetIds)}" + if (existingDatasetIds.isEmpty) q"NOT isVirtual" + else q"_id NOT IN ${SqlToken.tupleFromList(existingDatasetIds)} AND NOT isVirtual" val statusNotAlreadyInactive = q"status NOT IN ${SqlToken.tupleFromList(inactiveStatusList)}" val deleteMagsQuery = q"""DELETE FROM webknossos.dataset_mags @@ -762,25 +764,17 @@ class DatasetMagsDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionConte layer.magsOpt match { case Some(mags) => mags.map(mag => { - q"""INSERT INTO webknossos.dataset_mags(_dataset, dataLayerName, mag, axisOrder, channelIndex, credentialId) - VALUES($datasetId, ${layer.name}, ${mag.mag}, ${mag.axisOrder + q"""INSERT INTO webknossos.dataset_mags(_dataset, dataLayerName, mag, path, axisOrder, channelIndex, credentialId) + VALUES($datasetId, ${layer.name}, ${mag.mag}, ${mag.path}, ${mag.axisOrder 
.map(Json.toJson(_))}, ${mag.channelIndex}, ${mag.credentialId}) """.asUpdate }) case None => - layer.wkwResolutionsOpt match { - case Some(resolutions) => - resolutions.map(wkwResolution => { - q"""INSERT INTO webknossos.dataset_mags(_dataset, dataLayerName, mag, cubeLength) - VALUES ($datasetId, ${layer.name}, ${wkwResolution.resolution}, ${wkwResolution.cubeLength})""".asUpdate - }) - case None => - layer.resolutions.distinct.map { mag: Vec3Int => - { - q"""INSERT INTO webknossos.dataset_mags(_dataset, dataLayerName, mag) + layer.resolutions.distinct.map { mag: Vec3Int => + { + q"""INSERT INTO webknossos.dataset_mags(_dataset, dataLayerName, mag) VALUES($datasetId, ${layer.name}, $mag)""".asUpdate - } - } + } } } diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index cf7c5241554..cb98c0092fa 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -1,6 +1,6 @@ package models.dataset -import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, DBAccessContext, GlobalAccessContext} import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} @@ -9,6 +9,8 @@ import com.scalableminds.webknossos.datastore.dataformats.layers.{ N5SegmentationLayer, PrecomputedDataLayer, PrecomputedSegmentationLayer, + WKWDataLayer, + WKWSegmentationLayer, Zarr3DataLayer, Zarr3SegmentationLayer, ZarrDataLayer, @@ -23,6 +25,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ AbstractDataLayer, AbstractSegmentationLayer, DataFormat, + DataSource, DataSourceId, GenericDataSource, DataLayerLike => DataLayer @@ -50,7 +53,6 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, datasetLastUsedTimesDAO: DatasetLastUsedTimesDAO, datasetDataLayerDAO: DatasetLayerDAO, datasetMagsDAO: DatasetMagsDAO, - datasetLayerAttachmentsDAO: DatasetLayerAttachmentsDAO, teamDAO: TeamDAO, folderDAO: FolderDAO, dataStoreService: DataStoreService, @@ -63,7 +65,8 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, with LazyLogging { private val unreportedStatus = datasetDAO.unreportedStatus private val notYetUploadedStatus = "Not yet fully uploaded." 
- private val inactiveStatusList = List(unreportedStatus, notYetUploadedStatus, datasetDAO.deletedByUserStatus) + private val inactiveStatusList = + List(unreportedStatus, notYetUploadedStatus, datasetDAO.deletedByUserStatus) def assertValidDatasetName(name: String): Fox[Unit] = for { @@ -97,6 +100,28 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, } yield newDataset } + def createVirtualDataset(datasetName: String, + organizationId: String, + dataStore: DataStore, + dataSource: DataSource, + folderId: Option[String], + user: User): Fox[Dataset] = + for { + _ <- assertValidDatasetName(datasetName) + isDatasetNameAlreadyTaken <- datasetDAO.doesDatasetDirectoryExistInOrganization(datasetName, organizationId)( + GlobalAccessContext) + _ <- Fox.fromBool(!isDatasetNameAlreadyTaken) ?~> "dataset.name.alreadyTaken" + organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> "organization.notFound" + folderId <- ObjectId.fromString(folderId.getOrElse(organization._rootFolder.toString)) ?~> "dataset.upload.folderId.invalid" + _ <- folderDAO.assertUpdateAccess(folderId)(AuthorizedAccessContext(user)) ?~> "folder.noWriteAccess" + newDatasetId = ObjectId.generate + abstractDataSource = dataSource.copy(dataLayers = dataSource.dataLayers.map(AbstractDataLayer.from)) + dataset <- createDataset(dataStore, newDatasetId, datasetName, abstractDataSource, isVirtual = true) + datasetId = dataset._id + _ <- datasetDAO.updateFolder(datasetId, folderId)(GlobalAccessContext) + _ <- addUploader(dataset, user._id)(GlobalAccessContext) + } yield dataset + def getAllUnfinishedDatasetUploadsOfUser(userId: ObjectId, organizationId: String)( implicit ctx: DBAccessContext): Fox[List[DatasetCompactInfo]] = datasetDAO.findAllCompactWithSearch( @@ -114,7 +139,8 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, datasetId: ObjectId, datasetName: String, dataSource: InboxDataSource, - publication: Option[ObjectId] = None + publication: Option[ObjectId] = None, + isVirtual: Boolean = false ): Fox[Dataset] = { implicit val ctx: DBAccessContext = GlobalAccessContext val metadata = @@ -144,6 +170,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, directoryName = dataSource.id.directoryName, isPublic = false, isUsable = dataSource.isUsable, + isVirtual = isVirtual, name = datasetName, voxelSize = dataSource.voxelSizeOpt, sharingToken = None, @@ -259,8 +286,8 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, } } else Fox.successful(None) - def deactivateUnreportedDataSources(existingDatasetIds: List[ObjectId], dataStore: DataStore): Fox[Unit] = - datasetDAO.deactivateUnreported(existingDatasetIds, dataStore.name, unreportedStatus, inactiveStatusList) + def deactivateUnreportedDataSources(reportedDatasetIds: List[ObjectId], dataStore: DataStore): Fox[Unit] = + datasetDAO.deactivateUnreported(reportedDatasetIds, dataStore.name, unreportedStatus, inactiveStatusList) def getSharingToken(datasetId: ObjectId)(implicit ctx: DBAccessContext): Fox[String] = { @@ -293,7 +320,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, // Returns a JSON that includes all properties of the data source and of data layers to read the dataset def fullDataSourceFor(dataset: Dataset): Fox[InboxDataSource] = (for { - dataLayers <- findLayersForDatasetWithMags(dataset._id) + dataLayers <- findLayersForDataset(dataset._id) dataSourceId = DataSourceId(dataset.directoryName, dataset._organization) } yield { if (dataset.isUsable) @@ -304,10 +331,9 @@ 
class DatasetService @Inject()(organizationDAO: OrganizationDAO, Fox.successful(UnusableDataSource[DataLayer](dataSourceId, dataset.status, dataset.voxelSize)) }).flatten - private def findLayersForDatasetWithMags(datasetId: ObjectId): Fox[List[DataLayer]] = + private def findLayersForDataset(datasetId: ObjectId): Fox[List[DataLayer]] = for { layers <- datasetDataLayerDAO.findAllForDataset(datasetId) - _ <- Fox.fromBool(!layers.flatMap(_.dataFormatOpt).contains(DataFormat.wkw)) ?~> "WKW data format not supported in this context, only datasets with MagLocators are supported" layerNamesAndMags <- datasetMagsDAO.findAllByDatasetId(datasetId) layersWithMags <- Fox.serialCombined(layers) { layer => tryo { @@ -325,14 +351,23 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, attachmentsOpt, _, numChannels, - dataFormat, - _) => + dataFormat) => dataFormat match { case Some(df) => df match { case DataFormat.wkw => - throw new NotImplementedError( - "WKW data format not supported in this context, only datasets with MagLocators are supported") + WKWDataLayer( + name, + category, + boundingBox, + mags, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachmentsOpt + ) case DataFormat.neuroglancerPrecomputed => PrecomputedDataLayer( name, @@ -407,14 +442,24 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, attachmentsOpt, _, numChannels, - dataFormat, - _) => + dataFormat) => dataFormat match { case Some(df) => df match { case DataFormat.wkw => - throw new NotImplementedError( - "WKW data format not supported in this context, only datasets with MagLocators are supported") + WKWSegmentationLayer( + name, + boundingBox, + mags, + elementClass, + mappings, + largestSegmentId, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachmentsOpt + ) case DataFormat.neuroglancerPrecomputed => PrecomputedSegmentationLayer( name, diff --git a/app/models/dataset/WKRemoteDataStoreClient.scala b/app/models/dataset/WKRemoteDataStoreClient.scala index b593d21cbd7..a5c2a1037de 100644 --- a/app/models/dataset/WKRemoteDataStoreClient.scala +++ b/app/models/dataset/WKRemoteDataStoreClient.scala @@ -2,6 +2,7 @@ package models.dataset import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.explore.{ ExploreRemoteDatasetRequest, @@ -9,21 +10,19 @@ import com.scalableminds.webknossos.datastore.explore.{ ExploreRemoteLayerParameters } import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, RawCuboidRequest} -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, GenericDataSource} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.DirectoryStorageReport import com.typesafe.scalalogging.LazyLogging import controllers.RpcTokenHolder import play.api.libs.json.JsObject import play.utils.UriEncoding -import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLogging { - private lazy val hasSegmentIndexFileCache: AlfuCache[(String, String, String), Boolean] = + private lazy val hasSegmentIndexFileCache: AlfuCache[(ObjectId, String), Boolean] = 
AlfuCache(timeToLive = 1 minute) def getDataLayerThumbnail(dataset: Dataset, @@ -36,8 +35,7 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin val targetMagBoundingBox = mag1BoundingBox / mag logger.debug( s"Thumbnail called for: ${dataset._id}, organization: ${dataset._organization}, directoryName: ${dataset.directoryName}, Layer: $dataLayerName") - rpc( - s"${dataStore.url}/data/datasets/${urlEncode(dataset._organization)}/${urlEncode(dataset.directoryName)}/layers/$dataLayerName/thumbnail.jpg") + rpc(s"${dataStore.url}/data/datasets/${dataset._id}/layers/$dataLayerName/thumbnail.jpg") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .addQueryString("mag" -> mag.toMagLiteral()) .addQueryString("x" -> mag1BoundingBox.topLeft.x.toString) @@ -60,16 +58,14 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin additionalCoordinates: Option[Seq[AdditionalCoordinate]]): Fox[Array[Byte]] = { val targetMagBoundingBox = mag1BoundingBox / mag logger.debug(s"Fetching raw data. Mag $mag, mag1 bbox: $mag1BoundingBox, target-mag bbox: $targetMagBoundingBox") - rpc( - s"${dataStore.url}/data/datasets/${urlEncode(dataset._organization)}/${urlEncode(dataset.directoryName)}/layers/$layerName/readData") + rpc(s"${dataStore.url}/data/datasets/${dataset._id}/layers/$layerName/readData") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .postJsonWithBytesResponse( RawCuboidRequest(mag1BoundingBox.topLeft, targetMagBoundingBox.size, mag, additionalCoordinates)) } def findPositionWithData(dataset: Dataset, dataLayerName: String): Fox[JsObject] = - rpc( - s"${dataStore.url}/data/datasets/${dataset._organization}/${dataset.directoryName}/layers/$dataLayerName/findData") + rpc(s"${dataStore.url}/data/datasets/${dataset._id}/layers/$dataLayerName/findData") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .getWithJsonResponse[JsObject] @@ -82,25 +78,12 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin .silent .getWithJsonResponse[List[DirectoryStorageReport]] - def addDataSource(organizationId: String, - datasetName: String, - dataSource: GenericDataSource[DataLayer], - folderId: Option[ObjectId], - userToken: String): Fox[Unit] = - for { - _ <- rpc(s"${dataStore.url}/data/datasets/$organizationId/$datasetName") - .addQueryString("token" -> userToken) - .addQueryStringOptional("folderId", folderId.map(_.toString)) - .postJson(dataSource) - } yield () - - def hasSegmentIndexFile(organizationId: String, datasetName: String, layerName: String)( - implicit ec: ExecutionContext): Fox[Boolean] = { - val cacheKey = (organizationId, datasetName, layerName) + def hasSegmentIndexFile(datasetId: ObjectId, layerName: String)(implicit ec: ExecutionContext): Fox[Boolean] = { + val cacheKey = (datasetId, layerName) hasSegmentIndexFileCache.getOrLoad( cacheKey, k => - rpc(s"${dataStore.url}/data/datasets/${k._1}/${k._2}/layers/${k._3}/hasSegmentIndex") + rpc(s"${dataStore.url}/data/datasets/${k._1}/layers/${k._2}/hasSegmentIndex") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .silent .getWithJsonResponse[Boolean] @@ -117,7 +100,7 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin def updateDatasetInDSCache(datasetId: String): Fox[Unit] = for { - _ <- rpc(s"${dataStore.url}/data/wkDatasets/$datasetId") + _ <- rpc(s"${dataStore.url}/data/datasets/$datasetId") .addQueryString("token" -> RpcTokenHolder.webknossosToken) .delete() } yield () diff --git 
a/app/models/dataset/explore/WKExploreRemoteLayerService.scala b/app/models/dataset/explore/WKExploreRemoteLayerService.scala index 272ea5e9550..a963a8b2fef 100644 --- a/app/models/dataset/explore/WKExploreRemoteLayerService.scala +++ b/app/models/dataset/explore/WKExploreRemoteLayerService.scala @@ -105,17 +105,21 @@ class WKExploreRemoteLayerService @Inject()(credentialService: CredentialService credentialId <- Fox.runOptional(credentialOpt)(c => credentialService.insertOne(c)) ?~> "dataVault.credential.insert.failed" } yield credentialId - def addRemoteDatasource(dataSource: GenericDataSource[DataLayer], - datasetName: String, - user: User, - folderId: Option[ObjectId])(implicit ctx: DBAccessContext): Fox[Unit] = + def addRemoteDatasourceToDatabase(dataSource: GenericDataSource[DataLayer], + datasetName: String, + user: User, + folderId: Option[ObjectId])(implicit ctx: DBAccessContext): Fox[Unit] = for { - organization <- organizationDAO.findOne(user._organization) dataStore <- dataStoreDAO.findOneWithUploadsAllowed + organizationId = user._organization _ <- datasetService.assertValidDatasetName(datasetName) - client = new WKRemoteDataStoreClient(dataStore, rpc) - userToken <- bearerTokenService.createAndInitDataStoreTokenForUser(user) - _ <- client.addDataSource(organization._id, datasetName, dataSource, folderId, userToken) + _ <- datasetService.createVirtualDataset( + dataSource.id.directoryName, + organizationId, + dataStore, + dataSource, + folderId.map(_.toString), + user + ) } yield () - } diff --git a/conf/evolutions/137-virtual-datasets.sql b/conf/evolutions/137-virtual-datasets.sql new file mode 100644 index 00000000000..993d477ae1a --- /dev/null +++ b/conf/evolutions/137-virtual-datasets.sql @@ -0,0 +1,13 @@ +START TRANSACTION; + +do $$ begin ASSERT (select schemaVersion from webknossos.releaseInformation) = 136, 'Previous schema version mismatch'; end; $$ LANGUAGE plpgsql; + +DROP VIEW IF EXISTS webknossos.datasets_; +ALTER TABLE webknossos.datasets ADD COLUMN IF NOT EXISTS isVirtual BOOLEAN NOT NULL DEFAULT FALSE; +CREATE VIEW webknossos.datasets_ AS SELECT * FROM webknossos.datasets WHERE NOT isDeleted; + +ALTER TABLE webknossos.dataset_mags DROP COLUMN IF EXISTS cubelength; + +UPDATE webknossos.releaseInformation SET schemaVersion = 137; + +COMMIT TRANSACTION; diff --git a/conf/evolutions/reversions/137-virtual-datasets.sql b/conf/evolutions/reversions/137-virtual-datasets.sql new file mode 100644 index 00000000000..ff6adaac40f --- /dev/null +++ b/conf/evolutions/reversions/137-virtual-datasets.sql @@ -0,0 +1,13 @@ +START TRANSACTION; + +do $$ begin ASSERT (select schemaVersion from webknossos.releaseInformation) = 137, 'Previous schema version mismatch'; end; $$ LANGUAGE plpgsql; + +DROP VIEW IF EXISTS webknossos.datasets_; +ALTER TABLE webknossos.datasets DROP COLUMN IF EXISTS isVirtual; +CREATE VIEW webknossos.datasets_ AS SELECT * FROM webknossos.datasets WHERE NOT isDeleted; + +ALTER TABLE webknossos.dataset_mags ADD COLUMN IF NOT EXISTS cubelength INTEGER; + +UPDATE webknossos.releaseInformation SET schemaVersion = 136; + +COMMIT TRANSACTION; diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index b095dfe6775..ca2aaec6b62 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -93,6 +93,7 @@ POST /datasets/:datasetId/layers/:layer/segmentAnythingMask PUT /datasets/:datasetId/clearThumbnailCache controllers.DatasetController.removeFromThumbnailCache(datasetId: ObjectId) GET /datasets/:datasetName/isValidNewName 
controllers.DatasetController.isValidNewName(datasetName: String) GET /datasets/:datasetId controllers.DatasetController.read(datasetId: ObjectId, sharingToken: Option[String]) +POST /datasets/compose controllers.DatasetController.compose() # Folders GET /folders/root controllers.FolderController.getRoot() @@ -108,13 +109,16 @@ GET /datastores PUT /datastores/:name/datasource controllers.WKRemoteDataStoreController.updateOne(name: String, key: String) PUT /datastores/:name/datasources controllers.WKRemoteDataStoreController.updateAll(name: String, key: String) PUT /datastores/:name/datasources/paths controllers.WKRemoteDataStoreController.updatePaths(name: String, key: String) -GET /datastores/:name/datasources/:organizationId/:directoryName/paths controllers.WKRemoteDataStoreController.getPaths(name: String, key: String, organizationId: String, directoryName: String) +GET /datastores/:name/datasources/:datasetId/paths controllers.WKRemoteDataStoreController.getPaths(name: String, key: String, datasetId: ObjectId) GET /datastores/:name/datasources/:datasetId controllers.WKRemoteDataStoreController.getDataSource(name: String, key: String, datasetId: ObjectId) +POST /datastores/:name/datasources/:organizationId/:directoryName controllers.WKRemoteDataStoreController.registerDataSource(name: String, key: String, organizationId: String, directoryName: String, token: String) +PUT /datastores/:name/datasources/:datasetId controllers.WKRemoteDataStoreController.updateDataSource(name: String, key: String, datasetId: ObjectId) PATCH /datastores/:name/status controllers.WKRemoteDataStoreController.statusUpdate(name: String, key: String) POST /datastores/:name/reserveUpload controllers.WKRemoteDataStoreController.reserveDatasetUpload(name: String, key: String, token: String) GET /datastores/:name/getUnfinishedUploadsForUser controllers.WKRemoteDataStoreController.getUnfinishedUploadsForUser(name: String, key: String, token: String, organizationName: String) POST /datastores/:name/reportDatasetUpload controllers.WKRemoteDataStoreController.reportDatasetUpload(name: String, key: String, token: String, datasetDirectoryName: String, datasetSizeBytes: Long, needsConversion: Boolean, viaAddRoute: Boolean) POST /datastores/:name/deleteDataset controllers.WKRemoteDataStoreController.deleteDataset(name: String, key: String) +POST /datastores/:name/deleteVirtualDataset controllers.WKRemoteDataStoreController.deleteVirtualDataset(name: String, key: String) GET /datastores/:name/jobExportProperties controllers.WKRemoteDataStoreController.jobExportProperties(name: String, key: String, jobId: ObjectId) GET /datastores/:name/findCredential controllers.WKRemoteDataStoreController.findCredential(name: String, key: String, credentialId: ObjectId) POST /datastores/:name/validateUserAccess controllers.UserTokenController.validateAccessViaDatastore(name: String, key: String, token: Option[String]) @@ -129,9 +133,9 @@ POST /tracingstores/:name/updateAnnotation POST /tracingstores/:name/validateUserAccess controllers.UserTokenController.validateAccessViaTracingstore(name: String, key: String, token: Option[String]) PUT /tracingstores/:name controllers.TracingStoreController.update(name: String) GET /tracingstores/:name/dataSource controllers.WKRemoteTracingStoreController.dataSourceForAnnotation(name: String, key: String, annotationId: ObjectId) -GET /tracingstores/:name/dataSourceId controllers.WKRemoteTracingStoreController.dataSourceIdForAnnotation(name: String, key: String, annotationId: ObjectId) +GET 
/tracingstores/:name/datasetId controllers.WKRemoteTracingStoreController.datasetIdForAnnotation(name: String, key: String, annotationId: ObjectId) GET /tracingstores/:name/annotationId controllers.WKRemoteTracingStoreController.annotationIdForTracing(name: String, key: String, tracingId: String) -GET /tracingstores/:name/dataStoreUri/:datasetDirectoryName controllers.WKRemoteTracingStoreController.dataStoreUriForDataset(name: String, key: String, organizationId: Option[String], datasetDirectoryName: String) +GET /tracingstores/:name/dataStoreUri/:datasetId controllers.WKRemoteTracingStoreController.dataStoreUriForDataset(name: String, key: String, datasetId: ObjectId) POST /tracingstores/:name/createTracing controllers.WKRemoteTracingStoreController.createTracing(name: String, key: String, annotationId: ObjectId, previousVersion: Long) # User access tokens for datastore authentication diff --git a/conf/webknossos.versioned.routes b/conf/webknossos.versioned.routes index 7faa44ce744..39a1e214766 100644 --- a/conf/webknossos.versioned.routes +++ b/conf/webknossos.versioned.routes @@ -3,7 +3,8 @@ # example: assume, the features route has changed, introducing v2. The older v1 needs to be provided in the legacyApiController # Note: keep this in sync with the reported version numbers in the com.scalableminds.util.mvc.ApiVersioning trait -# version log:updateDatasetV8 +# version log + # changed in v10: Datasets are accessed by their id, not their name and organization id. This leads to changes to the datastore routes. # changed in v9: Datasets are now identified by their id, not their name. The routes now need to pass a dataset id instead of a name and organization id tuple. # Requests to the TracingStore and DatasStore need to address a dataset based on its directoryName and organization id. # changed in v8: Datasets' name was renamed to id and the displayName is now named name. @@ -14,6 +15,7 @@ # new in v3: annotation info and finish request now take timestamp # new in v2: annotation json contains visibility enum instead of booleans +-> /v10/ webknossos.latest.Routes -> /v9/ webknossos.latest.Routes # v8: support changes to v9 diff --git a/docs/data/concepts.md b/docs/data/concepts.md index 46eb20d609e..5b34f6b7e1e 100644 --- a/docs/data/concepts.md +++ b/docs/data/concepts.md @@ -1,4 +1,3 @@ - # High-Level Concepts ## Datasets, Cubes, and Buckets @@ -24,7 +23,7 @@ A WEBKNOSSOS dataset can contain several `color` and `segmentation` layers which ## Magnification Steps and Downsampling -To enable zooming within huge datasets in WEBKNOSSOS, dataset layers usually contain multiple magnification steps (also called mags, mipmaps, image pyramids or resolutions). +To enable zooming within huge datasets in WEBKNOSSOS, dataset layers usually contain multiple magnification steps (also called mags, mipmaps or resolutions). `1` is the magnification step with the finest resolution, i.e. the original data. `2` is downsampled by a factor of two in all dimensions and therefore only is an eighth of the file size of the original data. Downsampling is done in power-of-two steps: `1, 2, 4, 8, 16, 32, 64, …` @@ -51,7 +50,7 @@ The underlying data type limits the maximum number of IDs: ## Dataset Metadata -For each dataset, we stored metadata in a `datasource-properties.json` file. +For each dataset, we store metadata in a `datasource-properties.json` file. See below for the [full specification](#dataset-metadata-specification). 
This is an example: @@ -70,12 +69,12 @@ This is an example: "height" : 1024, "depth" : 1024 }, - "wkwResolutions" : [ - { "resolution": 1, "cubeLength": 1024 }, - { "resolution": [ 2, 2, 1 ], "cubeLength": 1024 }, - { "resolution": [ 4, 4, 1 ], "cubeLength": 1024 }, - { "resolution": [ 8, 8, 1 ], "cubeLength": 1024 }, - { "resolution": [ 16, 16, 2 ], "cubeLength": 1024 }, + "mags" : [ + { "mag": [1, 1, 1], "path": "my_team/great_dataset/color/1" }, + { "mag": [2, 2, 1], "path": "my_team/great_dataset/color/2" }, + { "mag": [4, 4, 1], "path": "my_team/great_dataset/color/4" }, + { "mag": [8, 8, 1], "path": "my_team/great_dataset/color/8" }, + { "mag": [16, 16, 2], "path": "my_team/great_dataset/color/16" } ], "elementClass" : "uint8", "dataFormat" : "wkw" @@ -87,13 +86,11 @@ This is an example: "height" : 1024, "depth" : 1024 }, - "wkwResolutions" : [ { - "resolution" : 1, - "cubeLength" : 1024 - }, { - "resolution" : [ 2, 2, 1 ], - "cubeLength" : 1024 - } ], + "mags" : [ + { "mag" : [1, 1, 1], "path": "my_team/great_dataset/segmentation/1" }, + { "mag" : [2, 2, 1], "path": "my_team/great_dataset/segmentation/2" } + ], + "cubeLength": 1024, "elementClass" : "uint32", "largestSegmentId" : 1000000000, "category" : "segmentation", @@ -103,10 +100,9 @@ This is an example: } ``` -Note that the `resolutions` property within the elements of `wkwResolutions` can be an array of length 3. -The three components within such a resolution denote the scaling factor for x, y, and z. -The term "magnifications" is used synonymously for resolutions throughout the UI. -At the moment, WebKnossos guarantees correct rendering of data with non-uniform resolution factors only if the z-component between two resolutions changes by a factor of 1 or 2. +Note that the `mag` property within the elements of `mags` is always an array of length 3, denoting the scaling factor for x, y, and z. The `path` property specifies the location of the data for each magnification step. +The term "magnifications" is used synonymously for mags throughout the UI. +At the moment, WebKnossos guarantees correct rendering of data with non-uniform mag factors only if the z-component between two mags changes by a factor of 1 or 2. Most users do not create these metadata files manually. When using the [WEBKNOSSOS CLI](https://docs.webknossos.org/cli), a metadata file is automatically generated. Alternatively, you can create and edit WEBKNOSSOS datasets using the [WEBKNOSSOS Python library](https://github.com/scalableminds/webknossos-libs/). @@ -126,9 +122,9 @@ WEBKNOSSOS requires several metadata properties for each dataset to properly dis + `dataLayers.category`: Either `color` for raw data or `segmentation` for segmentation layers. + `dataLayers.boundingBox`: The position and size of the data that is contained in this layer. `topLeft` holds the `min_x,min_y,min_z` position, `width` is `max_x - min_x`, `height` is `max_y - min_y` and `depth` is `max_z - min_z`. - + `dataLayers.wkwResolutions`: Holds information about the available magnification steps of the layer. - * `dataLayers.wkwResolutions.resolution`: Either a scalar integer (e.g., `1`, `2` or `4`) or a 3-tuple (e.g., `2, 2, 1`) for non-uniform magnifications. - * `dataLayers.wkwResolutions.cubeLength`: The cube size of the WKW cube files. Usually is `1024`. + + `dataLayers.mags`: Holds information about the available magnification steps of the layer. + * `dataLayers.mags.mag`: A 3-tuple (e.g., `[1, 1, 1]`, `[2, 2, 1]`) for uniform or non-uniform magnifications. 
+ * `dataLayers.mags.path`: The path to the directory containing the data for this magnification step. + `dataLayers.elementClass`: The underlying datatype of the layer, e.g., `uint8`, `uint16`, `uint24` (rgb), `uint32`, `uint64`, `float` (32-bit) or `double` (64-bit). + `dataLayers.largestSegmentId`: The highest ID that is currently used in the respective segmentation layer. This is required for volume annotations where new objects with incrementing IDs are created. Only applies to segmentation layers. diff --git a/frontend/javascripts/admin/api/mesh.ts b/frontend/javascripts/admin/api/mesh.ts index d7604fb001d..a1d77bdad5d 100644 --- a/frontend/javascripts/admin/api/mesh.ts +++ b/frontend/javascripts/admin/api/mesh.ts @@ -1,5 +1,5 @@ import Request from "libs/request"; -import type { APIDataSourceId, APIMeshFileInfo } from "types/api_types"; +import type { APIMeshFileInfo } from "types/api_types"; import type { Vector3, Vector4 } from "viewer/constants"; import { doWithToken } from "./token"; @@ -28,7 +28,7 @@ type ListMeshChunksRequest = { export function getMeshfileChunksForSegment( dataStoreUrl: string, - dataSourceId: APIDataSourceId, + datasetId: string, layerName: string, meshFile: APIMeshFileInfo, segmentId: number, @@ -56,7 +56,7 @@ export function getMeshfileChunksForSegment( segmentId, }; return Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/meshes/chunks?${params}`, + `${dataStoreUrl}/data/datasets/${datasetId}/layers/${layerName}/meshes/chunks?${params}`, { data: payload, showErrorToast: false, @@ -78,13 +78,13 @@ type MeshChunkDataRequestList = { export function getMeshfileChunkData( dataStoreUrl: string, - dataSourceId: APIDataSourceId, + datasetId: string, layerName: string, batchDescription: MeshChunkDataRequestList, ): Promise { return doWithToken(async (token) => { const dracoDataChunks = await Request.sendJSONReceiveArraybuffer( - `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/meshes/chunks/data?token=${token}`, + `${dataStoreUrl}/data/datasets/${datasetId}/layers/${layerName}/meshes/chunks/data?token=${token}`, { data: batchDescription, useWebworkerForArrayBuffer: true, diff --git a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx index 9392b61b452..4d9698de948 100644 --- a/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx +++ b/frontend/javascripts/admin/dataset/composition_wizard/04_configure_new_dataset.tsx @@ -72,10 +72,6 @@ export function ConfigureNewDataset(props: WizardComponentProps) { ).map( ([dataset, dataLayer]): LayerLink => ({ datasetId: dataset.id, - dataSourceId: { - directoryName: dataset.directoryName, - owningOrganization: dataset.owningOrganization, - }, datasetName: dataset.name, sourceName: dataLayer.name, newName: dataLayer.name, @@ -168,7 +164,7 @@ export function ConfigureNewDataset(props: WizardComponentProps) { const newDatasetName = form.getFieldValue(["name"]); setIsLoading(true); try { - const { newDatasetId } = await createDatasetComposition(datastoreToUse.url, { + const { newDatasetId } = await createDatasetComposition({ // keep identifying dataset at orgaId & directoryPath as this is a datastore request. 
newDatasetName, targetFolderId: form.getFieldValue(["targetFolderId"]), diff --git a/frontend/javascripts/admin/rest_api.ts b/frontend/javascripts/admin/rest_api.ts index b13fc86a1ab..06363b1722a 100644 --- a/frontend/javascripts/admin/rest_api.ts +++ b/frontend/javascripts/admin/rest_api.ts @@ -840,13 +840,12 @@ export async function getAnnotationProto( export function hasSegmentIndexInDataStore( dataStoreUrl: string, - datasetDirectoryName: string, + datasetId: string, dataLayerName: string, - organizationId: string, ) { return doWithToken((token) => Request.receiveJSON( - `${dataStoreUrl}/data/datasets/${organizationId}/${datasetDirectoryName}/layers/${dataLayerName}/hasSegmentIndex?token=${token}`, + `${dataStoreUrl}/data/datasets/${datasetId}/layers/${dataLayerName}/hasSegmentIndex?token=${token}`, ), ); } @@ -977,24 +976,21 @@ export async function getDatasets( export function readDatasetDatasource(dataset: APIDataset): Promise { return doWithToken((token) => Request.receiveJSON( - `${dataset.dataStore.url}/data/datasets/${dataset.owningOrganization}/${dataset.directoryName}/readInboxDataSource?token=${token}`, + `${dataset.dataStore.url}/data/datasets/${dataset.id}/readInboxDataSource?token=${token}`, ), ); } export async function updateDatasetDatasource( - datasetDirectoryName: string, dataStoreUrl: string, datasource: APIDataSource, + datasetId: string, ): Promise { await doWithToken((token) => - Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${datasource.id.team}/${datasetDirectoryName}?token=${token}`, - { - data: datasource, - method: "PUT", - }, - ), + Request.sendJSONReceiveJSON(`${dataStoreUrl}/data/datasets/${datasetId}?token=${token}`, { + data: datasource, + method: "PUT", + }), ); } @@ -1106,22 +1102,11 @@ type DatasetCompositionArgs = { }; export function createDatasetComposition( - datastoreUrl: string, payload: DatasetCompositionArgs, ): Promise { - // Formatting the dataSourceId to the old format so that the backend can parse it. - // And removing the datasetId as the datastore cannot use it. 
- const updatedLayers = payload.layers.map(({ dataSourceId, datasetId, ...rest }) => ({ - ...rest, - dataSourceId: { name: dataSourceId.directoryName, team: dataSourceId.owningOrganization }, - })); - const payloadWithUpdatedLayers = { - ...payload, - layers: updatedLayers, - }; return doWithToken((token) => - Request.sendJSONReceiveJSON(`${datastoreUrl}/data/datasets/compose?token=${token}`, { - data: payloadWithUpdatedLayers, + Request.sendJSONReceiveJSON(`/api/datasets/compose?token=${token}`, { + data: payload, }), ); } @@ -1330,6 +1315,7 @@ export async function triggerDatasetCheck(datastoreHost: string): Promise export async function triggerDatasetClearCache( datastoreHost: string, dataSourceId: APIDataSourceId, + datasetId: string, layerName?: string, ): Promise { await doWithToken((token) => { @@ -1339,7 +1325,7 @@ export async function triggerDatasetClearCache( params.set("layerName", layerName); } return Request.triggerRequest( - `/data/triggers/reload/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}?${params}`, + `/data/triggers/reload/${dataSourceId.owningOrganization}/${datasetId}?${params}`, { host: datastoreHost, method: "POST", @@ -1348,18 +1334,12 @@ export async function triggerDatasetClearCache( }); } -export async function deleteDatasetOnDisk( - datastoreHost: string, - dataSourceId: APIDataSourceId, -): Promise { +export async function deleteDatasetOnDisk(datastoreHost: string, datasetId: string): Promise { await doWithToken((token) => - Request.triggerRequest( - `/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/deleteOnDisk?token=${token}`, - { - host: datastoreHost, - method: "DELETE", - }, - ), + Request.triggerRequest(`/data/datasets/${datasetId}/deleteOnDisk?token=${token}`, { + host: datastoreHost, + method: "DELETE", + }), ); } @@ -1371,7 +1351,7 @@ export async function triggerDatasetClearThumbnailCache(datasetId: string): Prom export async function clearCache(dataset: APIMaybeUnimportedDataset, layerName?: string) { return Promise.all([ - triggerDatasetClearCache(dataset.dataStore.url, dataset, layerName), + triggerDatasetClearCache(dataset.dataStore.url, dataset, dataset.id, layerName), triggerDatasetClearThumbnailCache(dataset.id), ]); } @@ -1395,7 +1375,7 @@ export async function revokeDatasetSharingToken(datasetId: string): Promise { const { position, mag } = await doWithToken((token) => Request.receiveJSON( - `${datastoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/findData?token=${token}`, + `${datastoreUrl}/data/datasets/${dataset.id}/layers/${layerName}/findData?token=${token}`, ), ); return { @@ -1430,12 +1410,12 @@ export async function findDataPositionForVolumeTracing( export async function getHistogramForLayer( datastoreUrl: string, - dataSourceId: APIDataSourceId, + datasetId: string, layerName: string, ): Promise { return doWithToken((token) => Request.receiveJSON( - `${datastoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/histogram?token=${token}`, + `${datastoreUrl}/data/datasets/${datasetId}/layers/${layerName}/histogram?token=${token}`, { showErrorToast: false }, ), ); @@ -1443,25 +1423,25 @@ export async function getHistogramForLayer( export async function getMappingsForDatasetLayer( datastoreUrl: string, - dataSourceId: APIDataSourceId, + dataset: APIDataset, layerName: string, ): Promise> { return doWithToken((token) => Request.receiveJSON( - 
`${datastoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/mappings?token=${token}`, + `${datastoreUrl}/data/datasets/${dataset.id}/layers/${layerName}/mappings?token=${token}`, ), ); } export function fetchMapping( datastoreUrl: string, - dataSourceId: APIDataSourceId, + dataset: APIDataset, layerName: string, mappingName: string, ): Promise { return doWithToken((token) => Request.receiveJSON( - `${datastoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/mappings/${mappingName}?token=${token}`, + `${datastoreUrl}/data/datasets/${dataset.id}/layers/${layerName}/mappings/${mappingName}?token=${token}`, ), ); } @@ -1507,12 +1487,12 @@ export function getPositionForSegmentInAgglomerate( export async function getAgglomeratesForDatasetLayer( datastoreUrl: string, - dataSourceId: APIDataSourceId, + dataset: APIDataset, layerName: string, ): Promise> { return doWithToken((token) => Request.receiveJSON( - `${datastoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/agglomerates?token=${token}`, + `${datastoreUrl}/data/datasets/${dataset.id}/layers/${layerName}/agglomerates?token=${token}`, ), ); } @@ -1970,14 +1950,14 @@ export function getBucketPositionsForAdHocMesh( export function getAgglomerateSkeleton( dataStoreUrl: string, - dataSourceId: APIDataSourceId, + dataset: APIDataset, layerName: string, mappingId: string, agglomerateId: number, ): Promise { return doWithToken((token) => Request.receiveArraybuffer( - `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/agglomerates/${mappingId}/skeleton/${agglomerateId}?token=${token}`, // The webworker code cannot do proper error handling and always expects an array buffer from the server. + `${dataStoreUrl}/data/datasets/${dataset.id}/layers/${layerName}/agglomerates/${mappingId}/skeleton/${agglomerateId}?token=${token}`, // The webworker code cannot do proper error handling and always expects an array buffer from the server. // The webworker code cannot do proper error handling and always expects an array buffer from the server. // However, the server might send an error json instead of an array buffer. Therefore, don't use the webworker code. 
{ @@ -1990,7 +1970,7 @@ export function getAgglomerateSkeleton( export async function getAgglomeratesForSegmentsFromDatastore( dataStoreUrl: string, - dataSourceId: APIDataSourceId, + dataset: APIDataset, layerName: string, mappingId: string, segmentIds: Array, @@ -2003,7 +1983,7 @@ export async function getAgglomeratesForSegmentsFromDatastore Request.receiveArraybuffer( - `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/agglomerates/${mappingId}/agglomeratesForSegments?${params}`, + `${dataStoreUrl}/data/datasets/${dataset.id}/layers/${layerName}/agglomerates/${mappingId}/agglomeratesForSegments?${params}`, { method: "POST", body: segmentIdBuffer, @@ -2088,12 +2068,12 @@ export function getEditableAgglomerateSkeleton( export async function getMeshfilesForDatasetLayer( dataStoreUrl: string, - dataSourceId: APIDataSourceId, + dataset: APIDataset, layerName: string, ): Promise> { const meshFiles: Array = await doWithToken((token) => Request.receiveJSON( - `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/meshes?token=${token}`, + `${dataStoreUrl}/data/datasets/${dataset.id}/layers/${layerName}/meshes?token=${token}`, ), ); @@ -2109,19 +2089,19 @@ export async function getMeshfilesForDatasetLayer( // ### Connectomes export function getConnectomeFilesForDatasetLayer( dataStoreUrl: string, - dataSourceId: APIDataSourceId, + dataset: APIDataset, layerName: string, ): Promise> { return doWithToken((token) => Request.receiveJSON( - `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/connectomes?token=${token}`, + `${dataStoreUrl}/data/datasets/${dataset.id}/layers/${layerName}/connectomes?token=${token}`, ), ); } export function getSynapsesOfAgglomerates( dataStoreUrl: string, - dataSourceId: APIDataSourceId, + dataset: APIDataset, layerName: string, connectomeFile: string, agglomerateIds: Array, @@ -2133,7 +2113,7 @@ export function getSynapsesOfAgglomerates( > { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/connectomes/synapses?token=${token}`, + `${dataStoreUrl}/data/datasets/${dataset.id}/layers/${layerName}/connectomes/synapses?token=${token}`, { data: { connectomeFile, @@ -2146,7 +2126,7 @@ export function getSynapsesOfAgglomerates( function getSynapseSourcesOrDestinations( dataStoreUrl: string, - dataSourceId: APIDataSourceId, + dataset: APIDataset, layerName: string, connectomeFile: string, synapseIds: Array, @@ -2154,7 +2134,7 @@ function getSynapseSourcesOrDestinations( ): Promise> { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/connectomes/synapses/${srcOrDst}?token=${token}`, + `${dataStoreUrl}/data/datasets/${dataset.id}/layers/${layerName}/connectomes/synapses/${srcOrDst}?token=${token}`, { data: { connectomeFile, @@ -2177,14 +2157,14 @@ export function getSynapseDestinations(...args: any): Promise> { export function getSynapsePositions( dataStoreUrl: string, - dataSourceId: APIDataSourceId, + dataset: APIDataset, layerName: string, connectomeFile: string, synapseIds: Array, ): Promise> { return doWithToken((token) => Request.sendJSONReceiveJSON( - 
`${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/connectomes/synapses/positions?token=${token}`, + `${dataStoreUrl}/data/datasets/${dataset.id}/layers/${layerName}/connectomes/synapses/positions?token=${token}`, { data: { connectomeFile, @@ -2197,7 +2177,7 @@ export function getSynapsePositions( export function getSynapseTypes( dataStoreUrl: string, - dataSourceId: APIDataSourceId, + dataset: APIDataset, layerName: string, connectomeFile: string, synapseIds: Array, @@ -2207,7 +2187,7 @@ export function getSynapseTypes( }> { return doWithToken((token) => Request.sendJSONReceiveJSON( - `${dataStoreUrl}/data/datasets/${dataSourceId.owningOrganization}/${dataSourceId.directoryName}/layers/${layerName}/connectomes/synapses/types?token=${token}`, + `${dataStoreUrl}/data/datasets/${dataset.id}/layers/${layerName}/connectomes/synapses/types?token=${token}`, { data: { connectomeFile, diff --git a/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx b/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx index bb42fa4e915..25a1878a884 100644 --- a/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx +++ b/frontend/javascripts/dashboard/advanced_dataset/dataset_action_view.tsx @@ -163,7 +163,7 @@ function DatasetActionView(props: Props) { return; } - await deleteDatasetOnDisk(dataset.dataStore.url, dataset); + await deleteDatasetOnDisk(dataset.dataStore.url, dataset.id); Toast.success( messages["dataset.delete_success"]({ diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx index 9fb89dfb133..12f68dfeb5b 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_data_tab.tsx @@ -339,10 +339,6 @@ function SimpleDatasetForm({ } function getMags(layer: DataLayer) { - if ("wkwResolutions" in layer) { - return layer.wkwResolutions.map((res) => res.resolution); - } - return layer.mags.map((res) => res.mag); } diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_delete_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_delete_tab.tsx index 1feebf653b7..9d5e81beba8 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_delete_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_delete_tab.tsx @@ -40,13 +40,9 @@ const DatasetSettingsDeleteTab = ({ datasetId, navigate }: Props) => { if (!deleteDataset) { return; } - const dataSourceId = { - owningOrganization: dataset.owningOrganization, - directoryName: dataset.directoryName, - }; setIsDeleting(true); - await deleteDatasetOnDisk(dataset.dataStore.url, dataSourceId); + await deleteDatasetOnDisk(dataset.dataStore.url, dataset.id); Toast.success( messages["dataset.delete_success"]({ datasetName: dataset.name, diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx index a69f9283680..af627c2a582 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_view.tsx @@ -366,7 +366,7 @@ class DatasetSettingsView extends React.PureComponent ) : null /* null case should never be rendered as tabs are only rendered when the dataset is loaded. 
*/ diff --git a/frontend/javascripts/dashboard/dataset/dataset_settings_viewconfig_tab.tsx b/frontend/javascripts/dashboard/dataset/dataset_settings_viewconfig_tab.tsx index 3ac2277deb6..b30a3ae2a7f 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_settings_viewconfig_tab.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_settings_viewconfig_tab.tsx @@ -23,7 +23,7 @@ import messages, { settingsTooltips, } from "messages"; import { useMemo, useState } from "react"; -import type { APIDataSourceId } from "types/api_types"; +import type { APIDataset } from "types/api_types"; import { getDefaultLayerViewConfiguration } from "types/schemas/dataset_view_configuration.schema"; import { syncValidator, validateLayerViewConfigurationObjectJSON } from "types/validation"; import { BLEND_MODES } from "viewer/constants"; @@ -34,64 +34,68 @@ import { FormItemWithInfo, jsonEditStyle } from "./helper_components"; const FormItem = Form.Item; export default function DatasetSettingsViewConfigTab(props: { - dataSourceId: APIDataSourceId; + dataset: APIDataset | null | undefined; dataStoreURL: string | undefined; }) { - const { dataSourceId, dataStoreURL } = props; + const { dataset, dataStoreURL } = props; const [availableMappingsPerLayerCache, setAvailableMappingsPerLayer] = useState< Record >({}); const validateDefaultMappings = useMemo( - () => async (configStr: string, dataStoreURL: string, dataSourceId: APIDataSourceId) => { - let config = {} as DatasetConfiguration["layers"]; - try { - config = JSON.parse(configStr); - } catch (e: any) { - return Promise.reject(new Error("Invalid JSON format for : " + e.message)); - } - const layerNamesWithDefaultMappings = Object.keys(config).filter( - (layerName) => config[layerName].mapping != null, - ); - - const maybeMappingRequests = layerNamesWithDefaultMappings.map(async (layerName) => { - if (layerName in availableMappingsPerLayerCache) { - return availableMappingsPerLayerCache[layerName]; - } + () => + async (configStr: string, dataStoreURL: string, dataset: APIDataset | null | undefined) => { + let config = {} as DatasetConfiguration["layers"]; try { - const jsonAndAgglomerateMappings = await Promise.all([ - getMappingsForDatasetLayer(dataStoreURL, dataSourceId, layerName), - getAgglomeratesForDatasetLayer(dataStoreURL, dataSourceId, layerName), - ]); - setAvailableMappingsPerLayer((prev) => ({ - ...prev, - [layerName]: jsonAndAgglomerateMappings, - })); - return jsonAndAgglomerateMappings; + config = JSON.parse(configStr); } catch (e: any) { - console.error(e); - throw new Error(messages["mapping.loading_failed"](layerName)); + return Promise.reject(new Error("Invalid JSON format for : " + e.message)); } - }); - const mappings = await Promise.all(maybeMappingRequests); - const errors = layerNamesWithDefaultMappings - .map((layerName, index) => { - const [mappingsForLayer, agglomeratesForLayer] = mappings[index]; - const mappingType = config[layerName]?.mapping?.type; - const mappingName = config[layerName]?.mapping?.name; - const doesMappingExist = - mappingType === "HDF5" - ? agglomeratesForLayer.some((agglomerate) => agglomerate === mappingName) - : mappingsForLayer.some((mapping) => mapping === mappingName); - return doesMappingExist - ? 
null - : `The mapping "${mappingName}" of type "${mappingType}" does not exist for layer ${layerName}.`; - }) - .filter((error) => error != null); - if (errors.length > 0) { - throw new Error("The following mappings are invalid: " + errors.join("\n")); - } - }, + const layerNamesWithDefaultMappings = Object.keys(config).filter( + (layerName) => config[layerName].mapping != null, + ); + + const maybeMappingRequests = layerNamesWithDefaultMappings.map(async (layerName) => { + if (layerName in availableMappingsPerLayerCache) { + return availableMappingsPerLayerCache[layerName]; + } + try { + if (!dataset) { + throw new Error("Dataset is not defined."); + } + const jsonAndAgglomerateMappings = await Promise.all([ + getMappingsForDatasetLayer(dataStoreURL, dataset, layerName), + getAgglomeratesForDatasetLayer(dataStoreURL, dataset, layerName), + ]); + setAvailableMappingsPerLayer((prev) => ({ + ...prev, + [layerName]: jsonAndAgglomerateMappings, + })); + return jsonAndAgglomerateMappings; + } catch (e: any) { + console.error(e); + throw new Error(messages["mapping.loading_failed"](layerName)); + } + }); + const mappings = await Promise.all(maybeMappingRequests); + const errors = layerNamesWithDefaultMappings + .map((layerName, index) => { + const [mappingsForLayer, agglomeratesForLayer] = mappings[index]; + const mappingType = config[layerName]?.mapping?.type; + const mappingName = config[layerName]?.mapping?.name; + const doesMappingExist = + mappingType === "HDF5" + ? agglomeratesForLayer.some((agglomerate) => agglomerate === mappingName) + : mappingsForLayer.some((mapping) => mapping === mappingName); + return doesMappingExist + ? null + : `The mapping "${mappingName}" of type "${mappingType}" does not exist for layer ${layerName}.`; + }) + .filter((error) => error != null); + if (errors.length > 0) { + throw new Error("The following mappings are invalid: " + errors.join("\n")); + } + }, [availableMappingsPerLayerCache], ); @@ -305,7 +309,7 @@ export default function DatasetSettingsViewConfigTab(props: { Promise.all([ validateLayerViewConfigurationObjectJSON(_rule, config), dataStoreURL - ? validateDefaultMappings(config, dataStoreURL, dataSourceId) + ? 
validateDefaultMappings(config, dataStoreURL, dataset) : Promise.resolve(), ]), }, diff --git a/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts b/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts index 643984a3cbd..59bc7a78079 100644 --- a/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts +++ b/frontend/javascripts/test/backend-snapshot-tests/datasets.e2e.ts @@ -96,30 +96,36 @@ describe("Dataset API (E2E)", () => { // expect(true).toBe(true); // }); + async function getTestDatasetId(datasetName: string = "test-dataset"): Promise { + let datasets = await api.getActiveDatasetsOfMyOrganization(); + datasets = _.sortBy(datasets, (d) => d.name); + const dataset = datasets.find((d) => d.name === datasetName); + if (!dataset) { + throw new Error(`Dataset with name ${datasetName} not found`); + } + return dataset.id; + } + it("Zarr streaming", async () => { - const zarrAttributesResponse = await fetch( - "/data/zarr/Organization_X/test-dataset/segmentation/.zattrs", - { - headers: new Headers(), - }, - ); + const datasetId = await getTestDatasetId(); + const zarrAttributesResponse = await fetch(`/data/zarr/${datasetId}/segmentation/.zattrs`, { + headers: new Headers(), + }); const zarrAttributes = await zarrAttributesResponse.text(); expect(zarrAttributes).toMatchSnapshot(); - const rawDataResponse = await fetch( - "/data/zarr/Organization_X/test-dataset/segmentation/1/0.1.1.0", - { - headers: new Headers(), - }, - ); + const rawDataResponse = await fetch(`/data/zarr/${datasetId}/segmentation/1/0.1.1.0`, { + headers: new Headers(), + }); const bytes = await rawDataResponse.arrayBuffer(); const base64 = btoa(String.fromCharCode(...new Uint8Array(bytes.slice(-128)))); expect(base64).toMatchSnapshot(); }); it("Zarr 3 streaming", async () => { + const datasetId = await getTestDatasetId(); const zarrJsonResp = await fetch( - "/data/zarr3_experimental/Organization_X/test-dataset/segmentation/zarr.json", + `/data/zarr3_experimental/${datasetId}/segmentation/zarr.json`, { headers: new Headers(), }, @@ -128,7 +134,7 @@ describe("Dataset API (E2E)", () => { expect(zarrJson).toMatchSnapshot(); const rawDataResponse = await fetch( - "/data/zarr3_experimental/Organization_X/test-dataset/segmentation/1/0.1.1.0", + `/data/zarr3_experimental/${datasetId}/segmentation/1/0.1.1.0`, { headers: new Headers(), }, @@ -139,8 +145,9 @@ describe("Dataset API (E2E)", () => { }); it("Dataset Paths", async () => { + const datasetId = await getTestDatasetId(); const paths = await fetch( - "/api/datastores/localhost/datasources/Organization_X/test-dataset/paths?key=something-secure", + `/api/datastores/localhost/datasources/${datasetId}/paths?key=something-secure`, ); const pathsJson = await paths.json(); diff --git a/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts b/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts index a0f0332a66f..25030df6d90 100644 --- a/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts +++ b/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts @@ -36,6 +36,7 @@ vi.mock("viewer/store", () => ({ default: { getState: () => ({ dataset: { + id: "datasetId", name: "dataset", directoryName: "datasetPath", dataStore: { @@ -158,11 +159,11 @@ describe("wkstore_adapter", () => { expect(RequestMock.sendJSONReceiveArraybufferWithHeaders).toHaveBeenCalledTimes(2); expect(RequestMock.sendJSONReceiveArraybufferWithHeaders).toHaveBeenCalledWith( - 
"url/data/datasets/organization/datasetPath/layers/color/data?token=token", + "url/data/datasets/datasetId/layers/color/data?token=token", expect.anything(), ); expect(RequestMock.sendJSONReceiveArraybufferWithHeaders).toHaveBeenCalledWith( - "url/data/datasets/organization/datasetPath/layers/color/data?token=token2", + "url/data/datasets/datasetId/layers/color/data?token=token2", expect.anything(), ); }); @@ -194,7 +195,7 @@ describe("wkstore_adapter", () => { layer, }) => { const { batch } = prepare(); - const expectedUrl = "url/data/datasets/organization/datasetPath/layers/color/data?token=token2"; + const expectedUrl = "url/data/datasets/datasetId/layers/color/data?token=token2"; const expectedOptions = createExpectedOptions(); await requestWithFallback(layer, batch).then(() => { @@ -214,7 +215,7 @@ describe("wkstore_adapter", () => { setFourBit(true); // test four bit color and 8 bit seg const { batch } = prepare(); - const expectedUrl = "url/data/datasets/organization/datasetPath/layers/color/data?token=token2"; + const expectedUrl = "url/data/datasets/datasetId/layers/color/data?token=token2"; const expectedOptions = createExpectedOptions(true); const RequestMock = vi.mocked(Request); @@ -233,8 +234,7 @@ describe("wkstore_adapter", () => { }) => { setFourBit(true); const { batch } = prepare(); - const expectedUrl = - "url/data/datasets/organization/datasetPath/layers/segmentation/data?token=token2"; + const expectedUrl = "url/data/datasets/datasetId/layers/segmentation/data?token=token2"; const expectedOptions = createExpectedOptions(false); const RequestMock = vi.mocked(Request); diff --git a/frontend/javascripts/types/api_types.ts b/frontend/javascripts/types/api_types.ts index 9e8aa9e4f26..59c9b28d3ed 100644 --- a/frontend/javascripts/types/api_types.ts +++ b/frontend/javascripts/types/api_types.ts @@ -118,7 +118,6 @@ export type APISkeletonLayer = { category: "skeleton"; name: string }; export type LayerLink = { datasetId: string; - dataSourceId: APIDataSourceId; datasetName: string; sourceName: string; newName: string; diff --git a/frontend/javascripts/types/schemas/datasource.schema.ts b/frontend/javascripts/types/schemas/datasource.schema.ts index 51eff9b2b1a..c976381d665 100644 --- a/frontend/javascripts/types/schemas/datasource.schema.ts +++ b/frontend/javascripts/types/schemas/datasource.schema.ts @@ -49,12 +49,12 @@ export default { boundingBox: { $ref: "#/definitions/types::BoundingBox", }, - wkwResolutions: { + mags: { type: "array", items: { type: "object", properties: { - resolution: { + mag: { anyOf: [ { type: "number", @@ -64,15 +64,27 @@ export default { }, ], }, - cubeLength: { - type: "number", + path: { + type: "string", + }, + credentials: { + type: "object", + properties: { + user: { type: "string" }, + password: { type: "string" }, + }, + required: ["user", "password"], + }, + axisOrder: { + type: "object", + additionalProperties: { type: "number" }, }, }, - required: ["resolution", "cubeLength"], + required: ["mag"], }, }, }, - required: ["dataFormat", "boundingBox", "wkwResolutions"], + required: ["dataFormat", "mags"], }, "types::DataLayerZarrPartial": { title: "DataLayerZarr", diff --git a/frontend/javascripts/types/schemas/datasource.types.ts b/frontend/javascripts/types/schemas/datasource.types.ts index 1764947a7d1..80dbe1a2b20 100644 --- a/frontend/javascripts/types/schemas/datasource.types.ts +++ b/frontend/javascripts/types/schemas/datasource.types.ts @@ -14,13 +14,8 @@ type BoundingBox = { height: number; depth: number; }; -type DataLayerWKWPartial 
= { +type DataLayerWKWPartial = BaseRemoteLayer & { dataFormat: "wkw"; - boundingBox: BoundingBox; - wkwResolutions: Array<{ - resolution: number | Vector3; - cubeLength: number; - }>; }; type AxisKey = "x" | "y" | "z" | "c"; diff --git a/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts index 84ee056fa4d..f293490881a 100644 --- a/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts @@ -97,16 +97,12 @@ export async function requestWithFallback( batch: Array, ): Promise | null | undefined>> { const state = Store.getState(); - const datasetDirectoryName = state.dataset.directoryName; - const organization = state.dataset.owningOrganization; + const datasetId = state.dataset.id; const dataStoreHost = state.dataset.dataStore.url; const tracingStoreHost = state.annotation.tracingStore.url; const getDataStoreUrl = (optLayerName?: string) => - `${dataStoreHost}/data/datasets/${organization}/${datasetDirectoryName}/layers/${ - optLayerName || layerInfo.name - }`; - + `${dataStoreHost}/data/datasets/${datasetId}/layers/${optLayerName || layerInfo.name}`; const getTracingStoreUrl = () => `${tracingStoreHost}/tracings/volume/${layerInfo.name}`; const maybeVolumeTracing = diff --git a/frontend/javascripts/viewer/model/sagas/load_histogram_data_saga.ts b/frontend/javascripts/viewer/model/sagas/load_histogram_data_saga.ts index 1fdce225278..9d3bc29c951 100644 --- a/frontend/javascripts/viewer/model/sagas/load_histogram_data_saga.ts +++ b/frontend/javascripts/viewer/model/sagas/load_histogram_data_saga.ts @@ -37,7 +37,7 @@ function* loadHistogramForLayer(layerName: string): Saga { let histogram; try { - histogram = yield* call(getHistogramForLayer, dataset.dataStore.url, dataset, layerName); + histogram = yield* call(getHistogramForLayer, dataset.dataStore.url, dataset.id, layerName); if (!Array.isArray(histogram) || histogram.length === 0) { yield* put(setHistogramDataForLayerAction(layerName, null)); diff --git a/frontend/javascripts/viewer/model/sagas/meshes/precomputed_mesh_saga.ts b/frontend/javascripts/viewer/model/sagas/meshes/precomputed_mesh_saga.ts index d9d709d5fdf..3cfeccd47f9 100644 --- a/frontend/javascripts/viewer/model/sagas/meshes/precomputed_mesh_saga.ts +++ b/frontend/javascripts/viewer/model/sagas/meshes/precomputed_mesh_saga.ts @@ -284,7 +284,7 @@ function* _getChunkLoadingDescriptors( const segmentInfo = yield* call( meshApi.getMeshfileChunksForSegment, dataset.dataStore.url, - dataset, + dataset.id, getBaseSegmentationName(segmentationLayer), meshFile, segmentId, @@ -364,7 +364,7 @@ function* loadPrecomputedMeshesInChunksForLod( const dataForChunks = yield* call( meshApi.getMeshfileChunkData, dataset.dataStore.url, - dataset, + dataset.id, getBaseSegmentationName(segmentationLayer), { meshFileName: meshFile.name, diff --git a/frontend/javascripts/viewer/view/right-border-tabs/connectome_tab/connectome_view.tsx b/frontend/javascripts/viewer/view/right-border-tabs/connectome_tab/connectome_view.tsx index fc374862aeb..ccd2f031606 100644 --- a/frontend/javascripts/viewer/view/right-border-tabs/connectome_tab/connectome_view.tsx +++ b/frontend/javascripts/viewer/view/right-border-tabs/connectome_tab/connectome_view.tsx @@ -12,12 +12,7 @@ import Toast from "libs/toast"; import { diffArrays, map3, safeZipObject, unique } from "libs/utils"; import React from "react"; import { connect } from 
"react-redux"; -import type { - APIConnectomeFile, - APIDataSourceId, - APIDataset, - APISegmentationLayer, -} from "types/api_types"; +import type { APIConnectomeFile, APIDataset, APISegmentationLayer } from "types/api_types"; import { TreeTypeEnum, type Vector3 } from "viewer/constants"; import Constants, { MappingStatusEnum } from "viewer/constants"; import getSceneController from "viewer/controller/scene_controller_provider"; @@ -355,7 +350,7 @@ class ConnectomeView extends React.Component { activeAgglomerateIds.length === 0 ) return; - const fetchProperties: [string, APIDataSourceId, string, string] = [ + const fetchProperties: [string, APIDataset, string, string] = [ dataset.dataStore.url, dataset, getBaseSegmentationName(segmentationLayer), diff --git a/frontend/javascripts/viewer/view/right-border-tabs/segments_tab/segments_view_helper.tsx b/frontend/javascripts/viewer/view/right-border-tabs/segments_tab/segments_view_helper.tsx index 8656b7f2b82..6860691aa9b 100644 --- a/frontend/javascripts/viewer/view/right-border-tabs/segments_tab/segments_view_helper.tsx +++ b/frontend/javascripts/viewer/view/right-border-tabs/segments_tab/segments_view_helper.tsx @@ -76,9 +76,8 @@ export async function hasSegmentIndex( if (maybeVolumeTracing == null) { segmentIndexInDataStore = await hasSegmentIndexInDataStore( dataset.dataStore.url, - dataset.directoryName, + dataset.id, visibleSegmentationLayer.name, - dataset.owningOrganization, ); } return ( diff --git a/test/db/dataSets.csv b/test/db/dataSets.csv index 515f63d2f69..091a4ae2cb1 100644 --- a/test/db/dataSets.csv +++ b/test/db/dataSets.csv @@ -1,7 +1,7 @@ -_id,_dataStore,_organization,_publication,_uploader,_folder,inboxSourceHash,defaultViewConfiguration,adminViewConfiguration,description,directoryName,isPublic,isUsable,name,voxelSizeFactor,voxelSizeUnit,status,sharingToken,logoUrl,sortingKey,metadata,tags,created,isDeleted -'570b9f4e4bb848d0885ee711','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'2012-06-28_Cortex',f,f,'2012-06-28_Cortex',,,'No longer available on datastore.',,,'2016-04-11T12:57:50.082Z','[]',{},'2016-04-11T12:57:50.082Z',f -'570b9f4e4bb848d0885ee712','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'Experiment_001',f,f,'Experiment_001',,,'No longer available on datastore.',,,'2016-04-11T12:57:50.079Z','[]',{},'2016-04-11T12:57:50.079Z',f -'570b9f4e4bb848d0885ee713','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'2012-09-28_ex145_07x2',f,f,'2012-09-28_ex145_07x2',,,'No longer available on datastore.',,,'2016-04-11T12:57:50.080Z','[]',{},'2016-04-11T12:57:50.080Z',f -'570b9fd34bb848d0885ee716','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'rgb',f,f,'rgb',,,'No longer available on datastore.',,,'2016-04-11T13:00:03.792Z',[],{},'2016-04-11T13:00:03.792Z',f -'59e9cfbdba632ac2ab8b23b3','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'confocal-multi_knossos',f,t,'confocal-multi_knossos','(22,22,44.599998474121094)','nanometer','',,,'2017-10-20T10:28:13.763Z','[{"key": "key","type": "number","value": 4}]',{},'2017-10-20T10:28:13.763Z',f -'59e9cfbdba632ac2ab8b23b5','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'l4_sample',t,t,'l4_sample','(11.239999771118164,11.239999771118164,28)','nanometer','',,,'2017-10-20T10:28:13.789Z','[]',{},'2017-10-20T10:28:13.789Z',f 
+_id,_dataStore,_organization,_publication,_uploader,_folder,inboxSourceHash,defaultViewConfiguration,adminViewConfiguration,description,directoryName,isPublic,isUsable,isVirtual,name,voxelSizeFactor,voxelSizeUnit,status,sharingToken,logoUrl,sortingKey,metadata,tags,created,isDeleted +'570b9f4e4bb848d0885ee711','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'2012-06-28_Cortex',f,f,f,'2012-06-28_Cortex',,,'No longer available on datastore.',,,'2016-04-11T12:57:50.082Z','[]',{},'2016-04-11T12:57:50.082Z',f +'570b9f4e4bb848d0885ee712','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'Experiment_001',f,f,f,'Experiment_001',,,'No longer available on datastore.',,,'2016-04-11T12:57:50.079Z','[]',{},'2016-04-11T12:57:50.079Z',f +'570b9f4e4bb848d0885ee713','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'2012-09-28_ex145_07x2',f,f,f,'2012-09-28_ex145_07x2',,,'No longer available on datastore.',,,'2016-04-11T12:57:50.080Z','[]',{},'2016-04-11T12:57:50.080Z',f +'570b9fd34bb848d0885ee716','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'rgb',f,f,f,'rgb',,,'No longer available on datastore.',,,'2016-04-11T13:00:03.792Z',[],{},'2016-04-11T13:00:03.792Z',f +'59e9cfbdba632ac2ab8b23b3','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'confocal-multi_knossos',f,t,f,'confocal-multi_knossos','(22,22,44.599998474121094)','nanometer','',,,'2017-10-20T10:28:13.763Z','[{"key": "key","type": "number","value": 4}]',{},'2017-10-20T10:28:13.763Z',f +'59e9cfbdba632ac2ab8b23b5','localhost','Organization_X',,,'570b9f4e4bb848d0885ea917',,,,,'l4_sample',t,t,f,'l4_sample','(11.239999771118164,11.239999771118164,28)','nanometer','',,,'2017-10-20T10:28:13.789Z','[]',{},'2017-10-20T10:28:13.789Z',f diff --git a/tools/postgres/schema.sql b/tools/postgres/schema.sql index bf470e2dd2f..d37023310bf 100644 --- a/tools/postgres/schema.sql +++ b/tools/postgres/schema.sql @@ -21,7 +21,7 @@ CREATE TABLE webknossos.releaseInformation ( schemaVersion BIGINT NOT NULL ); -INSERT INTO webknossos.releaseInformation(schemaVersion) values(136); +INSERT INTO webknossos.releaseInformation(schemaVersion) values(137); COMMIT TRANSACTION; @@ -107,6 +107,7 @@ CREATE TABLE webknossos.datasets( name TEXT NOT NULL, isPublic BOOLEAN NOT NULL DEFAULT FALSE, isUsable BOOLEAN NOT NULL DEFAULT FALSE, + isVirtual BOOLEAN NOT NULL DEFAULT FALSE, directoryName TEXT NOT NULL, voxelSizeFactor webknossos.VECTOR3, voxelSizeUnit webknossos.LENGTH_UNIT, @@ -189,7 +190,6 @@ CREATE TABLE webknossos.dataset_mags( hasLocalData BOOLEAN NOT NULL DEFAULT FALSE, axisOrder JSONB CONSTRAINT axisOrder_requiredKeys CHECK (axisOrder ? 'x' AND axisOrder ? 'y'), channelIndex INT, - cubeLength INT, credentialId TEXT, PRIMARY KEY (_dataset, dataLayerName, mag) ); diff --git a/unreleased_changes/8708.md b/unreleased_changes/8708.md new file mode 100644 index 00000000000..a7c29f86fd5 --- /dev/null +++ b/unreleased_changes/8708.md @@ -0,0 +1,10 @@ +### Changed +- A new directory is no longer created for new remote or composed datasets. + +### Breaking Changes +- Datasets are now referenced by their ID instead of the combination of the organization and the directory name. +- Newly uploaded datasets do not use the `wkwResolutions` property anymore, but instead use `mags` like imported remote datasets. +- Interacting with newly created datasets requires libraries to support API version 10 or higher.
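To make the first breaking change above concrete, here is a small, hypothetical TypeScript sketch of how a client library might migrate from the old organization/directory-name addressing to the new dataset-id addressing of datastore layer routes. Only the route pattern is taken from this diff; the helper name, the datastore URL, and the layer name are placeholders.

```typescript
// Hypothetical migration sketch; only the route pattern is taken from this diff.

// Before (pre API v10), layer routes were addressed by organization and directory name:
//   `${datastoreUrl}/data/datasets/${organizationId}/${directoryName}/layers/${layerName}/...`
// After, they are addressed by the dataset id:
function layerRouteForDataset(datastoreUrl: string, datasetId: string, layerName: string): string {
  return `${datastoreUrl}/data/datasets/${datasetId}/layers/${layerName}`;
}

// Usage with placeholder values: the histogram endpoint of one layer.
const histogramUrl = `${layerRouteForDataset(
  "https://datastore.example.org", // placeholder datastore URL
  "59e9cfbdba632ac2ab8b23b5", // a dataset id, e.g. one of the ids from test/db/dataSets.csv above
  "segmentation", // placeholder layer name
)}/histogram`;
```

The same pattern applies to the mappings, agglomerates, meshes, and connectomes routes touched in the frontend admin API earlier in this diff.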
+ +### Postgres Evolutions +- [137-virtual-datasets.sql](conf/evolutions/137-virtual-datasets.sql) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala index 4ff41ea6eaa..4ef577b43cb 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/DataStoreModule.scala @@ -64,5 +64,6 @@ class DataStoreModule extends AbstractModule { bind(classOf[RemoteSourceDescriptorService]).asEagerSingleton() bind(classOf[ChunkCacheService]).asEagerSingleton() bind(classOf[DatasetCache]).asEagerSingleton() + bind(classOf[ZarrStreamingService]).asEagerSingleton() } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala index 7c2fd129b55..3ee50bc6e68 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala @@ -4,6 +4,7 @@ import com.google.inject.Inject import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.image.{Color, JPEGWriter} +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.DataStoreConfig @@ -32,7 +33,7 @@ import java.nio.{ByteBuffer, ByteOrder} import scala.concurrent.ExecutionContext class BinaryDataController @Inject()( - dataSourceRepository: DataSourceRepository, + datasetCache: DatasetCache, config: DataStoreConfig, accessTokenService: DataStoreAccessTokenService, binaryDataServiceHolder: BinaryDataServiceHolder, @@ -51,38 +52,32 @@ class BinaryDataController @Inject()( (binaryDataService, mappingService, config.Datastore.AdHocMesh.timeout, config.Datastore.AdHocMesh.actorPoolSize) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.dataStoreAdHocMeshService - def requestViaWebknossos( - organizationId: String, - datasetDirectoryName: String, - dataLayerName: String - ): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - logTime(slackNotificationService.noticeSlowRequest) { - val t = Instant.now - for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND - (data, indices) <- requestData(dataSource.id, dataLayer, request.body) - duration = Instant.since(t) - _ = if (duration > (10 seconds)) - logger.info( - s"Complete data request for $organizationId/$datasetDirectoryName/$dataLayerName took ${formatDuration(duration)}." 
- + request.body.headOption - .map(firstReq => s" First of ${request.body.size} requests was $firstReq") - .getOrElse("")) - } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + def requestViaWebknossos(datasetId: ObjectId, dataLayerName: String): Action[List[WebknossosDataRequest]] = + Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { + logTime(slackNotificationService.noticeSlowRequest) { + val t = Instant.now + for { + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND + (data, indices) <- requestData(dataSource.id, dataLayer, request.body) + duration = Instant.since(t) + _ = if (duration > (10 seconds)) + logger.info( + s"Complete data request for $datasetId/$dataLayerName took ${formatDuration(duration)}." + + request.body.headOption + .map(firstReq => s" First of ${request.body.size} requests was $firstReq") + .getOrElse("")) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + } } } - } /** * Handles requests for raw binary data via HTTP GET. */ def requestRawCuboid( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, // Mag1 coordinates of the top-left corner of the bounding box x: Int, @@ -98,12 +93,9 @@ class BinaryDataController @Inject()( halfByte: Boolean, mappingName: Option[String] ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" dataRequest = DataRequest( VoxelPosition(x, y, z, magParsed), @@ -117,39 +109,30 @@ class BinaryDataController @Inject()( } } - def requestRawCuboidPost( - organizationId: String, - datasetDirectoryName: String, - dataLayerName: String - ): Action[RawCuboidRequest] = Action.async(validateJson[RawCuboidRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND - (data, indices) <- requestData(dataSource.id, dataLayer, request.body) - } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + def requestRawCuboidPost(datasetId: ObjectId, dataLayerName: String): Action[RawCuboidRequest] = + Action.async(validateJson[RawCuboidRequest]) { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { + for { + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND + (data, indices) <- requestData(dataSource.id, dataLayer, request.body) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + } } - } /** * Handles a request for raw binary data via a HTTP GET. Used by knossos. 
*/ - def requestViaKnossos(organizationId: String, - datasetDirectoryName: String, + def requestViaKnossos(datasetId: ObjectId, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND dataRequest = DataRequest( VoxelPosition(x * cubeSize * mag, y * cubeSize * mag, z * cubeSize * mag, Vec3Int(mag, mag, mag)), cubeSize, @@ -161,8 +144,7 @@ class BinaryDataController @Inject()( } } - def thumbnailJpeg(organizationId: String, - datasetDirectoryName: String, + def thumbnailJpeg(datasetId: ObjectId, dataLayerName: String, x: Int, y: Int, @@ -175,12 +157,9 @@ class BinaryDataController @Inject()( intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> Messages( + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" dataRequest = DataRequest( @@ -217,17 +196,14 @@ class BinaryDataController @Inject()( } def mappingJson( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> Messages("dataLayer.notFound") mappingRequest = DataServiceMappingRequest(Some(dataSource.id), segmentationLayer, mappingName) result <- mappingService.handleMappingRequest(mappingRequest) @@ -238,16 +214,12 @@ class BinaryDataController @Inject()( /** * Handles ad-hoc mesh requests. 
*/ - def requestAdHocMesh(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[WebknossosAdHocMeshRequest] = + def requestAdHocMesh(datasetId: ObjectId, dataLayerName: String): Action[WebknossosAdHocMeshRequest] = Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> "dataLayer.mustBeSegmentation" adHocMeshRequest = AdHocMeshRequest( Some(dataSource.id), @@ -280,27 +252,22 @@ class BinaryDataController @Inject()( private def formatNeighborList(neighbors: List[Int]): String = "[" + neighbors.mkString(", ") + "]" - def findData(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = + def findData(datasetId: ObjectId, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND positionAndMagOpt <- findDataService.findPositionWithData(dataSource.id, dataLayer) } yield Ok(Json.obj("position" -> positionAndMagOpt.map(_._1), "mag" -> positionAndMagOpt.map(_._2))) } } - def histogram(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = + def histogram(datasetId: ObjectId, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> Messages( + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ?~> Messages( "dataSource.notFound") ~> NOT_FOUND ?~> Messages("histogram.layerMissing", dataLayerName) listOfHistograms <- findDataService.createHistogram(dataSource.id, dataLayer) ?~> Messages("histogram.failed", dataLayerName) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala index 272c7cabacb..140d5099f4a 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DSMeshController.scala @@ -1,7 +1,7 @@ package 
com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject -import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.webknossos.datastore.services._ import com.scalableminds.webknossos.datastore.services.mesh.{ DSFullMeshService, @@ -20,7 +20,7 @@ class DSMeshController @Inject()( accessTokenService: DataStoreAccessTokenService, meshFileService: MeshFileService, fullMeshService: DSFullMeshService, - dataSourceRepository: DataSourceRepository, + datasetCache: DatasetCache, val dsRemoteWebknossosClient: DSRemoteWebknossosClient, val dsRemoteTracingstoreClient: DSRemoteTracingstoreClient, val binaryDataServiceHolder: BinaryDataServiceHolder @@ -30,21 +30,17 @@ class DSMeshController @Inject()( override def allowRemoteOrigin: Boolean = true - def listMeshFiles(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = + def listMeshFiles(datasetId: ObjectId, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND meshFileInfos <- meshFileService.listMeshFiles(dataSource.id, dataLayer) } yield Ok(Json.toJson(meshFileInfos)) } } - def listMeshChunksForSegment(organizationId: String, - datasetDirectoryName: String, + def listMeshChunksForSegment(datasetId: ObjectId, dataLayerName: String, /* If targetMappingName is set, assume that meshFile contains meshes for the oversegmentation. 
Collect mesh chunks of all *unmapped* segment ids @@ -55,12 +51,9 @@ class DSMeshController @Inject()( targetMappingName: Option[String], editableMappingTracingId: Option[String]): Action[ListMeshChunksRequest] = Action.async(validateJson[ListMeshChunksRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND meshFileKey <- meshFileService.lookUpMeshFileKey(dataSource.id, dataLayer, request.body.meshFileName) mappingNameForMeshFile <- meshFileService.mappingNameForMeshFile(meshFileKey) segmentIds: Seq[Long] <- segmentIdsForAgglomerateIdIfNeeded( @@ -77,16 +70,11 @@ class DSMeshController @Inject()( } } - def readMeshChunk(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[MeshChunkDataRequestList] = + def readMeshChunk(datasetId: ObjectId, dataLayerName: String): Action[MeshChunkDataRequestList] = Action.async(validateJson[MeshChunkDataRequestList]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND meshFileKey <- meshFileService.lookUpMeshFileKey(dataSource.id, dataLayer, request.body.meshFileName) (data, encoding) <- meshFileService.readMeshChunk(meshFileKey, request.body.requests) ?~> "mesh.file.loadChunk.failed" } yield { @@ -97,17 +85,12 @@ class DSMeshController @Inject()( } } - def loadFullMeshStl(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[FullMeshRequest] = + def loadFullMeshStl(datasetId: ObjectId, dataLayerName: String): Action[FullMeshRequest] = Action.async(validateJson[FullMeshRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - data: Array[Byte] <- fullMeshService.loadFor(organizationId, - datasetDirectoryName, - dataLayerName, - request.body) ?~> "mesh.file.loadChunk.failed" + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND + data: Array[Byte] <- fullMeshService.loadFor(dataSource, dataLayer, request.body) ?~> "mesh.file.loadChunk.failed" } yield Ok(data) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index f55638b663e..9352a80f2d5 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -1,9 +1,11 @@ package 
com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant -import com.scalableminds.util.tools.Fox +import com.scalableminds.util.tools.{Box, Empty, Failure, Fox, FoxImplicits, Full} import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong import com.scalableminds.webknossos.datastore.explore.{ ExploreRemoteDatasetRequest, @@ -16,15 +18,12 @@ import com.scalableminds.webknossos.datastore.helpers.{ SegmentIndexData, SegmentStatisticsParameters } -import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSource import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId, GenericDataSource} import com.scalableminds.webknossos.datastore.services._ import com.scalableminds.webknossos.datastore.services.mesh.{MeshFileService, MeshMappingHelper} import com.scalableminds.webknossos.datastore.services.segmentindex.SegmentIndexFileService import com.scalableminds.webknossos.datastore.services.uploading._ import com.scalableminds.webknossos.datastore.storage.DataVaultService -import com.scalableminds.util.tools.Box.tryo -import com.scalableminds.util.tools.{Box, Empty, Failure, Full} import com.scalableminds.webknossos.datastore.services.connectome.{ ByAgglomerateIdsRequest, BySynapseIdsRequest, @@ -34,7 +33,6 @@ import com.scalableminds.webknossos.datastore.services.connectome.{ import com.scalableminds.webknossos.datastore.services.mapping.AgglomerateService import play.api.data.Form import play.api.data.Forms.{longNumber, nonEmptyText, number, tuple} -import play.api.i18n.Messages import play.api.libs.Files import play.api.libs.json.Json import play.api.mvc.{Action, AnyContent, MultipartFormData, PlayBodyParsers} @@ -58,30 +56,23 @@ class DataSourceController @Inject()( datasetErrorLoggingService: DSDatasetErrorLoggingService, exploreRemoteLayerService: ExploreRemoteLayerService, uploadService: UploadService, - composeService: ComposeService, meshFileService: MeshFileService, val dsRemoteWebknossosClient: DSRemoteWebknossosClient, val dsRemoteTracingstoreClient: DSRemoteTracingstoreClient, )(implicit bodyParsers: PlayBodyParsers, ec: ExecutionContext) extends Controller - with MeshMappingHelper { + with MeshMappingHelper + with FoxImplicits { override def allowRemoteOrigin: Boolean = true - def readInboxDataSource(organizationId: String, datasetDirectoryName: String): Action[AnyContent] = - Action.async { implicit request => - { - accessTokenService.validateAccessFromTokenContextForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - // Read directly from file, not from repository to ensure recent changes are seen - val dataSource: InboxDataSource = - dataSourceService.dataSourceFromDir( - dataSourceService.dataBaseDir.resolve(organizationId).resolve(datasetDirectoryName), - organizationId) - Ok(Json.toJson(dataSource)) - } - } + def readInboxDataSource(datasetId: ObjectId): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { + for { + dataSource <- refreshDataSource(datasetId) + } yield Ok(Json.toJson(dataSource)) } + } def triggerInboxCheckBlocking(): Action[AnyContent] = Action.async { implicit request => 
accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { @@ -252,46 +243,41 @@ class DataSourceController @Inject()( } def listMappings( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContextForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - addNoCacheHeaderFallback( - Ok(Json.toJson(dataSourceService.exploreMappings(organizationId, datasetDirectoryName, dataLayerName)))) + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { + for { + dataSource <- datasetCache.getById(datasetId) + dataSourceId = dataSource.id // We would ideally want to use datasetId here as well, but mappings are not accessed by datasetId yet. + exploredMappings = dataSourceService.exploreMappings(dataSourceId.organizationId, + dataSourceId.directoryName, + dataLayerName) + } yield addNoCacheHeaderFallback(Ok(Json.toJson(exploredMappings))) } } def listAgglomerates( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateList = agglomerateService.listAgglomeratesFiles(dataSource.id, dataLayer) } yield Ok(Json.toJson(agglomerateList)) } } def generateAgglomerateSkeleton( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) skeleton <- agglomerateService .generateSkeleton(agglomerateFileKey, agglomerateId) ?~> "agglomerateSkeleton.failed" @@ -300,18 +286,14 @@ class DataSourceController @Inject()( } def agglomerateGraph( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mappingName: String, agglomerateId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- 
datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) agglomerateGraph <- agglomerateService .generateAgglomerateGraph(agglomerateFileKey, agglomerateId) ?~> "agglomerateGraph.failed" @@ -320,18 +302,14 @@ class DataSourceController @Inject()( } def positionForSegmentViaAgglomerateFile( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mappingName: String, segmentId: Long ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) position <- agglomerateService .positionForSegmentId(agglomerateFileKey, segmentId) ?~> "getSegmentPositionFromAgglomerateFile.failed" @@ -340,17 +318,13 @@ class DataSourceController @Inject()( } def largestAgglomerateId( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mappingName: String ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) largestAgglomerateId: Long <- agglomerateService.largestAgglomerateId(agglomerateFileKey) } yield Ok(Json.toJson(largestAgglomerateId)) @@ -358,17 +332,13 @@ class DataSourceController @Inject()( } def agglomerateIdsForSegmentIds( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mappingName: String ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND agglomerateFileKey <- agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, mappingName) agglomerateIds: Seq[Long] <- agglomerateService.agglomerateIdsForSegmentIds( agglomerateFileKey, @@ -378,44 +348,33 @@ class DataSourceController @Inject()( } } - def update(organizationId: String, datasetDirectoryName: String): Action[DataSource] = + def update(datasetId: ObjectId): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => - 
accessTokenService.validateAccessFromTokenContext(
-        UserAccessRequest.writeDataSource(DataSourceId(datasetDirectoryName, organizationId))) {
+      accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataset(datasetId)) {
       for {
-          dataSource <- dataSourceRepository.get(DataSourceId(datasetDirectoryName, organizationId)).toFox ?~> Messages(
-            "dataSource.notFound") ~> NOT_FOUND
-          _ <- dataSourceService.updateDataSource(request.body.copy(id = dataSource.id), expectExisting = true)
+          dataSource <- datasetCache.getById(datasetId) ~> NOT_FOUND
+          updatedDataSource = request.body.copy(id = dataSource.id)
+          // While some data sources are still stored on disk, we need to update the on-disk copy if it exists.
+          // Once no data sources are stored on disk anymore, this route could be removed and the frontend could call WK directly.
+          _ <- if (dataSourceService.existsOnDisk(dataSource.id.organizationId, dataSource.id.directoryName)) {
+            dataSourceService.updateDataSourceOnDisk(updatedDataSource, expectExisting = true)
+          } else
+            dsRemoteWebknossosClient.updateDataSource(updatedDataSource, datasetId)
       } yield Ok
     }
   }

-  // Stores a remote dataset in the database.
+  // Called by the frontend after the user has set the datasetName / folderId of an explored dataSource.
+  // This route adds that data source to the WK database.
   def add(organizationId: String, datasetName: String, folderId: Option[String]): Action[DataSource] =
     Action.async(validateJson[DataSource]) { implicit request =>
       accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) {
         for {
-          reservedAdditionalInfo <- dsRemoteWebknossosClient.reserveDataSourceUpload(
-            ReserveUploadInformation(
-              uploadId = "", // Set by core backend
-              name = datasetName,
-              organization = organizationId,
-              totalFileCount = 1,
-              filePaths = None,
-              totalFileSizeInBytes = None,
-              layersToLink = None,
-              initialTeams = List.empty,
-              folderId = folderId,
-              requireUniqueName = Some(false),
-            )
-          ) ?~> "dataset.upload.validation.failed"
-          datasourceId = DataSourceId(reservedAdditionalInfo.directoryName, organizationId)
-          _ <- dataSourceService.updateDataSource(request.body.copy(id = datasourceId), expectExisting = false)
-          uploadedDatasetId <- dsRemoteWebknossosClient.reportUpload(datasourceId,
-                                                                     0L,
-                                                                     needsConversion = false,
-                                                                     viaAddRoute = true) ?~> "reportUpload.failed"
-        } yield Ok(Json.obj("newDatasetId" -> uploadedDatasetId))
+          _ <- Fox.successful(())
+          dataSourceId = DataSourceId(datasetName, organizationId)
+          dataSource = request.body.copy(id = dataSourceId)
+          datasetId <- dsRemoteWebknossosClient.registerDataSource(dataSource, dataSourceId, folderId) ?~> "dataset.add.failed"
+        } yield Ok(Json.obj("newDatasetId" -> datasetId))
       }
     }

@@ -448,10 +407,10 @@ class DataSourceController @Inject()(
     }
   }

-  private def clearCachesOfDataSource(organizationId: String,
-                                      datasetDirectoryName: String,
-                                      layerName: Option[String]): InboxDataSource = {
-    val dataSourceId = DataSourceId(datasetDirectoryName, organizationId)
+  private def clearCachesOfDataSource(dataSource: DataSource, layerName: Option[String]): Unit = {
+    val dataSourceId = dataSource.id
+    val organizationId = dataSourceId.organizationId
+    val datasetDirectoryName = dataSourceId.directoryName
     val (closedAgglomerateFileHandleCount, clearedBucketProviderCount, removedChunksCount) =
       binaryDataServiceHolder.binaryDataService.clearCache(organizationId, datasetDirectoryName, layerName)
     val closedMeshFileHandleCount =
@@ -460,83 +419,62 @@ class DataSourceController @Inject()(
segmentIndexFileService.clearCache(dataSourceId, layerName) val closedConnectomeFileHandleCount = connectomeFileService.clearCache(dataSourceId, layerName) - val reloadedDataSource: InboxDataSource = dataSourceService.dataSourceFromDir( - dataSourceService.dataBaseDir.resolve(organizationId).resolve(datasetDirectoryName), - organizationId) datasetErrorLoggingService.clearForDataset(organizationId, datasetDirectoryName) - val clearedVaultCacheEntriesOpt = dataSourceService.invalidateVaultCache(reloadedDataSource, layerName) + val clearedVaultCacheEntriesOpt = dataSourceService.invalidateVaultCache(dataSource, layerName) clearedVaultCacheEntriesOpt.foreach { clearedVaultCacheEntries => logger.info( s"Cleared caches for ${layerName.map(l => s"layer '$l' of ").getOrElse("")}dataset $organizationId/$datasetDirectoryName: closed $closedAgglomerateFileHandleCount agglomerate file handles, $closedMeshFileHandleCount mesh file handles, $closedSegmentIndexFileHandleCount segment index file handles, $closedConnectomeFileHandleCount connectome file handles, removed $clearedBucketProviderCount bucketProviders, $clearedVaultCacheEntries vault cache entries and $removedChunksCount image chunk cache entries.") } - reloadedDataSource } - def reload(organizationId: String, - datasetDirectoryName: String, - layerName: Option[String] = None): Action[AnyContent] = + def reload(organizationId: String, datasetId: ObjectId, layerName: Option[String] = None): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources(organizationId)) { - val reloadedDataSource = clearCachesOfDataSource(organizationId, datasetDirectoryName, layerName) for { - _ <- dataSourceRepository.updateDataSource(reloadedDataSource) + dataSource <- datasetCache.getById(datasetId) ~> NOT_FOUND + _ = clearCachesOfDataSource(dataSource, layerName) + reloadedDataSource <- refreshDataSource(datasetId) } yield Ok(Json.toJson(reloadedDataSource)) } } - def deleteOnDisk(organizationId: String, datasetDirectoryName: String): Action[AnyContent] = + def deleteOnDisk(datasetId: ObjectId): Action[AnyContent] = Action.async { implicit request => - val dataSourceId = DataSourceId(datasetDirectoryName, organizationId) - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.deleteDataSource(dataSourceId)) { - tryo(clearCachesOfDataSource(organizationId, datasetDirectoryName, None)) + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.deleteDataset(datasetId)) { for { - _ <- dataSourceService.deleteOnDisk( - organizationId, - datasetDirectoryName, - reason = Some("the user wants to delete the dataset")) ?~> "dataset.delete.failed" - _ <- dataSourceRepository.removeDataSource(dataSourceId) // also frees the name in the wk-side database + dataSource <- datasetCache.getById(datasetId) ~> NOT_FOUND + dataSourceId = dataSource.id + _ <- if (dataSourceService.existsOnDisk(dataSourceId.organizationId, dataSourceId.directoryName)) { + for { + _ <- dataSourceService.deleteOnDisk( + dataSourceId.organizationId, + dataSourceId.directoryName, + Some(datasetId), + reason = Some("the user wants to delete the dataset")) ?~> "dataset.delete.failed" + _ <- dataSourceRepository.removeDataSource(dataSourceId) // also frees the name in the wk-side database + } yield () + } else { + dsRemoteWebknossosClient.deleteVirtualDataset(datasetId) + } } yield Ok } } - def compose(): Action[ComposeRequest] = - Action.async(validateJson[ComposeRequest]) { implicit request => 
- accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.administrateDataSources(request.body.organizationId)) { - for { - _ <- Fox.serialCombined(request.body.layers.map(_.dataSourceId).toList)(id => - accessTokenService.assertUserAccess(UserAccessRequest.readDataSources(id))) - (dataSource, newDatasetId) <- composeService.composeDataset(request.body) - _ <- dataSourceRepository.updateDataSource(dataSource) - } yield Ok(Json.obj("newDatasetId" -> newDatasetId)) - } - } - - def listConnectomeFiles(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[AnyContent] = + def listConnectomeFiles(datasetId: ObjectId, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND connectomeFileInfos <- connectomeFileService.listConnectomeFiles(dataSource.id, dataLayer) } yield Ok(Json.toJson(connectomeFileInfos)) } } - def getSynapsesForAgglomerates(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[ByAgglomerateIdsRequest] = + def getSynapsesForAgglomerates(datasetId: ObjectId, dataLayerName: String): Action[ByAgglomerateIdsRequest] = Action.async(validateJson[ByAgglomerateIdsRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND meshFileKey <- connectomeFileService.lookUpConnectomeFileKey(dataSource.id, dataLayer, request.body.connectomeFile) @@ -545,20 +483,16 @@ class DataSourceController @Inject()( } } - def getSynapticPartnerForSynapses(organizationId: String, - datasetDirectoryName: String, + def getSynapticPartnerForSynapses(datasetId: ObjectId, dataLayerName: String, direction: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { directionValidated <- SynapticPartnerDirection .fromString(direction) .toFox ?~> "could not parse synaptic partner direction" - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND meshFileKey <- connectomeFileService.lookUpConnectomeFileKey(dataSource.id, dataLayer, request.body.connectomeFile) @@ -569,16 +503,11 @@ class DataSourceController @Inject()( } } - def getSynapsePositions(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): 
Action[BySynapseIdsRequest] = + def getSynapsePositions(datasetId: ObjectId, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND meshFileKey <- connectomeFileService.lookUpConnectomeFileKey(dataSource.id, dataLayer, request.body.connectomeFile) @@ -587,16 +516,11 @@ class DataSourceController @Inject()( } } - def getSynapseTypes(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[BySynapseIdsRequest] = + def getSynapseTypes(datasetId: ObjectId, dataLayerName: String): Action[BySynapseIdsRequest] = Action.async(validateJson[BySynapseIdsRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND meshFileKey <- connectomeFileService.lookUpConnectomeFileKey(dataSource.id, dataLayer, request.body.connectomeFile) @@ -605,16 +529,11 @@ class DataSourceController @Inject()( } } - def checkSegmentIndexFile(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[AnyContent] = + def checkSegmentIndexFile(datasetId: ObjectId, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND segmentIndexFileKeyBox <- segmentIndexFileService.lookUpSegmentIndexFileKey(dataSource.id, dataLayer).shiftBox } yield Ok(Json.toJson(segmentIndexFileKeyBox.isDefined)) } @@ -625,17 +544,13 @@ class DataSourceController @Inject()( * * @return List of bucketPositions as positions (not indices) of 32³ buckets in mag */ - def getSegmentIndex(organizationId: String, - datasetDirectoryName: String, + def getSegmentIndex(datasetId: ObjectId, dataLayerName: String, segmentId: String): Action[GetSegmentIndexParameters] = Action.async(validateJson[GetSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- 
datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND segmentIndexFileKey <- segmentIndexFileService.lookUpSegmentIndexFileKey(dataSource.id, dataLayer) segmentIds <- segmentIdsForAgglomerateIdIfNeeded( dataSource.id, @@ -665,16 +580,11 @@ class DataSourceController @Inject()( * * @return List of bucketPositions as indices of 32³ buckets (in target mag) */ - def querySegmentIndex(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[GetMultipleSegmentIndexParameters] = + def querySegmentIndex(datasetId: ObjectId, dataLayerName: String): Action[GetMultipleSegmentIndexParameters] = Action.async(validateJson[GetMultipleSegmentIndexParameters]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND segmentIndexFileKey <- segmentIndexFileService.lookUpSegmentIndexFileKey(dataSource.id, dataLayer) segmentIdsAndBucketPositions <- Fox.serialCombined(request.body.segmentIds) { segmentOrAgglomerateId => for { @@ -698,16 +608,11 @@ class DataSourceController @Inject()( } } - def getSegmentVolume(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[SegmentStatisticsParameters] = + def getSegmentVolume(datasetId: ObjectId, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND segmentIndexFileKey <- segmentIndexFileService.lookUpSegmentIndexFileKey(dataSource.id, dataLayer) agglomerateFileKeyOpt <- Fox.runOptional(request.body.mappingName)( agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, _)) @@ -725,16 +630,11 @@ class DataSourceController @Inject()( } } - def getSegmentBoundingBox(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String): Action[SegmentStatisticsParameters] = + def getSegmentBoundingBox(datasetId: ObjectId, dataLayerName: String): Action[SegmentStatisticsParameters] = Action.async(validateJson[SegmentStatisticsParameters]) { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND segmentIndexFileKey <- segmentIndexFileService.lookUpSegmentIndexFileKey(dataSource.id, dataLayer) agglomerateFileKeyOpt <- Fox.runOptional(request.body.mappingName)( 
agglomerateService.lookUpAgglomerateFileKey(dataSource.id, dataLayer, _)) @@ -782,11 +682,31 @@ class DataSourceController @Inject()( } } - def invalidateCache(datasetId: String): Action[AnyContent] = Action.async { implicit request => + def invalidateCache(datasetId: ObjectId): Action[AnyContent] = Action.async { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.writeDataset(datasetId)) { datasetCache.invalidateCache(datasetId) Future.successful(Ok) } } + private def refreshDataSource(datasetId: ObjectId)(implicit tc: TokenContext): Fox[DataSource] = + for { + dataSourceInDB <- datasetCache.getById(datasetId) ~> NOT_FOUND + dataSourceId = dataSourceInDB.id + dataSourceFromDir <- Fox.runIf( + dataSourceService.existsOnDisk(dataSourceId.organizationId, dataSourceId.directoryName)) { + dataSourceService + .dataSourceFromDir( + dataSourceService.dataBaseDir.resolve(dataSourceId.organizationId).resolve(dataSourceId.directoryName), + dataSourceId.organizationId) + .toUsable + .toFox + } + _ <- dataSourceFromDir match { + case Some(ds) => dsRemoteWebknossosClient.updateDataSource(ds, datasetId) + case _ => Fox.successful(()) + } + dataSource <- datasetCache.getById(datasetId) ~> NOT_FOUND + } yield dataSource + } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala new file mode 100644 index 00000000000..5c974913ab9 --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/LegacyController.scala @@ -0,0 +1,565 @@ +package com.scalableminds.webknossos.datastore.controllers + +import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.util.image.{Color, JPEGWriter} +import com.scalableminds.util.time.Instant +import com.scalableminds.util.tools.Box.tryo +import com.scalableminds.util.tools.Fox +import com.scalableminds.webknossos.datastore.DataStoreConfig +import com.scalableminds.webknossos.datastore.dataformats.zarr.Zarr3OutputHelper +import com.scalableminds.webknossos.datastore.helpers.MissingBucketHeaders +import com.scalableminds.webknossos.datastore.image.{ImageCreator, ImageCreatorParameters} +import com.scalableminds.webknossos.datastore.models.DataRequestCollection._ +import com.scalableminds.webknossos.datastore.models.{ + DataRequest, + RawCuboidRequest, + VoxelPosition, + WebknossosAdHocMeshRequest, + WebknossosDataRequest +} +import com.scalableminds.webknossos.datastore.models.datasource.{ + Category, + DataLayer, + DataSourceId, + GenericDataSource, + SegmentationLayer +} +import com.scalableminds.webknossos.datastore.models.requests.{ + DataServiceDataRequest, + DataServiceMappingRequest, + DataServiceRequestSettings +} +import com.scalableminds.webknossos.datastore.services.mapping.MappingService +import com.scalableminds.webknossos.datastore.services.mesh.{ + AdHocMeshRequest, + AdHocMeshService, + AdHocMeshServiceHolder, + DSFullMeshService, + FullMeshRequest +} +import com.scalableminds.webknossos.datastore.services.{ + BinaryDataService, + BinaryDataServiceHolder, + DSRemoteTracingstoreClient, + DSRemoteWebknossosClient, + DataSourceRepository, + DataStoreAccessTokenService, + FindDataService, + UserAccessRequest, + ZarrStreamingService +} +import com.scalableminds.webknossos.datastore.slacknotification.DSSlackNotificationService +import 
play.api.i18n.Messages +import play.api.libs.json.Json +import play.api.mvc.{Action, AnyContent, PlayBodyParsers, RawBuffer} + +import java.io.ByteArrayOutputStream +import java.nio.{ByteBuffer, ByteOrder} +import scala.concurrent.ExecutionContext +import scala.concurrent.duration.DurationInt + +class LegacyController @Inject()( + dataSourceRepository: DataSourceRepository, + accessTokenService: DataStoreAccessTokenService, + binaryDataServiceHolder: BinaryDataServiceHolder, + remoteWebknossosClient: DSRemoteWebknossosClient, + remoteTracingstoreClient: DSRemoteTracingstoreClient, + mappingService: MappingService, + config: DataStoreConfig, + slackNotificationService: DSSlackNotificationService, + adHocMeshServiceHolder: AdHocMeshServiceHolder, + findDataService: FindDataService, + zarrStreamingService: ZarrStreamingService, + fullMeshService: DSFullMeshService +)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) + extends Controller + with Zarr3OutputHelper + with MissingBucketHeaders { + + // BINARY DATA ROUTES + + override def allowRemoteOrigin: Boolean = true + + val binaryDataService: BinaryDataService = binaryDataServiceHolder.binaryDataService + adHocMeshServiceHolder.dataStoreAdHocMeshConfig = + (binaryDataService, mappingService, config.Datastore.AdHocMesh.timeout, config.Datastore.AdHocMesh.actorPoolSize) + val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.dataStoreAdHocMeshService + + def requestViaWebknossosV9( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String + ): Action[List[WebknossosDataRequest]] = Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + logTime(slackNotificationService.noticeSlowRequest) { + val t = Instant.now + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + (data, indices) <- requestData(dataSource.id, dataLayer, request.body) + duration = Instant.since(t) + _ = if (duration > (10 seconds)) + logger.info( + s"Complete data request for $organizationId/$datasetDirectoryName/$dataLayerName took ${formatDuration(duration)}." + + request.body.headOption + .map(firstReq => s" First of ${request.body.size} requests was $firstReq") + .getOrElse("")) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + } + } + } + + /** + * Handles requests for raw binary data via HTTP GET. + */ + def requestRawCuboidV9( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + // Mag1 coordinates of the top-left corner of the bounding box + x: Int, + y: Int, + z: Int, + // Target-mag size of the bounding box + width: Int, + height: Int, + depth: Int, + // Mag in three-component format (e.g. 
1-1-1 or 16-16-8) + mag: String, + // If true, use lossy compression by sending only half-bytes of the data + halfByte: Boolean, + mappingName: Option[String] + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" + dataRequest = DataRequest( + VoxelPosition(x, y, z, magParsed), + width, + height, + depth, + DataServiceRequestSettings(halfByte = halfByte, appliedAgglomerate = mappingName) + ) + (data, indices) <- requestData(dataSource.id, dataLayer, dataRequest) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + } + } + + def requestRawCuboidPostV9( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String + ): Action[RawCuboidRequest] = Action.async(validateJson[RawCuboidRequest]) { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + (data, indices) <- requestData(dataSource.id, dataLayer, request.body) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + } + } + + /** + * Handles a request for raw binary data via a HTTP GET. Used by knossos. + */ + def requestViaKnossosV9(organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mag: Int, + x: Int, + y: Int, + z: Int, + cubeSize: Int): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + dataRequest = DataRequest( + VoxelPosition(x * cubeSize * mag, y * cubeSize * mag, z * cubeSize * mag, Vec3Int(mag, mag, mag)), + cubeSize, + cubeSize, + cubeSize + ) + (data, indices) <- requestData(dataSource.id, dataLayer, dataRequest) + } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) + } + } + + def thumbnailJpegV9(organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + x: Int, + y: Int, + z: Int, + width: Int, + height: Int, + mag: String, + mappingName: Option[String], + intensityMin: Option[Double], + intensityMax: Option[Double], + color: Option[String], + invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND + magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" + dataRequest = DataRequest( + VoxelPosition(x, y, z, magParsed), + width, + height, + depth = 1, + DataServiceRequestSettings(appliedAgglomerate = mappingName) + ) + (data, _) <- requestData(dataSource.id, dataLayer, dataRequest) + 
intensityRange: Option[(Double, Double)] = intensityMin.flatMap(min => intensityMax.map(max => (min, max))) + layerColor = color.flatMap(Color.fromHTML) + params = ImageCreatorParameters( + dataLayer.elementClass, + useHalfBytes = false, + slideWidth = width, + slideHeight = height, + imagesPerRow = 1, + blackAndWhite = false, + intensityRange = intensityRange, + isSegmentation = dataLayer.category == Category.segmentation, + color = layerColor, + invertColor = invertColor + ) + dataWithFallback = if (data.length == 0) + new Array[Byte](width * height * dataLayer.bytesPerElement) + else data + spriteSheet <- ImageCreator.spriteSheetFor(dataWithFallback, params).toFox ?~> "image.create.failed" + firstSheet <- spriteSheet.pages.headOption.toFox ?~> "image.page.failed" + outputStream = new ByteArrayOutputStream() + _ = new JPEGWriter().writeToOutputStream(firstSheet.image)(outputStream) + } yield Ok(outputStream.toByteArray).as(jpegMimeType) + } + } + + def mappingJsonV9( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mappingName: String + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> Messages("dataLayer.notFound") + mappingRequest = DataServiceMappingRequest(Some(dataSource.id), segmentationLayer, mappingName) + result <- mappingService.handleMappingRequest(mappingRequest) + } yield Ok(result) + } + } + + /** + * Handles ad-hoc mesh requests. + */ + def requestAdHocMeshV9(organizationId: String, + datasetDirectoryName: String, + dataLayerName: String): Action[WebknossosAdHocMeshRequest] = + Action.async(validateJson[WebknossosAdHocMeshRequest]) { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> "dataLayer.mustBeSegmentation" + adHocMeshRequest = AdHocMeshRequest( + Some(dataSource.id), + segmentationLayer, + request.body.cuboid(dataLayer), + request.body.segmentId, + request.body.voxelSizeFactorInUnit, + tokenContextForRequest(request), + request.body.mapping, + request.body.mappingType, + request.body.additionalCoordinates, + request.body.findNeighbors, + ) + // The client expects the ad-hoc mesh as a flat float-array. Three consecutive floats form a 3D point, three + // consecutive 3D points (i.e., nine floats) form a triangle. + // There are no shared vertices between triangles. 
+ (vertices, neighbors) <- adHocMeshService.requestAdHocMeshViaActor(adHocMeshRequest) + } yield { + // We need four bytes for each float + val responseBuffer = ByteBuffer.allocate(vertices.length * 4).order(ByteOrder.LITTLE_ENDIAN) + responseBuffer.asFloatBuffer().put(vertices) + Ok(responseBuffer.array()).withHeaders(getNeighborIndices(neighbors): _*) + } + } + } + + private def getNeighborIndices(neighbors: List[Int]) = + List("NEIGHBORS" -> formatNeighborList(neighbors), "Access-Control-Expose-Headers" -> "NEIGHBORS") + + private def formatNeighborList(neighbors: List[Int]): String = + "[" + neighbors.mkString(", ") + "]" + + def findDataV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + positionAndMagOpt <- findDataService.findPositionWithData(dataSource.id, dataLayer) + } yield Ok(Json.obj("position" -> positionAndMagOpt.map(_._1), "mag" -> positionAndMagOpt.map(_._2))) + } + } + + def histogramV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String): Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND ?~> Messages("histogram.layerMissing", dataLayerName) + listOfHistograms <- findDataService.createHistogram(dataSource.id, dataLayer) ?~> Messages("histogram.failed", + dataLayerName) + } yield Ok(Json.toJson(listOfHistograms)) + } + } + + private def requestData( + dataSourceId: DataSourceId, + dataLayer: DataLayer, + dataRequests: DataRequestCollection + )(implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = { + val requests = + dataRequests.map(r => DataServiceDataRequest(Some(dataSourceId), dataLayer, r.cuboid(dataLayer), r.settings)) + binaryDataService.handleDataRequests(requests) + } + + // ZARR ROUTES + + /** + * Serve .zattrs file for a dataset + * Uses the OME-NGFF standard (see https://ngff.openmicroscopy.org/latest/) + */ + def requestZAttrsV9( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String = "", + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + header = zarrStreamingService.getHeader(dataSource, dataLayer) + } yield Ok(Json.toJson(header)) + } + } + + def requestZarrJsonV9( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String = "", + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + 
dataLayerName) ~> NOT_FOUND + header = zarrStreamingService.getGroupHeader(dataSource, dataLayer) + } yield Ok(Json.toJson(header)) + } + } + + /** + * Zarr-specific datasource-properties.json file for a datasource. + * Note that the result here is not necessarily equal to the file used in the underlying storage. + */ + def requestDataSourceV9( + organizationId: String, + datasetDirectoryName: String, + zarrVersion: Int, + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + dataSource <- dataSourceRepository + .findUsable(DataSourceId(datasetDirectoryName, organizationId)) + .toFox ~> NOT_FOUND + zarrSource = zarrStreamingService.getZarrDataSource(dataSource, zarrVersion) + } yield Ok(Json.toJson(zarrSource)) + } + } + + def requestRawZarrCubeV9( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mag: String, + coordinates: String, + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + result <- zarrStreamingService.rawZarrCube(dataSource, dataLayer, mag, coordinates) + } yield Ok(result) + } + } + + def requestZArrayV9( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mag: String, + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + zarrHeader <- zarrStreamingService.getZArray(dataLayer, mag) + } yield Ok(Json.toJson(zarrHeader)) + } + } + + def requestZarrJsonForMagV9( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mag: String, + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) + zarrJson <- zarrStreamingService.requestZarrJsonForMag(dataSource, dataLayer, mag) + } yield Ok(Json.toJson(zarrJson)) + } + } + + def requestDataLayerDirectoryContentsV9( + organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + zarrVersion: Int + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + contents <- zarrStreamingService.dataLayerDirectoryContents(dataSource, dataLayer, zarrVersion) + } yield + Ok( + views.html.datastoreZarrDatasourceDir( + "Datastore", + "%s/%s/%s".format(organizationId, datasetDirectoryName, dataLayerName), + contents + )).withHeaders() + + } + } + + def requestDataLayerMagDirectoryContentsV9( + 
organizationId: String, + datasetDirectoryName: String, + dataLayerName: String, + mag: String, + zarrVersion: Int + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + contents <- zarrStreamingService.dataLayerMagDirectoryContents(dataSource, dataLayer, mag, zarrVersion) + } yield + Ok( + views.html.datastoreZarrDatasourceDir( + "Datastore", + "%s/%s/%s/%s".format(organizationId, datasetDirectoryName, dataLayerName, mag), + contents + )).withHeaders() + } + } + + def requestDataSourceDirectoryContentsV9( + organizationId: String, + datasetDirectoryName: String, + zarrVersion: Int + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + dataSource <- dataSourceRepository + .findUsable(DataSourceId(datasetDirectoryName, organizationId)) + .toFox ~> NOT_FOUND + files <- zarrStreamingService.dataSourceDirectoryContents(dataSource, zarrVersion) + } yield + Ok( + views.html.datastoreZarrDatasourceDir( + "Datastore", + s"$organizationId/$datasetDirectoryName", + List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) ++ files + )) + } + } + + def requestZGroupV9(organizationId: String, + datasetDirectoryName: String, + dataLayerName: String = ""): Action[AnyContent] = + Action.async { implicit request => + accessTokenService.validateAccessFromTokenContextForSyncBlock( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + Ok(zarrStreamingService.zGroupJson) + } + } + + // MESH ROUTES + + def loadFullMeshStl(organizationId: String, + datasetDirectoryName: String, + dataLayerName: String): Action[FullMeshRequest] = + Action.async(validateJson[FullMeshRequest]) { implicit request => + accessTokenService.validateAccessFromTokenContext( + UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + for { + (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, + datasetDirectoryName, + dataLayerName) ~> NOT_FOUND + data: Array[Byte] <- fullMeshService.loadFor(dataSource, dataLayer, request.body) ?~> "mesh.file.loadChunk.failed" + + } yield Ok(data) + } + } + +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/WKDatasetController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/WKDatasetController.scala deleted file mode 100644 index 209d000e79e..00000000000 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/WKDatasetController.scala +++ /dev/null @@ -1,199 +0,0 @@ -package com.scalableminds.webknossos.datastore.controllers - -import com.google.inject.Inject -import com.scalableminds.util.accesscontext.TokenContext -import com.scalableminds.util.geometry.Vec3Int -import com.scalableminds.util.image.{Color, JPEGWriter} -import com.scalableminds.util.objectid.ObjectId -import com.scalableminds.util.time.Instant -import com.scalableminds.util.tools.Fox -import com.scalableminds.webknossos.datastore.helpers.MissingBucketHeaders -import com.scalableminds.webknossos.datastore.image.{ImageCreator, ImageCreatorParameters} -import 
com.scalableminds.webknossos.datastore.models.DataRequestCollection._ -import com.scalableminds.webknossos.datastore.models.{ - DataRequest, - RawCuboidRequest, - VoxelPosition, - WebknossosDataRequest -} -import com.scalableminds.webknossos.datastore.models.requests.{DataServiceDataRequest, DataServiceRequestSettings} -import com.scalableminds.webknossos.datastore.models.datasource.{Category, DataLayer, DataSourceId} -import com.scalableminds.webknossos.datastore.services.{ - BinaryDataService, - BinaryDataServiceHolder, - DataStoreAccessTokenService, - DatasetCache, - FindDataService, - UserAccessRequest -} -import com.scalableminds.webknossos.datastore.slacknotification.DSSlackNotificationService -import play.api.i18n.Messages -import play.api.libs.json.Json -import play.api.mvc.{Action, AnyContent, PlayBodyParsers, RawBuffer} - -import java.io.ByteArrayOutputStream -import scala.concurrent.ExecutionContext -import scala.concurrent.duration.DurationInt - -/** - * This is equivalent to the BinaryDataController for Datasets by DatasetId - */ -class WKDatasetController @Inject()( - accessTokenService: DataStoreAccessTokenService, - binaryDataServiceHolder: BinaryDataServiceHolder, - findDataService: FindDataService, - slackNotificationService: DSSlackNotificationService, - datasetCache: DatasetCache -)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers) - extends Controller - with MissingBucketHeaders { - - val binaryDataService: BinaryDataService = binaryDataServiceHolder.binaryDataService - - def requestViaWebknossos(datasetId: String, dataLayerName: String): Action[List[WebknossosDataRequest]] = - Action.async(validateJson[List[WebknossosDataRequest]]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { - logTime(slackNotificationService.noticeSlowRequest) { - val t = Instant.now - for { - datasetId <- ObjectId.fromString(datasetId) - dataSource <- datasetCache.getById(datasetId) - dataLayer <- dataSource.getDataLayer(dataLayerName).toFox ?~> "Data layer not found" ~> NOT_FOUND - (data, indices) <- requestData(dataSource.id, dataLayer, request.body) - duration = Instant.since(t) - _ = if (duration > (10 seconds)) - logger.info( - s"Complete data request for $datasetId/$dataLayerName took ${formatDuration(duration)}." 
- + request.body.headOption - .map(firstReq => s" First of ${request.body.size} requests was $firstReq") - .getOrElse("")) - } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) - } - } - } - - def requestRawCuboid(datasetId: String, - dataLayerName: String, - x: Int, - y: Int, - z: Int, - width: Int, - height: Int, - depth: Int, - mag: String, - halfByte: Boolean, - mappingName: Option[String]): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { - for { - datasetId <- ObjectId.fromString(datasetId) - (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND - magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" - dataRequest = DataRequest( - VoxelPosition(x, y, z, magParsed), - width, - height, - depth, - DataServiceRequestSettings(halfByte = halfByte, appliedAgglomerate = mappingName) - ) - (data, indices) <- requestData(dataSource.id, dataLayer, dataRequest) - } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) - } - } - - def requestRawCuboidPost(datasetId: String, dataLayerName: String): Action[RawCuboidRequest] = - Action.async(validateJson[RawCuboidRequest]) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { - for { - datasetId <- ObjectId.fromString(datasetId) - (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND - (data, indices) <- requestData(dataSource.id, dataLayer, request.body) - } yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*) - } - } - - def thumbnailJpeg(datasetId: String, - dataLayerName: String, - x: Int, - y: Int, - z: Int, - width: Int, - height: Int, - mag: String, - mappingName: Option[String], - intensityMin: Option[Double], - intensityMax: Option[Double], - color: Option[String], - invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { - for { - datasetIdValidated <- ObjectId.fromString(datasetId) - (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ?~> Messages( - "dataSource.notFound") ~> NOT_FOUND - magParsed <- Vec3Int.fromMagLiteral(mag).toFox ?~> "malformedMag" - dataRequest = DataRequest( - VoxelPosition(x, y, z, magParsed), - width, - height, - depth = 1, - DataServiceRequestSettings(appliedAgglomerate = mappingName) - ) - (data, _) <- requestData(dataSource.id, dataLayer, dataRequest) - intensityRange: Option[(Double, Double)] = intensityMin.flatMap(min => intensityMax.map(max => (min, max))) - layerColor = color.flatMap(Color.fromHTML) - params = ImageCreatorParameters( - dataLayer.elementClass, - useHalfBytes = false, - slideWidth = width, - slideHeight = height, - imagesPerRow = 1, - blackAndWhite = false, - intensityRange = intensityRange, - isSegmentation = dataLayer.category == Category.segmentation, - color = layerColor, - invertColor = invertColor - ) - dataWithFallback = if (data.length == 0) - new Array[Byte](width * height * dataLayer.bytesPerElement) - else data - spriteSheet <- ImageCreator.spriteSheetFor(dataWithFallback, params).toFox ?~> "image.create.failed" - firstSheet <- spriteSheet.pages.headOption.toFox ?~> "image.page.failed" - outputStream = new ByteArrayOutputStream() - _ = new JPEGWriter().writeToOutputStream(firstSheet.image)(outputStream) - } yield 
Ok(outputStream.toByteArray).as(jpegMimeType) - } - } - - def findData(datasetId: String, dataLayerName: String): Action[AnyContent] = - Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { - for { - datasetIdValidated <- ObjectId.fromString(datasetId) - (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ~> NOT_FOUND - positionAndMagOpt <- findDataService.findPositionWithData(dataSource.id, dataLayer) - } yield Ok(Json.obj("position" -> positionAndMagOpt.map(_._1), "mag" -> positionAndMagOpt.map(_._2))) - } - } - - def histogram(datasetId: String, dataLayerName: String): Action[AnyContent] = - Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { - for { - datasetIdValidated <- ObjectId.fromString(datasetId) - (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetIdValidated, dataLayerName) ?~> Messages( - "dataSource.notFound") ~> NOT_FOUND ?~> Messages("histogram.layerMissing", dataLayerName) - listOfHistograms <- findDataService.createHistogram(dataSource.id, dataLayer) ?~> Messages("histogram.failed", - dataLayerName) - } yield Ok(Json.toJson(listOfHistograms)) - } - } - - private def requestData( - dataSourceId: DataSourceId, - dataLayer: DataLayer, - dataRequests: DataRequestCollection - )(implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = { - val requests = - dataRequests.map(r => DataServiceDataRequest(Some(dataSourceId), dataLayer, r.cuboid(dataLayer), r.settings)) - binaryDataService.handleDataRequests(requests) - } -} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala index f0a3007d00e..9bf15de0682 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala @@ -2,40 +2,30 @@ package com.scalableminds.webknossos.datastore.controllers import com.google.inject.Inject import com.scalableminds.util.accesscontext.TokenContext -import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.dataformats.MagLocator import com.scalableminds.webknossos.datastore.dataformats.layers.{ZarrDataLayer, ZarrLayer, ZarrSegmentationLayer} -import com.scalableminds.webknossos.datastore.dataformats.zarr.{Zarr3OutputHelper, ZarrCoordinatesParser} -import com.scalableminds.webknossos.datastore.datareaders.zarr.{ - NgffGroupHeader, - NgffMetadata, - NgffMetadataV0_5, - ZarrHeader -} -import com.scalableminds.webknossos.datastore.datareaders.zarr3.{Zarr3ArrayHeader, NgffZarr3GroupHeader} +import com.scalableminds.webknossos.datastore.dataformats.zarr.Zarr3OutputHelper +import com.scalableminds.webknossos.datastore.datareaders.zarr.{NgffMetadata, NgffMetadataV0_5} +import com.scalableminds.webknossos.datastore.datareaders.zarr3.NgffZarr3GroupHeader import com.scalableminds.webknossos.datastore.models.annotation.{AnnotationLayer, AnnotationLayerType, AnnotationSource} import com.scalableminds.webknossos.datastore.models.datasource._ -import com.scalableminds.webknossos.datastore.models.requests.{ - Cuboid, - DataServiceDataRequest, - 
DataServiceRequestSettings -} -import com.scalableminds.webknossos.datastore.models.VoxelPosition import com.scalableminds.webknossos.datastore.services._ -import play.api.i18n.{Messages, MessagesProvider} -import play.api.libs.json.{JsValue, Json} +import play.api.i18n.Messages +import play.api.libs.json.Json import play.api.mvc._ import scala.concurrent.ExecutionContext import com.scalableminds.webknossos.datastore.datareaders.AxisOrder class ZarrStreamingController @Inject()( - dataSourceRepository: DataSourceRepository, + datasetCache: DatasetCache, accessTokenService: DataStoreAccessTokenService, binaryDataServiceHolder: BinaryDataServiceHolder, remoteWebknossosClient: DSRemoteWebknossosClient, remoteTracingstoreClient: DSRemoteTracingstoreClient, + zarrStreamingService: ZarrStreamingService )(implicit ec: ExecutionContext) extends Controller with Zarr3OutputHelper { @@ -51,40 +41,26 @@ class ZarrStreamingController @Inject()( * Uses the OME-NGFF standard (see https://ngff.openmicroscopy.org/latest/) */ def requestZAttrs( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> Messages( - "dataSource.notFound") ~> NOT_FOUND - omeNgffHeader = NgffMetadata.fromNameVoxelSizeAndMags(dataLayerName, dataSource.scale, dataLayer.sortedMags) - } yield Ok(Json.toJson(omeNgffHeader)) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND + header = zarrStreamingService.getHeader(dataSource, dataLayer) + } yield Ok(Json.toJson(header)) } } def requestZarrJson( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String = "", ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> Messages( - "dataSource.notFound") ~> NOT_FOUND - omeNgffHeaderV0_5 = NgffMetadataV0_5.fromNameVoxelSizeAndMags(dataLayerName, - dataSource.scale, - dataLayer.sortedMags, - dataLayer.additionalAxes) - zarr3GroupHeader = NgffZarr3GroupHeader(3, "group", omeNgffHeaderV0_5) - } yield Ok(Json.toJson(zarr3GroupHeader)) + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND + header = zarrStreamingService.getGroupHeader(dataSource, dataLayer) + } yield Ok(Json.toJson(header)) } } @@ -100,10 +76,8 @@ class ZarrStreamingController @Inject()( }, orElse = annotationSource => for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer( - annotationSource.organizationId, - annotationSource.datasetDirectoryName, - dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache + .getWithLayer(annotationSource.datasetId, dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND 
dataSourceOmeNgffHeader = NgffMetadata.fromNameVoxelSizeAndMags(dataLayerName, dataSource.scale, dataLayer.sortedMags) @@ -124,10 +98,8 @@ class ZarrStreamingController @Inject()( }, orElse = annotationSource => for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer( - annotationSource.organizationId, - annotationSource.datasetDirectoryName, - dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + (dataSource, dataLayer) <- datasetCache + .getWithLayer(annotationSource.datasetId, dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND dataSourceOmeNgffHeader = NgffMetadataV0_5.fromNameVoxelSizeAndMags(dataLayerName, dataSource.scale, dataLayer.sortedMags, @@ -142,19 +114,13 @@ class ZarrStreamingController @Inject()( * Note that the result here is not necessarily equal to the file used in the underlying storage. */ def requestDataSource( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, zarrVersion: Int, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { for { - dataSource <- dataSourceRepository - .findUsable(DataSourceId(datasetDirectoryName, organizationId)) - .toFox ~> NOT_FOUND - dataLayers = dataSource.dataLayers - zarrLayers = dataLayers.map(convertLayerToZarrLayer(_, zarrVersion)) - zarrSource = GenericDataSource[DataLayer](dataSource.id, zarrLayers, dataSource.scale) + dataSource <- datasetCache.getById(datasetId) ~> NOT_FOUND + zarrSource = zarrStreamingService.getZarrDataSource(dataSource, zarrVersion) } yield Ok(Json.toJson(zarrSource)) } } @@ -217,9 +183,7 @@ class ZarrStreamingController @Inject()( relevantTokenContext = if (annotationSource.accessViaPrivateLink) TokenContext(Some(accessToken)) else tokenContextForRequest volumeAnnotationLayers = annotationSource.annotationLayers.filter(_.typ == AnnotationLayerType.Volume) - dataSource <- dataSourceRepository - .findUsable(DataSourceId(annotationSource.datasetDirectoryName, annotationSource.organizationId)) - .toFox ~> NOT_FOUND + dataSource <- datasetCache.getById(annotationSource.datasetId) ?~> Messages("dataSource.notFound") ~> NOT_FOUND dataSourceLayers = dataSource.dataLayers .filter(dL => !volumeAnnotationLayers.exists(_.name == dL.name)) .map(convertLayerToZarrLayer(_, zarrVersion)) @@ -235,15 +199,16 @@ class ZarrStreamingController @Inject()( } def requestRawZarrCube( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mag: String, coordinates: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - rawZarrCube(organizationId, datasetDirectoryName, dataLayerName, mag, coordinates) + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { + for { + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND + result <- zarrStreamingService.rawZarrCube(dataSource, dataLayer, mag, coordinates) + } yield Ok(result) } } @@ -261,104 +226,40 @@ class ZarrStreamingController @Inject()( relevantTokenContext) .map(Ok(_)), orElse = annotationSource => - rawZarrCube(annotationSource.organizationId, - 
annotationSource.datasetDirectoryName, - dataLayerName, - mag, - coordinates) + for { + (dataSource, dataLayer) <- datasetCache + .getWithLayer(annotationSource.datasetId, dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + zarrCube <- zarrStreamingService.rawZarrCube(dataSource, dataLayer, mag, coordinates) + } yield Ok(zarrCube) ) } - private def rawZarrCube( - organizationId: String, - datasetDirectoryName: String, - dataLayerName: String, - mag: String, - coordinates: String, - )(implicit m: MessagesProvider, tc: TokenContext): Fox[Result] = - for { - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> SERVICE_UNAVAILABLE - reorderedAdditionalAxes = dataLayer.additionalAxes.map(reorderAdditionalAxes) - (x, y, z, additionalCoordinates) <- ZarrCoordinatesParser.parseNDimensionalDotCoordinates( - coordinates, - reorderedAdditionalAxes) ?~> "zarr.invalidChunkCoordinates" ~> NOT_FOUND - magParsed <- Vec3Int - .fromMagLiteral(mag, allowScalar = true) - .toFox ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND - _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) ~> NOT_FOUND - cubeSize = DataLayer.bucketLength - request = DataServiceDataRequest( - Some(dataSource.id), - dataLayer, - Cuboid( - topLeft = VoxelPosition(x * cubeSize * magParsed.x, - y * cubeSize * magParsed.y, - z * cubeSize * magParsed.z, - magParsed), - width = cubeSize, - height = cubeSize, - depth = cubeSize - ), - DataServiceRequestSettings(halfByte = false, additionalCoordinates = additionalCoordinates) - ) - (data, notFoundIndices) <- binaryDataService.handleDataRequests(List(request)) - _ <- Fox.fromBool(notFoundIndices.isEmpty) ~> "zarr.chunkNotFound" ~> NOT_FOUND - } yield Ok(data) - def requestZArray( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - zArray(organizationId, datasetDirectoryName, dataLayerName, mag) + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { + for { + (_, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND + zarrHeader <- zarrStreamingService.getZArray(dataLayer, mag) + } yield Ok(Json.toJson(zarrHeader)) } } - private def zArray(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String)( - implicit m: MessagesProvider): Fox[Result] = - for { - (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> Messages( - "dataSource.notFound") ~> NOT_FOUND - magParsed <- Vec3Int - .fromMagLiteral(mag, allowScalar = true) - .toFox ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND - _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) ~> NOT_FOUND - zarrHeader = ZarrHeader.fromLayer(dataLayer, magParsed) - } yield Ok(Json.toJson(zarrHeader)) - def requestZarrJsonForMag( - organizationId: String, - datasetDirectoryName: String, + datasetId: ObjectId, dataLayerName: String, mag: String, ): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - 
UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - zarrJsonForMag(organizationId, datasetDirectoryName, dataLayerName, mag) + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { + for { + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND + zarrJson <- zarrStreamingService.requestZarrJsonForMag(dataSource, dataLayer, mag) + } yield Ok(Json.toJson(zarrJson)) } } - private def zarrJsonForMag(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String)( - implicit m: MessagesProvider): Fox[Result] = - for { - (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> Messages( - "dataSource.notFound") ~> NOT_FOUND - magParsed <- Vec3Int - .fromMagLiteral(mag, allowScalar = true) - .toFox ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND - _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) ~> NOT_FOUND - zarrHeader = Zarr3ArrayHeader.fromDataLayer(dataLayer, magParsed) - } yield Ok(Json.toJson(zarrHeader)) - def zArrayPrivateLink(accessToken: String, dataLayerName: String, mag: String): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( @@ -369,7 +270,11 @@ class ZarrStreamingController @Inject()( .getZArray(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl)(relevantTokenContext) .map(z => Ok(Json.toJson(z))), orElse = annotationSource => - zArray(annotationSource.organizationId, annotationSource.datasetDirectoryName, dataLayerName, mag) + for { + (_, dataLayer) <- datasetCache.getWithLayer(annotationSource.datasetId, dataLayerName) ?~> Messages( + "dataSource.notFound") ~> NOT_FOUND + zArray <- zarrStreamingService.getZArray(dataLayer, mag) + } yield Ok(Json.toJson(zArray)) ) } @@ -383,7 +288,11 @@ class ZarrStreamingController @Inject()( .getZarrJson(annotationLayer.tracingId, mag, annotationSource.tracingStoreUrl)(relevantTokenContext) .map(z => Ok(Json.toJson(z))), orElse = annotationSource => - zarrJsonForMag(annotationSource.organizationId, annotationSource.datasetDirectoryName, dataLayerName, mag) + for { + (dataSource, dataLayer) <- datasetCache + .getWithLayer(annotationSource.datasetId, dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + zarrJson <- zarrStreamingService.requestZarrJsonForMag(dataSource, dataLayer, mag) + } yield Ok(Json.toJson(zarrJson)) ) } @@ -403,190 +312,143 @@ class ZarrStreamingController @Inject()( } } yield result - def requestDataLayerMagDirectoryContents(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String, - mag: String, - zarrVersion: Int): Action[AnyContent] = - Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - dataLayerMagDirectoryContents(organizationId, datasetDirectoryName, dataLayerName, mag, zarrVersion) - } + def requestDataLayerDirectoryContents( + datasetId: ObjectId, + dataLayerName: String, + zarrVersion: Int + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { + for { + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND + contents <- zarrStreamingService.dataLayerDirectoryContents(dataSource, dataLayer, 
zarrVersion) + } yield + Ok( + views.html.datastoreZarrDatasourceDir( + "Datastore", + "%s/%s".format(datasetId, dataLayerName), + contents + )).withHeaders() + } + } - private def dataLayerMagDirectoryContents(organizationId: String, - datasetDirectoryName: String, + def dataLayerDirectoryContentsPrivateLink(accessToken: String, dataLayerName: String, - mag: String, - zarrVersion: Int)(implicit m: MessagesProvider): Fox[Result] = - for { - (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ~> NOT_FOUND - magParsed <- Vec3Int - .fromMagLiteral(mag, allowScalar = true) - .toFox ?~> Messages("dataLayer.invalidMag", mag) ~> NOT_FOUND - _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) ~> NOT_FOUND - additionalEntries = if (zarrVersion == 2) List(ZarrHeader.FILENAME_DOT_ZARRAY) - else List(Zarr3ArrayHeader.FILENAME_ZARR_JSON) - } yield - Ok( - views.html.datastoreZarrDatasourceDir( - "Datastore", - "%s/%s/%s/%s".format(organizationId, datasetDirectoryName, dataLayerName, mag), - additionalEntries - )).withHeaders() - - def dataLayerMagDirectoryContentsPrivateLink(accessToken: String, - dataLayerName: String, - mag: String, - zarrVersion: Int): Action[AnyContent] = + zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( accessToken, dataLayerName, ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getDataLayerMagDirectoryContents(annotationLayer.tracingId, - mag, - annotationSource.tracingStoreUrl, - zarrVersion)(relevantTokenContext) + .getDataLayerDirectoryContents(annotationLayer.tracingId, annotationSource.tracingStoreUrl, zarrVersion)( + relevantTokenContext) .map( layers => Ok( views.html.datastoreZarrDatasourceDir( - "Combined Annotation Route", + "Tracingstore", s"${annotationLayer.tracingId}", layers )).withHeaders()), orElse = annotationSource => - dataLayerMagDirectoryContents(annotationSource.organizationId, - annotationSource.datasetDirectoryName, - dataLayerName, - mag, - zarrVersion) + for { + (dataSource, dataLayer) <- datasetCache + .getWithLayer(annotationSource.datasetId, dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + content <- zarrStreamingService.dataLayerDirectoryContents(dataSource, dataLayer, zarrVersion) + } yield Ok(Json.toJson(content)) ) } - def requestDataLayerDirectoryContents(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String, - zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - dataLayerDirectoryContents(organizationId, datasetDirectoryName, dataLayerName, zarrVersion) + def requestDataLayerMagDirectoryContents( + datasetId: ObjectId, + dataLayerName: String, + mag: String, + zarrVersion: Int + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { + for { + (dataSource, dataLayer) <- datasetCache.getWithLayer(datasetId, dataLayerName) ~> NOT_FOUND + contents <- zarrStreamingService.dataLayerMagDirectoryContents(dataSource, dataLayer, mag, zarrVersion) + } yield + Ok( + views.html.datastoreZarrDatasourceDir( + "Datastore", + "%s/%s/%s".format(datasetId, dataLayerName, mag), + contents + )).withHeaders() } } - private 
def dataLayerDirectoryContents(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String, - zarrVersion: Int)(implicit m: MessagesProvider): Fox[Result] = - for { - (_, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) ?~> Messages( - "dataSource.notFound") ~> NOT_FOUND - mags = dataLayer.sortedMags - additionalFiles = if (zarrVersion == 2) - List(NgffMetadata.FILENAME_DOT_ZATTRS, NgffGroupHeader.FILENAME_DOT_ZGROUP) - else List(Zarr3ArrayHeader.FILENAME_ZARR_JSON) - } yield - Ok( - views.html.datastoreZarrDatasourceDir( - "Datastore", - "%s/%s/%s".format(organizationId, datasetDirectoryName, dataLayerName), - additionalFiles ++ mags.map(_.toMagLiteral(allowScalar = true)) - )).withHeaders() - - def dataLayerDirectoryContentsPrivateLink(accessToken: String, - dataLayerName: String, - zarrVersion: Int): Action[AnyContent] = + def dataLayerMagDirectoryContentsPrivateLink(accessToken: String, + dataLayerName: String, + mag: String, + zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( accessToken, dataLayerName, ifIsAnnotationLayer = (annotationLayer, annotationSource, relevantTokenContext) => remoteTracingstoreClient - .getDataLayerDirectoryContents(annotationLayer.tracingId, annotationSource.tracingStoreUrl, zarrVersion)( - relevantTokenContext) + .getDataLayerMagDirectoryContents(annotationLayer.tracingId, + mag, + annotationSource.tracingStoreUrl, + zarrVersion)(relevantTokenContext) .map( layers => Ok( views.html.datastoreZarrDatasourceDir( - "Tracingstore", + "Combined Annotation Route", s"${annotationLayer.tracingId}", layers )).withHeaders()), orElse = annotationSource => - dataLayerDirectoryContents(annotationSource.organizationId, - annotationSource.datasetDirectoryName, - dataLayerName, - zarrVersion) + for { + (dataSource, dataLayer) <- datasetCache + .getWithLayer(annotationSource.datasetId, dataLayerName) ?~> Messages("dataSource.notFound") ~> NOT_FOUND + contents <- zarrStreamingService.dataLayerMagDirectoryContents(dataSource, dataLayer, mag, zarrVersion) + } yield Ok(Json.toJson(contents)) ) } - def requestDataSourceDirectoryContents(organizationId: String, - datasetDirectoryName: String, - zarrVersion: Int): Action[AnyContent] = - Action.async { implicit request => - accessTokenService.validateAccessFromTokenContext( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - for { - dataSource <- dataSourceRepository - .findUsable(DataSourceId(datasetDirectoryName, organizationId)) - .toFox ?~> Messages("dataSource.notFound") ~> NOT_FOUND - layerNames = dataSource.dataLayers.map((dataLayer: DataLayer) => dataLayer.name) - additionalVersionDependantFiles = if (zarrVersion == 2) List(NgffGroupHeader.FILENAME_DOT_ZGROUP) - else List.empty - } yield - Ok( - views.html.datastoreZarrDatasourceDir( - "Datastore", - s"$organizationId/$datasetDirectoryName", - List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) ++ additionalVersionDependantFiles ++ layerNames - )) - } + def requestDataSourceDirectoryContents( + datasetId: ObjectId, + zarrVersion: Int + ): Action[AnyContent] = Action.async { implicit request => + accessTokenService.validateAccessFromTokenContext(UserAccessRequest.readDataset(datasetId)) { + for { + dataSource <- datasetCache.getById(datasetId) + files <- zarrStreamingService.dataSourceDirectoryContents(dataSource, zarrVersion) + } yield + Ok( + views.html.datastoreZarrDatasourceDir( + 
"Datastore", + datasetId.toString, + List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) ++ files + )) } + } def dataSourceDirectoryContentsPrivateLink(accessToken: String, zarrVersion: Int): Action[AnyContent] = Action.async { implicit request => for { - annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) - dataSource <- dataSourceRepository - .findUsable(DataSourceId(annotationSource.datasetDirectoryName, annotationSource.organizationId)) - .toFox ?~> Messages("dataSource.notFound") ~> NOT_FOUND - annotationLayerNames = annotationSource.annotationLayers.filter(_.typ == AnnotationLayerType.Volume).map(_.name) - dataSourceLayerNames = dataSource.dataLayers - .map((dataLayer: DataLayer) => dataLayer.name) - .filter(!annotationLayerNames.contains(_)) - layerNames = annotationLayerNames ++ dataSourceLayerNames - additionalEntries = if (zarrVersion == 2) - List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON, NgffGroupHeader.FILENAME_DOT_ZGROUP) - else - List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) + contents <- zarrStreamingService.dataSourceDirectoryContentsPrivateLink(accessToken, zarrVersion) } yield Ok( views.html.datastoreZarrDatasourceDir( "Combined datastore and tracingstore directory", s"$accessToken", - additionalEntries ++ layerNames + contents )) } - def requestZGroup(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String = ""): Action[AnyContent] = + def requestZGroup(datasetId: ObjectId, dataLayerName: String = ""): Action[AnyContent] = Action.async { implicit request => - accessTokenService.validateAccessFromTokenContextForSyncBlock( - UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) { - Ok(zGroupJson) + accessTokenService.validateAccessFromTokenContextForSyncBlock(UserAccessRequest.readDataset(datasetId)) { + Ok(zarrStreamingService.zGroupJson) } } - private def zGroupJson: JsValue = Json.toJson(NgffGroupHeader(zarr_format = 2)) - def zGroupPrivateLink(accessToken: String, dataLayerName: String): Action[AnyContent] = Action.async { implicit request => ifIsAnnotationLayerOrElse( @@ -596,7 +458,7 @@ class ZarrStreamingController @Inject()( remoteTracingstoreClient .getZGroup(annotationLayer.tracingId, annotationSource.tracingStoreUrl)(relevantTokenContext) .map(Ok(_)), - orElse = _ => Fox.successful(Ok(zGroupJson)) + orElse = _ => Fox.successful(Ok(zarrStreamingService.zGroupJson)) ) } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala index 73780c0c373..e2d3326d6ad 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala @@ -6,7 +6,7 @@ import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, Datas import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource._ import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService -import play.api.libs.json.{Json, OFormat} +import play.api.libs.json.{Format, JsError, JsResult, JsSuccess, JsValue, Json, OFormat} import ucar.ma2.{Array => MultiArray} case class WKWResolution(resolution: Vec3Int, @@ -15,13 +15,15 @@ case class 
WKWResolution(resolution: Vec3Int, credentialId: Option[String] = None) { def toMagLocator: MagLocator = MagLocator(mag = resolution, path = path, credentialId = credentialId) -} +} object WKWResolution extends MagFormatHelper { implicit val jsonFormat: OFormat[WKWResolution] = Json.format[WKWResolution] + + def defaultCubeSize = 1024 } -trait WKWLayer extends DataLayer { +trait WKWLayer extends DataLayerWithMagLocators { val dataFormat: DataFormat.Value = DataFormat.wkw @@ -30,14 +32,7 @@ trait WKWLayer extends DataLayer { sharedChunkContentsCache: Option[AlfuCache[String, MultiArray]]): BucketProvider = new DatasetArrayBucketProvider(this, dataSourceId, remoteSourceDescriptorServiceOpt, sharedChunkContentsCache) - def wkwResolutions: List[WKWResolution] - - def mags: List[MagLocator] = wkwResolutions.map(_.toMagLocator) - - def resolutions: List[Vec3Int] = wkwResolutions.map(_.resolution) - - def lengthOfUnderlyingCubes(mag: Vec3Int): Int = - wkwResolutions.find(_.resolution == mag).map(_.cubeLength).getOrElse(0) + override def lengthOfUnderlyingCubes(mag: Vec3Int): Int = WKWResolution.defaultCubeSize } @@ -45,23 +40,62 @@ case class WKWDataLayer( name: String, category: Category.Value, boundingBox: BoundingBox, - wkwResolutions: List[WKWResolution], + mags: List[MagLocator], elementClass: ElementClass.Value, defaultViewConfiguration: Option[LayerViewConfiguration] = None, adminViewConfiguration: Option[LayerViewConfiguration] = None, coordinateTransformations: Option[List[CoordinateTransformation]] = None, additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None, -) extends WKWLayer +) extends WKWLayer { + override def resolutions: List[Vec3Int] = mags.map(_.mag) +} object WKWDataLayer { - implicit val jsonFormat: OFormat[WKWDataLayer] = Json.format[WKWDataLayer] + implicit val jsonFormat: Format[WKWDataLayer] = new Format[WKWDataLayer] { + def reads(json: JsValue): JsResult[WKWDataLayer] = + for { + mags: List[MagLocator] <- (json \ "mags").validate[List[MagLocator]] match { + case JsSuccess(value, _) => JsSuccess(value) + case JsError(_) => + (json \ "wkwResolutions").validate[List[WKWResolution]] match { + case JsSuccess(value, _) => JsSuccess(value.map(_.toMagLocator)) + case JsError(_) => JsError("Either 'mags' or 'wkwResolutions' must be provided") + } + } + name <- (json \ "name").validate[String] + category <- (json \ "category").validate[Category.Value] + boundingBox <- (json \ "boundingBox").validate[BoundingBox] + elementClass <- (json \ "elementClass").validate[ElementClass.Value] + defaultViewConfiguration <- (json \ "defaultViewConfiguration").validateOpt[LayerViewConfiguration] + adminViewConfiguration <- (json \ "adminViewConfiguration").validateOpt[LayerViewConfiguration] + coordinateTransformations <- (json \ "coordinateTransformations").validateOpt[List[CoordinateTransformation]] + additionalAxes <- (json \ "additionalAxes").validateOpt[Seq[AdditionalAxis]] + attachments <- (json \ "attachments").validateOpt[DatasetLayerAttachments] + } yield { + WKWDataLayer( + name, + category, + boundingBox, + mags, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments + ) + } + + def writes(layer: WKWDataLayer): JsValue = + Json.writes[WKWDataLayer].writes(layer) + } } case class WKWSegmentationLayer( name: String, boundingBox: BoundingBox, - wkwResolutions: List[WKWResolution], + mags: List[MagLocator], elementClass: ElementClass.Value, mappings: 
Option[Set[String]], largestSegmentId: Option[Long] = None, @@ -71,8 +105,49 @@ case class WKWSegmentationLayer( additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None ) extends SegmentationLayer - with WKWLayer + with WKWLayer { + override def resolutions: List[Vec3Int] = mags.map(_.mag) +} object WKWSegmentationLayer { - implicit val jsonFormat: OFormat[WKWSegmentationLayer] = Json.format[WKWSegmentationLayer] + implicit val jsonFormat: Format[WKWSegmentationLayer] = new Format[WKWSegmentationLayer] { + def reads(json: JsValue): JsResult[WKWSegmentationLayer] = + for { + mags: List[MagLocator] <- (json \ "mags").validate[List[MagLocator]] match { + case JsSuccess(value, _) => JsSuccess(value) + case JsError(_) => + (json \ "wkwResolutions").validate[List[WKWResolution]] match { + case JsSuccess(value, _) => JsSuccess(value.map(_.toMagLocator)) + case JsError(_) => JsError("Either 'mags' or 'wkwResolutions' must be provided") + } + } + name <- (json \ "name").validate[String] + boundingBox <- (json \ "boundingBox").validate[BoundingBox] + elementClass <- (json \ "elementClass").validate[ElementClass.Value] + largestSegmentId <- (json \ "largestSegmentId").validateOpt[Long] + mappings <- (json \ "mappings").validateOpt[Set[String]] + defaultViewConfiguration <- (json \ "defaultViewConfiguration").validateOpt[LayerViewConfiguration] + adminViewConfiguration <- (json \ "adminViewConfiguration").validateOpt[LayerViewConfiguration] + coordinateTransformations <- (json \ "coordinateTransformations").validateOpt[List[CoordinateTransformation]] + additionalAxes <- (json \ "additionalAxes").validateOpt[Seq[AdditionalAxis]] + attachments <- (json \ "attachments").validateOpt[DatasetLayerAttachments] + } yield { + WKWSegmentationLayer( + name, + boundingBox, + mags, + elementClass, + mappings, + largestSegmentId, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments + ) + } + + def writes(layer: WKWSegmentationLayer): JsValue = + Json.writes[WKWSegmentationLayer].writes(layer) + } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/DatasetDeleter.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/DatasetDeleter.scala index 2e44ef0bc35..7935812ecb4 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/DatasetDeleter.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/helpers/DatasetDeleter.scala @@ -1,4 +1,5 @@ package com.scalableminds.webknossos.datastore.helpers +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} import com.scalableminds.webknossos.datastore.models.datasource.{ DataLayerWithMagLocators, @@ -21,10 +22,20 @@ import scala.concurrent.ExecutionContext trait DatasetDeleter extends LazyLogging with DirectoryConstants with FoxImplicits { def dataBaseDir: Path - def deleteOnDisk(organizationId: String, - datasetName: String, - isInConversion: Boolean = false, - reason: Option[String] = None)(implicit ec: ExecutionContext): Fox[Unit] = { + def existsOnDisk(organizationId: String, datasetDirectoryName: String, isInConversion: Boolean = false): Boolean = { + val dataSourcePath = + if (isInConversion) dataBaseDir.resolve(organizationId).resolve(forConversionDir).resolve(datasetDirectoryName) + else dataBaseDir.resolve(organizationId).resolve(datasetDirectoryName) + + Files.exists(dataSourcePath) + } + + 
def deleteOnDisk( + organizationId: String, + datasetName: String, + datasetId: Option[ObjectId], // Is only set for datasets that are already registered in WK. In this case, we query WK using this id for symlink paths and move them. + isInConversion: Boolean = false, + reason: Option[String] = None)(implicit ec: ExecutionContext): Fox[Unit] = { @tailrec def deleteWithRetry(sourcePath: Path, targetPath: Path, retryCount: Int = 0): Fox[Unit] = try { @@ -63,7 +74,7 @@ trait DatasetDeleter extends LazyLogging with DirectoryConstants with FoxImplici else dataBaseDir.resolve(organizationId).resolve(datasetName) for { - _ <- moveSymlinks(organizationId, datasetName) ?~> "Failed to remake symlinks" + _ <- Fox.runOptional(datasetId)(d => moveSymlinks(organizationId, datasetName, d)) ?~> "Failed to remake symlinks" _ <- moveToTrash(organizationId, datasetName, dataSourcePath, reason) } yield () } @@ -72,10 +83,11 @@ trait DatasetDeleter extends LazyLogging with DirectoryConstants with FoxImplici // Handle references to layers and mags that are deleted - private def moveSymlinks(organizationId: String, datasetName: String)(implicit ec: ExecutionContext) = + private def moveSymlinks(organizationId: String, datasetName: String, datasetId: ObjectId)( + implicit ec: ExecutionContext) = for { dataSourceId <- Fox.successful(DataSourceId(datasetName, organizationId)) - layersAndLinkedMags <- remoteWebknossosClient.fetchPaths(dataSourceId) + layersAndLinkedMags <- remoteWebknossosClient.fetchPaths(datasetId) exceptionBoxes = layersAndLinkedMags.map(layerMagLinkInfo => handleLayerSymlinks(dataSourceId, layerMagLinkInfo.layerName, layerMagLinkInfo.magLinkInfos.toList)) _ <- Fox.assertNoFailure(exceptionBoxes) ?~> "Failed to move symlinks" diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala index 3dd8f210964..fef482e9833 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala @@ -12,7 +12,6 @@ import com.scalableminds.webknossos.datastore.dataformats.layers.{ PrecomputedDataLayer, PrecomputedSegmentationLayer, WKWDataLayer, - WKWResolution, WKWSegmentationLayer, Zarr3DataLayer, Zarr3SegmentationLayer, @@ -266,15 +265,6 @@ trait DataLayerLike { case layer: Zarr3SegmentationLayer => layer.numChannels case _ => None } - - def wkwResolutionsOpt: Option[List[WKWResolution]] = this match { - case layer: AbstractDataLayer => layer.wkwResolutions - case layer: AbstractSegmentationLayer => layer.wkwResolutions - case layer: WKWDataLayer => Some(layer.wkwResolutions) - case layer: WKWSegmentationLayer => Some(layer.wkwResolutions) - case _ => None - } - } object DataLayerLike { @@ -490,6 +480,22 @@ trait DataLayerWithMagLocators extends DataLayer { name = name, coordinateTransformations = coordinateTransformations ) + case l: WKWDataLayer => + l.copy( + boundingBox = boundingBoxMapping(l.boundingBox), + defaultViewConfiguration = defaultViewConfigurationMapping(l.defaultViewConfiguration), + mags = l.mags.map(magMapping), + name = name, + coordinateTransformations = coordinateTransformations + ) + case l: WKWSegmentationLayer => + l.copy( + boundingBox = boundingBoxMapping(l.boundingBox), + defaultViewConfiguration = defaultViewConfigurationMapping(l.defaultViewConfiguration), + mags = 
l.mags.map(magMapping), + name = name, + coordinateTransformations = coordinateTransformations + ) case _ => throw new Exception("Encountered unsupported layer format") } @@ -503,6 +509,8 @@ trait DataLayerWithMagLocators extends DataLayer { case layer: ZarrSegmentationLayer => layer.mags case layer: Zarr3DataLayer => layer.mags case layer: Zarr3SegmentationLayer => layer.mags + case layer: WKWDataLayer => layer.mags + case layer: WKWSegmentationLayer => layer.mags case _ => throw new Exception("Encountered unsupported layer format") } @@ -527,7 +535,6 @@ case class AbstractDataLayer( mags: Option[List[MagLocator]] = None, numChannels: Option[Int] = None, dataFormat: Option[DataFormat.Value] = None, - wkwResolutions: Option[List[WKWResolution]] = None, ) extends DataLayerLike object AbstractDataLayer { @@ -546,8 +553,7 @@ object AbstractDataLayer { layer.attachments, layer.magsOpt, layer.numChannelsOpt, - layer.dataFormatOpt, - layer.wkwResolutionsOpt + layer.dataFormatOpt ) implicit val jsonFormat: OFormat[AbstractDataLayer] = Json.format[AbstractDataLayer] @@ -569,7 +575,6 @@ case class AbstractSegmentationLayer( mags: Option[List[MagLocator]] = None, numChannels: Option[Int] = None, dataFormat: Option[DataFormat.Value] = None, - wkwResolutions: Option[List[WKWResolution]] = None, ) extends SegmentationLayerLike object AbstractSegmentationLayer { @@ -591,7 +596,6 @@ object AbstractSegmentationLayer { layer.magsOpt, layer.numChannelsOpt, layer.dataFormatOpt, - layer.wkwResolutionsOpt ) implicit val jsonFormat: OFormat[AbstractSegmentationLayer] = Json.format[AbstractSegmentationLayer] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala index 848baba4bcf..ef68bc011b8 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AccessTokenService.scala @@ -43,11 +43,17 @@ object UserAccessRequest { def readDataset(datasetId: String): UserAccessRequest = UserAccessRequest(DataSourceId(datasetId, ""), AccessResourceType.dataset, AccessMode.read) + def readDataset(datasetId: ObjectId): UserAccessRequest = + UserAccessRequest(DataSourceId(datasetId.toString, ""), AccessResourceType.dataset, AccessMode.read) + + def deleteDataset(datasetId: ObjectId): UserAccessRequest = + UserAccessRequest(DataSourceId(datasetId.toString, ""), AccessResourceType.dataset, AccessMode.delete) + def writeDataSource(dataSourceId: DataSourceId): UserAccessRequest = UserAccessRequest(dataSourceId, AccessResourceType.datasource, AccessMode.write) - def writeDataset(datasetId: String): UserAccessRequest = - UserAccessRequest(DataSourceId(datasetId, ""), AccessResourceType.dataset, AccessMode.write) + def writeDataset(datasetId: ObjectId): UserAccessRequest = + UserAccessRequest(DataSourceId(datasetId.toString, ""), AccessResourceType.dataset, AccessMode.write) def readTracing(tracingId: String): UserAccessRequest = UserAccessRequest(DataSourceId(tracingId, ""), AccessResourceType.tracing, AccessMode.read) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala index d730d01677d..2311fe50e5b 100644 --- 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala @@ -6,13 +6,14 @@ import com.google.inject.name.Named import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper} import com.scalableminds.webknossos.datastore.DataStoreConfig import com.scalableminds.webknossos.datastore.controllers.JobExportProperties import com.scalableminds.webknossos.datastore.helpers.{IntervalScheduler, LayerMagLinkInfo} import com.scalableminds.webknossos.datastore.models.UnfinishedUpload import com.scalableminds.webknossos.datastore.models.annotation.AnnotationSource -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, GenericDataSource} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId, GenericDataSource} import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSourceLike import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.uploading.{ @@ -49,6 +50,12 @@ object MagPathInfo { implicit val jsonFormat: OFormat[MagPathInfo] = Json.format[MagPathInfo] } +case class DataSourceRegistrationInfo(dataSource: DataSource, folderId: Option[String]) + +object DataSourceRegistrationInfo { + implicit val jsonFormat: OFormat[DataSourceRegistrationInfo] = Json.format[DataSourceRegistrationInfo] +} + trait RemoteWebknossosClient { def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] } @@ -120,9 +127,8 @@ class DSRemoteWebknossosClient @Inject()( .silent .putJson(dataSourcePaths) - def fetchPaths(dataSourceId: DataSourceId): Fox[List[LayerMagLinkInfo]] = - rpc( - s"$webknossosUri/api/datastores/$dataStoreName/datasources/${dataSourceId.organizationId}/${dataSourceId.directoryName}/paths") + def fetchPaths(datasetId: ObjectId): Fox[List[LayerMagLinkInfo]] = + rpc(s"$webknossosUri/api/datastores/$dataStoreName/datasources/${datasetId}/paths") .addQueryString("key" -> dataStoreKey) .getWithJsonResponse[List[LayerMagLinkInfo]] @@ -135,11 +141,35 @@ class DSRemoteWebknossosClient @Inject()( .postJsonWithJsonResponse[ReserveUploadInformation, ReserveAdditionalInformation](info) } yield reserveUploadInfo + def registerDataSource(dataSource: DataSource, dataSourceId: DataSourceId, folderId: Option[String])( + implicit tc: TokenContext): Fox[String] = + for { + _ <- Fox.successful(()) + info = DataSourceRegistrationInfo(dataSource, folderId) + response <- rpc( + s"$webknossosUri/api/datastores/$dataStoreName/datasources/${dataSourceId.organizationId}/${dataSourceId.directoryName}") + .addQueryString("key" -> dataStoreKey) + .withTokenFromContext + .postJson[DataSourceRegistrationInfo](info) + datasetId = response.body + } yield datasetId + + def updateDataSource(dataSource: DataSource, datasetId: ObjectId)(implicit tc: TokenContext): Fox[_] = + rpc(s"$webknossosUri/api/datastores/$dataStoreName/datasources/${datasetId.toString}") + .addQueryString("key" -> dataStoreKey) + .withTokenFromContext + .putJson(dataSource) + def deleteDataSource(id: DataSourceId): Fox[_] = rpc(s"$webknossosUri/api/datastores/$dataStoreName/deleteDataset") .addQueryString("key" -> 
dataStoreKey) .postJson(id) + def deleteVirtualDataset(id: ObjectId): Fox[_] = + rpc(s"$webknossosUri/api/datastores/$dataStoreName/deleteVirtualDataset") + .addQueryString("key" -> dataStoreKey) + .postJson(id) + def getJobExportProperties(jobId: String): Fox[JobExportProperties] = rpc(s"$webknossosUri/api/datastores/$dataStoreName/jobExportProperties") .addQueryString("jobId" -> jobId) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala index 2fcc38e2641..cffb1c96278 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala @@ -269,7 +269,7 @@ class DataSourceService @Inject()( } } - def updateDataSource(dataSource: DataSource, expectExisting: Boolean): Fox[Unit] = { + def updateDataSourceOnDisk(dataSource: DataSource, expectExisting: Boolean): Fox[Unit] = { val organizationDir = dataBaseDir.resolve(dataSource.id.organizationId) val dataSourcePath = organizationDir.resolve(dataSource.id.directoryName) for { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetCache.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetCache.scala index 6e22f215db8..c9529c6471f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetCache.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DatasetCache.scala @@ -26,6 +26,6 @@ class DatasetCache @Inject()(remoteWebknossosClient: DSRemoteWebknossosClient)(i dataLayer <- dataSource.getDataLayer(dataLayerName).toFox ?~> "Data layer not found" } yield (dataSource, dataLayer) - def invalidateCache(datasetId: String): Unit = cache.remove(ObjectId(datasetId)) + def invalidateCache(datasetId: ObjectId): Unit = cache.remove(datasetId) } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ZarrStreamingService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ZarrStreamingService.scala new file mode 100644 index 00000000000..54813a70cc5 --- /dev/null +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/ZarrStreamingService.scala @@ -0,0 +1,224 @@ +package com.scalableminds.webknossos.datastore.services + +import com.google.inject.Inject +import com.scalableminds.util.accesscontext.TokenContext +import com.scalableminds.util.geometry.Vec3Int +import com.scalableminds.util.tools.{Fox, FoxImplicits} +import com.scalableminds.webknossos.datastore.dataformats.MagLocator +import com.scalableminds.webknossos.datastore.dataformats.layers.{ZarrDataLayer, ZarrLayer, ZarrSegmentationLayer} +import com.scalableminds.webknossos.datastore.dataformats.zarr.{Zarr3OutputHelper, ZarrCoordinatesParser} +import com.scalableminds.webknossos.datastore.datareaders.zarr._ +import com.scalableminds.webknossos.datastore.datareaders.zarr3.{NgffZarr3GroupHeader, Zarr3ArrayHeader} +import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType +import com.scalableminds.webknossos.datastore.models.datasource._ +import com.scalableminds.webknossos.datastore.models.requests._ +import com.scalableminds.webknossos.datastore.models.VoxelPosition +import play.api.i18n.{Messages, MessagesProvider} +import play.api.libs.json.{JsValue, Json} + 
+import scala.concurrent.ExecutionContext +import com.scalableminds.webknossos.datastore.datareaders.AxisOrder + +class ZarrStreamingService @Inject()( + datasetCache: DatasetCache, + binaryDataServiceHolder: BinaryDataServiceHolder, + remoteWebknossosClient: DSRemoteWebknossosClient, +)(implicit ec: ExecutionContext) + extends Zarr3OutputHelper + with FoxImplicits { + + val binaryDataService: BinaryDataService = binaryDataServiceHolder.binaryDataService + + def getHeader( + dataSource: DataSource, + dataLayer: DataLayer, + ): NgffMetadata = + NgffMetadata.fromNameVoxelSizeAndMags(dataLayer.name, dataSource.scale, dataLayer.sortedMags) + + def getGroupHeader( + dataSource: DataSource, + dataLayer: DataLayer + ): NgffZarr3GroupHeader = { + val omeNgffHeaderV0_5 = NgffMetadataV0_5.fromNameVoxelSizeAndMags(dataLayer.name, + dataSource.scale, + dataLayer.sortedMags, + dataLayer.additionalAxes) + + val zarr3GroupHeader = NgffZarr3GroupHeader(3, "group", omeNgffHeaderV0_5) + zarr3GroupHeader + } + + def zGroupJson: JsValue = Json.toJson(NgffGroupHeader(zarr_format = 2)) + + def getZarrDataSource( + dataSource: DataSource, + zarrVersion: Int + ): DataSource = { + val dataLayers = dataSource.dataLayers + val zarrLayers = dataLayers.map(convertLayerToZarrLayer(_, zarrVersion)) + val zarrSource = GenericDataSource[DataLayer](dataSource.id, zarrLayers, dataSource.scale) + zarrSource + } + + private def convertLayerToZarrLayer(layer: DataLayer, zarrVersion: Int): ZarrLayer = { + val dataFormat = if (zarrVersion == 2) DataFormat.zarr else DataFormat.zarr3 + layer match { + case s: SegmentationLayer => + val rank = s.additionalAxes.map(_.length).getOrElse(0) + 4 + ZarrSegmentationLayer( + s.name, + s.boundingBox, + s.elementClass, + mags = s.sortedMags.map( + m => + MagLocator(m, + Some(s"./${s.name}/${m.toMagLiteral(allowScalar = true)}"), + None, + Some(AxisOrder.cAdditionalxyz(rank)), + None, + None)), + mappings = s.mappings, + largestSegmentId = s.largestSegmentId, + numChannels = Some(if (s.elementClass == ElementClass.uint24) 3 else 1), + defaultViewConfiguration = s.defaultViewConfiguration, + adminViewConfiguration = s.adminViewConfiguration, + coordinateTransformations = s.coordinateTransformations, + additionalAxes = s.additionalAxes.map(reorderAdditionalAxes), + dataFormat = dataFormat + ) + case d: DataLayer => + val rank = d.additionalAxes.map(_.length).getOrElse(0) + 4 + ZarrDataLayer( + d.name, + d.category, + d.boundingBox, + d.elementClass, + mags = d.sortedMags.map( + m => + MagLocator(m, + Some(s"./${d.name}/${m.toMagLiteral(allowScalar = true)}"), + None, + Some(AxisOrder.cAdditionalxyz(rank)), + None, + None)), + numChannels = Some(if (d.elementClass == ElementClass.uint24) 3 else 1), + defaultViewConfiguration = d.defaultViewConfiguration, + adminViewConfiguration = d.adminViewConfiguration, + coordinateTransformations = d.coordinateTransformations, + additionalAxes = d.additionalAxes.map(reorderAdditionalAxes), + dataFormat = dataFormat + ) + } + } + + def rawZarrCube( + dataSource: DataSource, + dataLayer: DataLayer, + mag: String, + coordinates: String + )(implicit m: MessagesProvider, tc: TokenContext): Fox[Array[Byte]] = + for { + _ <- Fox.successful(()) + reorderedAdditionalAxes = dataLayer.additionalAxes.map(reorderAdditionalAxes) + (x, y, z, additionalCoordinates) <- ZarrCoordinatesParser.parseNDimensionalDotCoordinates( + coordinates, + reorderedAdditionalAxes) ?~> "zarr.invalidChunkCoordinates" + magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true).toFox ?~> 
Messages("dataLayer.invalidMag", mag) + dataLayerName = dataLayer.name + _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) + cubeSize = DataLayer.bucketLength + request = DataServiceDataRequest( + Some(dataSource.id), + dataLayer, + Cuboid( + topLeft = VoxelPosition(x * cubeSize * magParsed.x, + y * cubeSize * magParsed.y, + z * cubeSize * magParsed.z, + magParsed), + width = cubeSize, + height = cubeSize, + depth = cubeSize + ), + DataServiceRequestSettings(halfByte = false, additionalCoordinates = additionalCoordinates) + ) + (data, notFoundIndices) <- binaryDataService.handleDataRequests(List(request)) + _ <- Fox.fromBool(notFoundIndices.isEmpty) ~> "zarr.chunkNotFound" + } yield data + + def getZArray( + dataLayer: DataLayer, + mag: String + )(implicit m: MessagesProvider): Fox[ZarrHeader] = + for { + magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true).toFox ?~> Messages("dataLayer.invalidMag", mag) + dataLayerName = dataLayer.name + _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) + } yield ZarrHeader.fromLayer(dataLayer, magParsed) + + def requestZarrJsonForMag( + dataSource: DataSource, + dataLayer: DataLayer, + mag: String + )(implicit m: MessagesProvider): Fox[Zarr3ArrayHeader] = + for { + magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true).toFox ?~> Messages("dataLayer.invalidMag", mag) + dataLayerName = dataLayer.name + _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) + zarrHeader = Zarr3ArrayHeader.fromDataLayer(dataLayer, magParsed) + } yield zarrHeader + + def dataLayerDirectoryContents( + dataSource: DataSource, + dataLayer: DataLayer, + zarrVersion: Int + ): Fox[List[String]] = + for { + _ <- Fox.successful(()) + mags = dataLayer.sortedMags + additionalFiles = if (zarrVersion == 2) + List(NgffMetadata.FILENAME_DOT_ZATTRS, NgffGroupHeader.FILENAME_DOT_ZGROUP) + else List(Zarr3ArrayHeader.FILENAME_ZARR_JSON) + } yield (additionalFiles ++ mags.map(_.toMagLiteral(allowScalar = true))) + + def dataLayerMagDirectoryContents( + dataSource: DataSource, + dataLayer: DataLayer, + mag: String, + zarrVersion: Int + )(implicit m: MessagesProvider): Fox[List[String]] = + for { + magParsed <- Vec3Int.fromMagLiteral(mag, allowScalar = true).toFox ?~> Messages("dataLayer.invalidMag", mag) + dataLayerName = dataLayer.name + _ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) + additionalEntries = if (zarrVersion == 2) List(ZarrHeader.FILENAME_DOT_ZARRAY) + else List(Zarr3ArrayHeader.FILENAME_ZARR_JSON) + } yield additionalEntries + + def dataSourceDirectoryContents( + dataSource: DataSource, + zarrVersion: Int + ): Fox[List[String]] = + for { + _ <- Fox.successful(()) + layerNames = dataSource.dataLayers.map((dataLayer: DataLayer) => dataLayer.name) + additionalVersionDependantFiles = if (zarrVersion == 2) List(NgffGroupHeader.FILENAME_DOT_ZGROUP) + else List.empty + } yield (layerNames ++ additionalVersionDependantFiles) + + def dataSourceDirectoryContentsPrivateLink(accessToken: String, zarrVersion: Int)( + implicit tc: TokenContext): Fox[List[String]] = + for { + annotationSource <- remoteWebknossosClient.getAnnotationSource(accessToken) + dataSource <- datasetCache.getById(annotationSource.datasetId) + annotationLayerNames = annotationSource.annotationLayers.filter(_.typ == AnnotationLayerType.Volume).map(_.name) + dataSourceLayerNames = 
dataSource.dataLayers + .map((dataLayer: DataLayer) => dataLayer.name) + .filter(!annotationLayerNames.contains(_)) + layerNames = annotationLayerNames ++ dataSourceLayerNames + additionalEntries = if (zarrVersion == 2) + List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON, NgffGroupHeader.FILENAME_DOT_ZGROUP) + else + List(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON) + } yield additionalEntries ++ layerNames + +} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/DSFullMeshService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/DSFullMeshService.scala index c577a4b636a..f66aa668de7 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/DSFullMeshService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/mesh/DSFullMeshService.scala @@ -6,7 +6,7 @@ import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.DataStoreConfig -import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, SegmentationLayer} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, SegmentationLayer} import com.scalableminds.webknossos.datastore.models.requests.Cuboid import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, VoxelPosition} import com.scalableminds.webknossos.datastore.services._ @@ -52,29 +52,21 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, (binaryDataService, mappingService, config.Datastore.AdHocMesh.timeout, config.Datastore.AdHocMesh.actorPoolSize) val adHocMeshService: AdHocMeshService = adHocMeshServiceHolder.dataStoreAdHocMeshService - def loadFor(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, - m: MessagesProvider, - tc: TokenContext): Fox[Array[Byte]] = + def loadFor(dataSource: DataSource, dataLayer: DataLayer, fullMeshRequest: FullMeshRequest)( + implicit ec: ExecutionContext, + m: MessagesProvider, + tc: TokenContext): Fox[Array[Byte]] = if (fullMeshRequest.meshFileName.isDefined) - loadFullMeshFromMeshFile(organizationId, datasetDirectoryName, dataLayerName, fullMeshRequest) + loadFullMeshFromMeshFile(dataSource, dataLayer, fullMeshRequest) else - loadFullMeshFromAdHoc(organizationId, datasetDirectoryName, dataLayerName, fullMeshRequest) + loadFullMeshFromAdHoc(dataSource, dataLayer, fullMeshRequest) - private def loadFullMeshFromAdHoc(organizationId: String, - datasetName: String, - dataLayerName: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, - m: MessagesProvider, - tc: TokenContext): Fox[Array[Byte]] = + private def loadFullMeshFromAdHoc(dataSource: DataSource, dataLayer: DataLayer, fullMeshRequest: FullMeshRequest)( + implicit ec: ExecutionContext, + tc: TokenContext): Fox[Array[Byte]] = for { mag <- fullMeshRequest.mag.toFox ?~> "mag.neededForAdHoc" seedPosition <- fullMeshRequest.seedPosition.toFox ?~> "seedPosition.neededForAdHoc" - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetName, - dataLayerName) segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> "dataLayer.mustBeSegmentation" before = Instant.now verticesForChunks <- getAllAdHocChunks(dataSource, @@ -119,17 
+111,12 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository, } yield allVertices } - private def loadFullMeshFromMeshFile(organizationId: String, - datasetDirectoryName: String, - dataLayerName: String, - fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, - m: MessagesProvider, - tc: TokenContext): Fox[Array[Byte]] = + private def loadFullMeshFromMeshFile(dataSource: DataSource, dataLayer: DataLayer, fullMeshRequest: FullMeshRequest)( + implicit ec: ExecutionContext, + m: MessagesProvider, + tc: TokenContext): Fox[Array[Byte]] = for { before <- Instant.nowFox - (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId, - datasetDirectoryName, - dataLayerName) meshFileName <- fullMeshRequest.meshFileName.toFox ?~> "mesh.meshFileName.required" meshFileKey <- meshFileService.lookUpMeshFileKey(dataSource.id, dataLayer, meshFileName) mappingNameForMeshFile <- meshFileService.mappingNameForMeshFile(meshFileKey) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala deleted file mode 100644 index d886bec727b..00000000000 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/ComposeService.scala +++ /dev/null @@ -1,130 +0,0 @@ -package com.scalableminds.webknossos.datastore.services.uploading - -import com.scalableminds.util.accesscontext.TokenContext -import com.scalableminds.util.io.PathUtils -import com.scalableminds.util.tools.{Fox, FoxImplicits} -import com.scalableminds.webknossos.datastore.dataformats.layers.{WKWDataLayer, WKWSegmentationLayer} -import com.scalableminds.webknossos.datastore.models.VoxelSize -import com.scalableminds.webknossos.datastore.models.datasource._ -import com.scalableminds.webknossos.datastore.services.{ - DSRemoteWebknossosClient, - DataSourceRepository, - DataSourceService -} -import play.api.libs.json.{Json, OFormat} - -import java.nio.charset.StandardCharsets -import java.nio.file.{Files, Path} -import javax.inject.Inject -import scala.concurrent.ExecutionContext - -case class ComposeRequest( - newDatasetName: String, - targetFolderId: String, - organizationId: String, - voxelSize: VoxelSize, - layers: Seq[ComposeRequestLayer] -) - -object ComposeRequest { - implicit val composeRequestFormat: OFormat[ComposeRequest] = Json.format[ComposeRequest] -} -case class ComposeRequestLayer( - dataSourceId: DataSourceId, - sourceName: String, - newName: String, - transformations: Seq[CoordinateTransformation] -) - -object ComposeRequestLayer { - implicit val composeLayerFormat: OFormat[ComposeRequestLayer] = Json.format[ComposeRequestLayer] -} - -class ComposeService @Inject()(dataSourceRepository: DataSourceRepository, - remoteWebknossosClient: DSRemoteWebknossosClient, - dataSourceService: DataSourceService, - datasetSymlinkService: DatasetSymlinkService)(implicit ec: ExecutionContext) - extends FoxImplicits { - - val dataBaseDir: Path = datasetSymlinkService.dataBaseDir - - private def uploadDirectory(organizationId: String, datasetDirectoryName: String): Path = - dataBaseDir.resolve(organizationId).resolve(datasetDirectoryName) - - def composeDataset(composeRequest: ComposeRequest)(implicit tc: TokenContext): Fox[(DataSource, String)] = - for { - _ <- dataSourceService.assertDataDirWritable(composeRequest.organizationId) - reserveUploadInfo = ReserveUploadInformation( - "", - 
composeRequest.newDatasetName, - composeRequest.organizationId, - 1, - None, - None, - None, - List(), - Some(composeRequest.targetFolderId), - requireUniqueName = Some(false) - ) - reservedAdditionalInfo <- remoteWebknossosClient.reserveDataSourceUpload(reserveUploadInfo) ?~> "Failed to reserve upload." - directory = uploadDirectory(composeRequest.organizationId, reservedAdditionalInfo.directoryName) - _ = PathUtils.ensureDirectory(directory) - dataSource <- createDatasource(composeRequest, - reservedAdditionalInfo.directoryName, - composeRequest.organizationId, - directory) - properties = Json.toJson(dataSource).toString().getBytes(StandardCharsets.UTF_8) - _ = Files.write(directory.resolve(GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON), properties) - } yield (dataSource, reservedAdditionalInfo.newDatasetId.toString) - - private def getLayerFromComposeLayer(composeLayer: ComposeRequestLayer, uploadDir: Path): Fox[DataLayer] = - for { - dataSource <- dataSourceRepository.get(composeLayer.dataSourceId).toFox - ds <- dataSource.toUsable.toFox - layer <- ds.dataLayers.find(_.name == composeLayer.sourceName).toFox - applyCoordinateTransformations = (cOpt: Option[List[CoordinateTransformation]]) => - cOpt match { - case Some(c) => Some(c ++ composeLayer.transformations.toList) - case None => Some(composeLayer.transformations.toList) - } - linkedLayerIdentifier = LinkedLayerIdentifier(composeLayer.dataSourceId.organizationId, - composeLayer.dataSourceId.directoryName, - composeLayer.sourceName, - Some(composeLayer.newName)) - layerIsRemote = isLayerRemote(composeLayer.dataSourceId, composeLayer.sourceName) - _ <- Fox.runIf(!layerIsRemote)( - datasetSymlinkService.addSymlinksToOtherDatasetLayers(uploadDir, List(linkedLayerIdentifier))) - editedLayer: DataLayer = layer match { - case l: DataLayerWithMagLocators => - l.mapped(name = composeLayer.newName, - coordinateTransformations = applyCoordinateTransformations(l.coordinateTransformations)) - case l: WKWDataLayer => - l.copy(name = composeLayer.newName, - coordinateTransformations = applyCoordinateTransformations(l.coordinateTransformations)) - case l: WKWSegmentationLayer => - l.copy(name = composeLayer.newName, - coordinateTransformations = applyCoordinateTransformations(l.coordinateTransformations)) - } - } yield editedLayer - - private def createDatasource(composeRequest: ComposeRequest, - datasetDirectoryName: String, - organizationId: String, - uploadDir: Path): Fox[DataSource] = - for { - layers <- Fox.serialCombined(composeRequest.layers.toList)(getLayerFromComposeLayer(_, uploadDir)) - dataSource = GenericDataSource( - DataSourceId(datasetDirectoryName, organizationId), - layers, - composeRequest.voxelSize, - None - ) - - } yield dataSource - - private def isLayerRemote(dataSourceId: DataSourceId, layerName: String) = { - val layerPath = - dataBaseDir.resolve(dataSourceId.organizationId).resolve(dataSourceId.directoryName).resolve(layerName) - !Files.exists(layerPath) - } -} diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala index 81bd7deb67c..822558a12ab 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala @@ -441,6 +441,7 @@ class UploadService @Inject()(dataSourceRepository: 
DataSourceRepository, case Empty => deleteOnDisk(dataSourceId.organizationId, dataSourceId.directoryName, + None, datasetNeedsConversion, Some("the upload failed")) Fox.failure(s"Unknown error $label") @@ -448,6 +449,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, logger.warn(s"Error while $label: $msg, $e") deleteOnDisk(dataSourceId.organizationId, dataSourceId.directoryName, + None, datasetNeedsConversion, Some("the upload failed")) dataSourceRepository.removeDataSource(dataSourceId) @@ -494,7 +496,7 @@ class UploadService @Inject()(dataSourceRepository: DataSourceRepository, dataSourceUsable <- dataSource.toUsable.toFox ?~> "Uploaded dataset has no valid properties file, cannot link layers" layers <- Fox.serialCombined(layersToLink)(layerFromIdentifier) dataSourceWithLinkedLayers = dataSourceUsable.copy(dataLayers = dataSourceUsable.dataLayers ::: layers) - _ <- dataSourceService.updateDataSource(dataSourceWithLinkedLayers, expectExisting = true) ?~> "Could not write combined properties file" + _ <- dataSourceService.updateDataSourceOnDisk(dataSourceWithLinkedLayers, expectExisting = true) ?~> "Could not write combined properties file" } yield () } diff --git a/webknossos-datastore/conf/datastore.latest.routes b/webknossos-datastore/conf/datastore.latest.routes index b5bef4bf108..546bcb7ab0f 100644 --- a/webknossos-datastore/conf/datastore.latest.routes +++ b/webknossos-datastore/conf/datastore.latest.routes @@ -4,36 +4,29 @@ GET /health @com.scalableminds.webknossos.datastore.controllers.Application.health # Read image data -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboidPost(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboid(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.thumbnailJpeg(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.findData(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.histogram(organizationId: String, datasetDirectoryName: String, dataLayerName: String) - -POST /wkDatasets/:datasetId/layers/:dataLayerName/data 
@com.scalableminds.webknossos.datastore.controllers.WKDatasetController.requestViaWebknossos(datasetId: String, dataLayerName: String) -POST /wkDatasets/:datasetId/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.WKDatasetController.requestRawCuboidPost(datasetId: String, dataLayerName: String) -GET /wkDatasets/:datasetId/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.WKDatasetController.requestRawCuboid(datasetId: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) -GET /wkDatasets/:datasetId/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.WKDatasetController.thumbnailJpeg(datasetId: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) -GET /wkDatasets/:datasetId/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.WKDatasetController.findData(datasetId: String, dataLayerName: String) -GET /wkDatasets/:datasetId/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.WKDatasetController.histogram(datasetId: String, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaWebknossos(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboidPost(datasetId: ObjectId, dataLayerName: String) +GET /datasets/:datasetId/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestRawCuboid(datasetId: ObjectId, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) +GET /datasets/:datasetId/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.thumbnailJpeg(datasetId: ObjectId, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) +GET /datasets/:datasetId/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.findData(datasetId: ObjectId, dataLayerName: String) +GET /datasets/:datasetId/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.histogram(datasetId: ObjectId, dataLayerName: String) # Knossos compatible routes -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaKnossos(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int) +GET /datasets/:datasetId/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestViaKnossos(datasetId: ObjectId, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int) # Zarr2 compatible routes -GET 
/zarr/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(organizationId: String, datasetDirectoryName: String, dataLayerName="") -GET /zarr/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZAttrs(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZArray(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String) -GET /zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String) +GET /zarr/:datasetId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: ObjectId, zarrVersion: Int = 2) +GET /zarr/:datasetId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: ObjectId, zarrVersion: Int = 2) +GET /zarr/:datasetId/.zgroup 
@com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(datasetId: ObjectId, dataLayerName="") +GET /zarr/:datasetId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(datasetId: ObjectId, zarrVersion: Int = 2) +GET /zarr/:datasetId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: ObjectId, dataLayerName: String, zarrVersion: Int = 2) +GET /zarr/:datasetId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: ObjectId, dataLayerName: String, zarrVersion: Int = 2) +GET /zarr/:datasetId/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZAttrs(datasetId: ObjectId, dataLayerName: String) +GET /zarr/:datasetId/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZGroup(datasetId: ObjectId, dataLayerName: String) +GET /zarr/:datasetId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: ObjectId, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /zarr/:datasetId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: ObjectId, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /zarr/:datasetId/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZArray(datasetId: ObjectId, dataLayerName: String, mag: String) +GET /zarr/:datasetId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(datasetId: ObjectId, dataLayerName: String, mag: String, coordinates: String) GET /annotations/zarr/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceDirectoryContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2) GET /annotations/zarr/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceDirectoryContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 2) @@ -49,16 +42,16 @@ GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/.zarray GET /annotations/zarr/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) # Zarr3 compatible routes -GET /zarr3_experimental/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3) -GET 
/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJson(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJsonForMag(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String) -GET /zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String) +GET /zarr3_experimental/:datasetId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: ObjectId, zarrVersion: Int = 3) +GET /zarr3_experimental/:datasetId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSourceDirectoryContents(datasetId: ObjectId, zarrVersion: Int = 3) +GET /zarr3_experimental/:datasetId/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataSource(datasetId: ObjectId, zarrVersion: Int = 3) +GET /zarr3_experimental/:datasetId/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: ObjectId, dataLayerName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:datasetId/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerDirectoryContents(datasetId: ObjectId, dataLayerName: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:datasetId/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJson(datasetId: ObjectId, dataLayerName: String) +GET /zarr3_experimental/:datasetId/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: ObjectId, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET 
/zarr3_experimental/:datasetId/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestDataLayerMagDirectoryContents(datasetId: ObjectId, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /zarr3_experimental/:datasetId/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestZarrJsonForMag(datasetId: ObjectId, dataLayerName: String, mag: String) +GET /zarr3_experimental/:datasetId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.requestRawZarrCube(datasetId: ObjectId, dataLayerName: String, mag: String, coordinates: String) GET /annotations/zarr3_experimental/:accessTokenOrId @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceDirectoryContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3) GET /annotations/zarr3_experimental/:accessTokenOrId/ @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.dataSourceDirectoryContentsPrivateLink(accessTokenOrId: String, zarrVersion: Int = 3) @@ -72,39 +65,39 @@ GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:m GET /annotations/zarr3_experimental/:accessTokenOrId/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.ZarrStreamingController.rawZarrCubePrivateLink(accessTokenOrId: String, dataLayerName: String, mag: String, coordinates: String) # Segmentation mappings -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.mappingJson(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listMappings(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:datasetId/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.mappingJson(datasetId: ObjectId, dataLayerName: String, mappingName: String) +GET /datasets/:datasetId/layers/:dataLayerName/mappings @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listMappings(datasetId: ObjectId, dataLayerName: String) # Agglomerate files -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listAgglomerates(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/skeleton/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.generateAgglomerateSkeleton(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/agglomerateGraph/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateGraph(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/largestAgglomerateId 
@com.scalableminds.webknossos.datastore.controllers.DataSourceController.largestAgglomerateId(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForSegmentIds(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/agglomerates/:mappingName/positionForSegment @com.scalableminds.webknossos.datastore.controllers.DataSourceController.positionForSegmentViaAgglomerateFile(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String, segmentId: Long) +GET /datasets/:datasetId/layers/:dataLayerName/agglomerates @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listAgglomerates(datasetId: ObjectId, dataLayerName: String) +GET /datasets/:datasetId/layers/:dataLayerName/agglomerates/:mappingName/skeleton/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.generateAgglomerateSkeleton(datasetId: ObjectId, dataLayerName: String, mappingName: String, agglomerateId: Long) +GET /datasets/:datasetId/layers/:dataLayerName/agglomerates/:mappingName/agglomerateGraph/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateGraph(datasetId: ObjectId, dataLayerName: String, mappingName: String, agglomerateId: Long) +GET /datasets/:datasetId/layers/:dataLayerName/agglomerates/:mappingName/largestAgglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.largestAgglomerateId(datasetId: ObjectId, dataLayerName: String, mappingName: String) +POST /datasets/:datasetId/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForSegmentIds(datasetId: ObjectId, dataLayerName: String, mappingName: String) +GET /datasets/:datasetId/layers/:dataLayerName/agglomerates/:mappingName/positionForSegment @com.scalableminds.webknossos.datastore.controllers.DataSourceController.positionForSegmentViaAgglomerateFile(datasetId: ObjectId, dataLayerName: String, mappingName: String, segmentId: Long) # Mesh files -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshFiles(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes/chunks @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshChunksForSegment(organizationId: String, datasetDirectoryName: String, dataLayerName: String, targetMappingName: Option[String], editableMappingTracingId: Option[String]) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes/chunks/data @com.scalableminds.webknossos.datastore.controllers.DSMeshController.readMeshChunk(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/meshes/fullMesh.stl @com.scalableminds.webknossos.datastore.controllers.DSMeshController.loadFullMeshStl(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET 
/datasets/:datasetId/layers/:dataLayerName/meshes @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshFiles(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/meshes/chunks @com.scalableminds.webknossos.datastore.controllers.DSMeshController.listMeshChunksForSegment(datasetId: ObjectId, dataLayerName: String, targetMappingName: Option[String], editableMappingTracingId: Option[String]) +POST /datasets/:datasetId/layers/:dataLayerName/meshes/chunks/data @com.scalableminds.webknossos.datastore.controllers.DSMeshController.readMeshChunk(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/meshes/fullMesh.stl @com.scalableminds.webknossos.datastore.controllers.DSMeshController.loadFullMeshStl(datasetId: ObjectId, dataLayerName: String) # Connectome files -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listConnectomeFiles(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes/synapses/positions @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsePositions(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes/synapses/types @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapseTypes(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes/synapses/:direction @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapticPartnerForSynapses(organizationId: String, datasetDirectoryName: String, dataLayerName: String, direction: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/connectomes/synapses @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsesForAgglomerates(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:datasetId/layers/:dataLayerName/connectomes @com.scalableminds.webknossos.datastore.controllers.DataSourceController.listConnectomeFiles(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/connectomes/synapses/positions @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsePositions(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/connectomes/synapses/types @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapseTypes(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/connectomes/synapses/:direction @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapticPartnerForSynapses(datasetId: ObjectId, dataLayerName: String, direction: String) +POST /datasets/:datasetId/layers/:dataLayerName/connectomes/synapses @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSynapsesForAgglomerates(datasetId: ObjectId, dataLayerName: String) # Ad-Hoc Meshing -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(organizationId: String, 
datasetDirectoryName: String, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.BinaryDataController.requestAdHocMesh(datasetId: ObjectId, dataLayerName: String) # Segment-Index files -GET /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/hasSegmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.checkSegmentIndexFile(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.querySegmentIndex(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentIndex/:segmentId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentIndex(organizationId: String, datasetDirectoryName: String, dataLayerName: String, segmentId: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentStatistics/volume @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentVolume(organizationId: String, datasetDirectoryName: String, dataLayerName: String) -POST /datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/segmentStatistics/boundingBox @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentBoundingBox(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /datasets/:datasetId/layers/:dataLayerName/hasSegmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.checkSegmentIndexFile(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/segmentIndex @com.scalableminds.webknossos.datastore.controllers.DataSourceController.querySegmentIndex(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/segmentIndex/:segmentId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentIndex(datasetId: ObjectId, dataLayerName: String, segmentId: String) +POST /datasets/:datasetId/layers/:dataLayerName/segmentStatistics/volume @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentVolume(datasetId: ObjectId, dataLayerName: String) +POST /datasets/:datasetId/layers/:dataLayerName/segmentStatistics/boundingBox @com.scalableminds.webknossos.datastore.controllers.DataSourceController.getSegmentBoundingBox(datasetId: ObjectId, dataLayerName: String) # DataSource management GET /datasets @com.scalableminds.webknossos.datastore.controllers.DataSourceController.testChunk(resumableChunkNumber: Int, resumableIdentifier: String) @@ -115,19 +108,18 @@ POST /datasets/reserveManualUpload POST /datasets/finishUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.finishUpload() POST /datasets/cancelUpload @com.scalableminds.webknossos.datastore.controllers.DataSourceController.cancelUpload() GET /datasets/measureUsedStorage/:organizationId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.measureUsedStorage(organizationId: String, datasetDirectoryName: Option[String]) -GET /datasets/:organizationId/:datasetDirectoryName/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(organizationId: String, datasetDirectoryName: 
String) -PUT /datasets/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(organizationId: String, datasetDirectoryName: String) +GET /datasets/:datasetId/readInboxDataSource @com.scalableminds.webknossos.datastore.controllers.DataSourceController.readInboxDataSource(datasetId: ObjectId) +PUT /datasets/:datasetId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.update(datasetId: ObjectId) POST /datasets/:organizationId/:datasetName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.add(organizationId: String, datasetName: String, folderId: Option[String]) -DELETE /datasets/:organizationId/:datasetDirectoryName/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(organizationId: String, datasetDirectoryName: String) -POST /datasets/compose @com.scalableminds.webknossos.datastore.controllers.DataSourceController.compose() +DELETE /datasets/:datasetId/deleteOnDisk @com.scalableminds.webknossos.datastore.controllers.DataSourceController.deleteOnDisk(datasetId: ObjectId) POST /datasets/exploreRemote @com.scalableminds.webknossos.datastore.controllers.DataSourceController.exploreRemoteDataset() -DELETE /wkDatasets/:datasetId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.invalidateCache(datasetId: String) +DELETE /datasets/:datasetId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.invalidateCache(datasetId: ObjectId) # Actions POST /triggers/checkInboxBlocking @com.scalableminds.webknossos.datastore.controllers.DataSourceController.triggerInboxCheckBlocking() POST /triggers/createOrganizationDirectory @com.scalableminds.webknossos.datastore.controllers.DataSourceController.createOrganizationDirectory(organizationId: String) -POST /triggers/reload/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reload(organizationId: String, datasetDirectoryName: String, layerName: Option[String]) +POST /triggers/reload/:organizationId/:datasetId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.reload(organizationId: String, datasetId: ObjectId, layerName: Option[String]) # Exports GET /exports/:jobId/download @com.scalableminds.webknossos.datastore.controllers.ExportsController.download(jobId: String) diff --git a/webknossos-datastore/conf/datastore.versioned.routes b/webknossos-datastore/conf/datastore.versioned.routes index 71101ccdbe3..698fcd3cdc9 100644 --- a/webknossos-datastore/conf/datastore.versioned.routes +++ b/webknossos-datastore/conf/datastore.versioned.routes @@ -1,9 +1,216 @@ # Note: keep this in sync with the reported version numbers in the com.scalableminds.util.mvc.ApiVersioning trait +-> /v10/ datastore.latest.Routes -> /v9/ datastore.latest.Routes + +# Read image data +POST /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestViaWebknossosV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawCuboidPostV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data 
@com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawCuboidV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) +GET /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.LegacyController.thumbnailJpegV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) +GET /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.LegacyController.findDataV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.LegacyController.histogramV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) + +GET /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestViaKnossosV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int) + +POST /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestAdHocMeshV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v9/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.LegacyController.mappingJsonV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) + + +# Zarr2 compatible routes +GET /v9/zarr/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) +GET /v9/zarr/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) +GET /v9/zarr/:organizationId/:datasetDirectoryName/.zgroup @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZGroupV9(organizationId: String, datasetDirectoryName: String, dataLayerName="") +GET /v9/zarr/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) +GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2) +GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, 
datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2) +GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZAttrsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZGroupV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZArrayV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String) +GET /v9/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawZarrCubeV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String) + +# Zarr3 compatible routes +GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3) +GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3) +GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3) +GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3) +GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3) +GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZarrJsonV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: 
String, mag: String, zarrVersion: Int = 3) +GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZarrJsonForMagV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String) +GET /v9/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawZarrCubeV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String) + -> /v8/ datastore.latest.Routes + +# Read image data +POST /v8/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestViaWebknossosV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /v8/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawCuboidPostV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v8/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawCuboidV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) +GET /v8/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.LegacyController.thumbnailJpegV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) +GET /v8/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.LegacyController.findDataV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v8/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.LegacyController.histogramV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) + +GET /v8/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestViaKnossosV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int) + +POST /v8/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestAdHocMeshV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v8/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings/:mappingName 
@com.scalableminds.webknossos.datastore.controllers.LegacyController.mappingJsonV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) + + +# Zarr2 compatible routes +GET /v8/zarr/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) +GET /v8/zarr/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) +GET /v8/zarr/:organizationId/:datasetDirectoryName/.zgroup @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZGroupV9(organizationId: String, datasetDirectoryName: String, dataLayerName="") +GET /v8/zarr/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) +GET /v8/zarr/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2) +GET /v8/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2) +GET /v8/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZAttrsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v8/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZGroupV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v8/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /v8/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2) +GET /v8/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZArrayV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String) +GET /v8/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawZarrCubeV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String) + +# Zarr3 compatible routes +GET /v8/zarr3_experimental/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: 
Int = 3) +GET /v8/zarr3_experimental/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3) +GET /v8/zarr3_experimental/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3) +GET /v8/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3) +GET /v8/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3) +GET /v8/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZarrJsonV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v8/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /v8/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3) +GET /v8/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZarrJsonForMagV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String) +GET /v8/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawZarrCubeV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String) + + -> /v7/ datastore.latest.Routes + +# Read image data +POST /v7/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestViaWebknossosV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +POST /v7/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawCuboidPostV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v7/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawCuboidV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String]) +GET 
/v7/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.LegacyController.thumbnailJpegV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean]) +GET /v7/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.LegacyController.findDataV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v7/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.LegacyController.histogramV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) + +GET /v7/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestViaKnossosV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int) + +POST /v7/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestAdHocMeshV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET /v7/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.LegacyController.mappingJsonV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String) + + +# Zarr2 compatible routes +GET /v7/zarr/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) +GET /v7/zarr/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) +GET /v7/zarr/:organizationId/:datasetDirectoryName/.zgroup @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZGroupV9(organizationId: String, datasetDirectoryName: String, dataLayerName="") +GET /v7/zarr/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2) +GET /v7/zarr/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2) +GET /v7/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2) +GET /v7/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZAttrsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String) +GET 
/v7/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZGroupV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v7/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2)
+GET /v7/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2)
+GET /v7/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZArrayV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String)
+GET /v7/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawZarrCubeV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String)
+
+# Zarr3 compatible routes
+GET /v7/zarr3_experimental/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
+GET /v7/zarr3_experimental/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
+GET /v7/zarr3_experimental/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
+GET /v7/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3)
+GET /v7/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3)
+GET /v7/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZarrJsonV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v7/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3)
+GET /v7/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3)
+GET /v7/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZarrJsonForMagV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String)
+GET /v7/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawZarrCubeV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String)
+
 -> /v6/ datastore.latest.Routes
+
+# Read image data
+POST /v6/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestViaWebknossosV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+POST /v6/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawCuboidPostV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v6/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawCuboidV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String])
+GET /v6/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.LegacyController.thumbnailJpegV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean])
+GET /v6/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.LegacyController.findDataV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v6/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.LegacyController.histogramV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+
+GET /v6/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestViaKnossosV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int)
+
+POST /v6/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestAdHocMeshV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v6/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.LegacyController.mappingJsonV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String)
+
+
+# Zarr2 compatible routes
+GET /v6/zarr/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2)
+GET /v6/zarr/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2)
+GET /v6/zarr/:organizationId/:datasetDirectoryName/.zgroup @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZGroupV9(organizationId: String, datasetDirectoryName: String, dataLayerName="")
+GET /v6/zarr/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2)
+GET /v6/zarr/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2)
+GET /v6/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2)
+GET /v6/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZAttrsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v6/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZGroupV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v6/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2)
+GET /v6/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2)
+GET /v6/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZArrayV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String)
+GET /v6/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawZarrCubeV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String)
+
+# Zarr3 compatible routes
+GET /v6/zarr3_experimental/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
+GET /v6/zarr3_experimental/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
+GET /v6/zarr3_experimental/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
+GET /v6/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3)
+GET /v6/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3)
+GET /v6/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZarrJsonV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v6/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3)
+GET /v6/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3)
+GET /v6/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZarrJsonForMagV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String)
+GET /v6/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawZarrCubeV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String)
+
 -> /v5/ datastore.latest.Routes
+# Read image data
+POST /v5/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestViaWebknossosV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+POST /v5/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/readData @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawCuboidPostV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v5/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/data @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawCuboidV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, depth: Int, mag: String, halfByte: Boolean ?= false, mappingName: Option[String])
+GET /v5/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/thumbnail.jpg @com.scalableminds.webknossos.datastore.controllers.LegacyController.thumbnailJpegV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, x: Int, y: Int, z: Int, width: Int, height: Int, mag: String, mappingName: Option[String], intensityMin: Option[Double], intensityMax: Option[Double], color: Option[String], invertColor: Option[Boolean])
+GET /v5/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/findData @com.scalableminds.webknossos.datastore.controllers.LegacyController.findDataV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v5/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/histogram @com.scalableminds.webknossos.datastore.controllers.LegacyController.histogramV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+
+GET /v5/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mag:mag/x:x/y:y/z:z/bucket.raw @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestViaKnossosV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: Int, x: Int, y: Int, z: Int, cubeSize: Int)
+
+POST /v5/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/adHocMesh @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestAdHocMeshV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v5/datasets/:organizationId/:datasetDirectoryName/layers/:dataLayerName/mappings/:mappingName @com.scalableminds.webknossos.datastore.controllers.LegacyController.mappingJsonV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mappingName: String)
+
+
+# Zarr2 compatible routes
+GET /v5/zarr/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2)
+GET /v5/zarr/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2)
+GET /v5/zarr/:organizationId/:datasetDirectoryName/.zgroup @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZGroupV9(organizationId: String, datasetDirectoryName: String, dataLayerName="")
+GET /v5/zarr/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 2)
+GET /v5/zarr/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2)
+GET /v5/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 2)
+GET /v5/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zattrs @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZAttrsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v5/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/.zgroup @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZGroupV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v5/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2)
+GET /v5/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 2)
+GET /v5/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/.zarray @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZArrayV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String)
+GET /v5/zarr/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawZarrCubeV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String)
+
+# Zarr3 compatible routes
+GET /v5/zarr3_experimental/:organizationId/:datasetDirectoryName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
+GET /v5/zarr3_experimental/:organizationId/:datasetDirectoryName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
+GET /v5/zarr3_experimental/:organizationId/:datasetDirectoryName/datasource-properties.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataSourceV9(organizationId: String, datasetDirectoryName: String, zarrVersion: Int = 3)
+GET /v5/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3)
+GET /v5/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, zarrVersion: Int = 3)
+GET /v5/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/zarr.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZarrJsonV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String)
+GET /v5/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3)
+GET /v5/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/ @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestDataLayerMagDirectoryContentsV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, zarrVersion: Int = 3)
+GET /v5/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/zarr.json @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestZarrJsonForMagV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String)
+GET /v5/zarr3_experimental/:organizationId/:datasetDirectoryName/:dataLayerName/:mag/:coordinates @com.scalableminds.webknossos.datastore.controllers.LegacyController.requestRawZarrCubeV9(organizationId: String, datasetDirectoryName: String, dataLayerName: String, mag: String, coordinates: String)
+
+
 -> / datastore.latest.Routes
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala
index 73d7cdd36fc..4f0ded8a08c 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteDatastoreClient.scala
@@ -36,7 +36,7 @@ class TSRemoteDatastoreClient @Inject()(
     with ProtoGeometryImplicits
     with MissingBucketHeaders {
-  private lazy val dataStoreUriCache: AlfuCache[(String, String), String] = AlfuCache()
+  private lazy val dataStoreUriCache: AlfuCache[ObjectId, String] = AlfuCache()
   private lazy val voxelSizeCache: AlfuCache[ObjectId, VoxelSize] = AlfuCache(timeToLive = 10 minutes)
   private lazy val largestAgglomerateIdCache: AlfuCache[(RemoteFallbackLayer, String, Option[String]), Long] =
     AlfuCache(timeToLive = 10 minutes)
@@ -145,23 +145,19 @@ class TSRemoteDatastoreClient @Inject()(
   private def voxelSizeForAnnotation(annotationId: ObjectId)(implicit tc: TokenContext): Fox[VoxelSize] =
     for {
-      dataSourceId <- remoteWebknossosClient.getDataSourceIdForAnnotation(annotationId)
-      dataStoreUri <- dataStoreUriWithCache(dataSourceId.organizationId, dataSourceId.directoryName)
-      result <- rpc(
-        s"$dataStoreUri/data/datasets/${dataSourceId.organizationId}/${dataSourceId.directoryName}/readInboxDataSource").withTokenFromContext
+      datasetId <- remoteWebknossosClient.getDatasetIdForAnnotation(annotationId)
+      dataStoreUri <- dataStoreUriWithCache(datasetId)
+      result <- rpc(s"$dataStoreUri/data/datasets/${datasetId}/readInboxDataSource").withTokenFromContext
         .getWithJsonResponse[InboxDataSource]
       scale <- result.voxelSizeOpt.toFox ?~> "could not determine voxel size of dataset"
     } yield scale
   private def getRemoteLayerUri(remoteLayer: RemoteFallbackLayer): Fox[String] =
     for {
-      datastoreUri <- dataStoreUriWithCache(remoteLayer.organizationId, remoteLayer.datasetDirectoryName)
-    } yield
-      s"$datastoreUri/data/datasets/${remoteLayer.organizationId}/${remoteLayer.datasetDirectoryName}/layers/${remoteLayer.layerName}"
-
-  private def dataStoreUriWithCache(organizationId: String, datasetDirectoryName: String): Fox[String] =
-    dataStoreUriCache.getOrLoad(
-      (organizationId, datasetDirectoryName),
-      keyTuple => remoteWebknossosClient.getDataStoreUriForDataSource(keyTuple._1, keyTuple._2))
+      datastoreUri <- dataStoreUriWithCache(remoteLayer.datasetId)
+    } yield s"$datastoreUri/data/datasets/${remoteLayer.datasetId}/layers/${remoteLayer.layerName}"
+
+  private def dataStoreUriWithCache(datasetId: ObjectId): Fox[String] =
+    dataStoreUriCache.getOrLoad(datasetId, keyTuple => remoteWebknossosClient.getDataStoreUriForDataset(datasetId))
 }
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala
index 109926be669..c577f611045 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/TSRemoteWebknossosClient.scala
@@ -10,7 +10,7 @@ import com.scalableminds.webknossos.datastore.Annotation.AnnotationProto
 import com.scalableminds.webknossos.datastore.SkeletonTracing.SkeletonTracing
 import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing
 import com.scalableminds.webknossos.datastore.models.annotation.AnnotationLayerType
-import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, DataSourceLike}
+import com.scalableminds.webknossos.datastore.models.datasource.DataSourceLike
 import com.scalableminds.webknossos.datastore.rpc.RPC
 import com.scalableminds.webknossos.datastore.services.{
   AccessTokenService,
@@ -50,7 +50,7 @@ class TSRemoteWebknossosClient @Inject()(
   private val webknossosUri: String = config.Tracingstore.WebKnossos.uri
-  private lazy val dataSourceIdByAnnotationIdCache: AlfuCache[ObjectId, DataSourceId] = AlfuCache()
+  private lazy val datasetIdByAnnotationIdCache: AlfuCache[ObjectId, ObjectId] = AlfuCache()
   private lazy val annotationIdByTracingIdCache: AlfuCache[String, ObjectId] =
     AlfuCache(maxCapacity = 10000, timeToLive = 5 minutes)
@@ -68,22 +68,21 @@ class TSRemoteWebknossosClient @Inject()(
       .silent
       .getWithJsonResponse[DataSourceLike]
-  def getDataStoreUriForDataSource(organizationId: String, datasetDirectoryName: String): Fox[String] =
-    rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataStoreUri/$datasetDirectoryName")
-      .addQueryString("organizationId" -> organizationId)
+  def getDataStoreUriForDataset(datasetId: ObjectId): Fox[String] =
+    rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataStoreUri/$datasetId")
       .addQueryString("key" -> tracingStoreKey)
       .silent
       .getWithJsonResponse[String]
-  def getDataSourceIdForAnnotation(annotationId: ObjectId)(implicit ec: ExecutionContext): Fox[DataSourceId] =
-    dataSourceIdByAnnotationIdCache.getOrLoad(
+  def getDatasetIdForAnnotation(annotationId: ObjectId)(implicit ec: ExecutionContext): Fox[ObjectId] =
+    datasetIdByAnnotationIdCache.getOrLoad(
       annotationId,
       aId =>
-        rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/dataSourceId")
+        rpc(s"$webknossosUri/api/tracingstores/$tracingStoreName/datasetId")
          .addQueryString("annotationId" -> aId.toString)
          .addQueryString("key" -> tracingStoreKey)
          .silent
-          .getWithJsonResponse[DataSourceId]
+          .getWithJsonResponse[ObjectId]
     )
   def getAnnotationIdForTracing(tracingId: String)(implicit ec: ExecutionContext): Fox[ObjectId] =
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala
index 5332d91f2be..a50e20eeede 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/RemoteFallbackLayer.scala
@@ -8,23 +8,19 @@ import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing
 import com.scalableminds.webknossos.datastore.VolumeTracing.VolumeTracing.ElementClassProto
 import com.scalableminds.webknossos.datastore.helpers.ProtoGeometryImplicits
 import com.scalableminds.webknossos.datastore.models.WebknossosDataRequest
-import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, DataSourceId, ElementClass}
+import com.scalableminds.webknossos.datastore.models.datasource.{DataLayerLike, ElementClass}
 import com.scalableminds.webknossos.tracingstore.tracings.editablemapping.FallbackDataKey
 import com.scalableminds.webknossos.tracingstore.{TSRemoteDatastoreClient, TSRemoteWebknossosClient}
 import com.scalableminds.util.tools.Box
 import scala.concurrent.ExecutionContext
-case class RemoteFallbackLayer(organizationId: String,
-                               datasetDirectoryName: String,
-                               layerName: String,
-                               elementClass: ElementClassProto)
+case class RemoteFallbackLayer(datasetId: ObjectId, layerName: String, elementClass: ElementClassProto)
 object RemoteFallbackLayer extends ProtoGeometryImplicits {
-  def fromDataLayerAndDataSource(dataLayer: DataLayerLike, dataSource: DataSourceId): Box[RemoteFallbackLayer] = {
+  def fromDataLayerAndDatasetId(dataLayer: DataLayerLike, datasetId: ObjectId): Box[RemoteFallbackLayer] = {
     val elementClassProtoBox = ElementClass.toProto(dataLayer.elementClass)
-    elementClassProtoBox.map(elementClassProto =>
-      RemoteFallbackLayer(dataSource.organizationId, dataSource.directoryName, dataLayer.name, elementClassProto))
+    elementClassProtoBox.map(elementClassProto => RemoteFallbackLayer(datasetId, dataLayer.name, elementClassProto))
   }
 }
 trait FallbackDataHelper extends FoxImplicits {
@@ -38,8 +34,8 @@ trait FallbackDataHelper extends FoxImplicits {
       implicit ec: ExecutionContext): Fox[RemoteFallbackLayer] =
     for {
      layerName <- tracing.fallbackLayer.toFox ?~> "This feature is only defined on volume annotations with fallback segmentation layer."
-      datasetId <- remoteWebknossosClient.getDataSourceIdForAnnotation(annotationId)
-    } yield RemoteFallbackLayer(datasetId.organizationId, datasetId.directoryName, layerName, tracing.elementClass)
+      datasetId <- remoteWebknossosClient.getDatasetIdForAnnotation(annotationId)
+    } yield RemoteFallbackLayer(datasetId, layerName, tracing.elementClass)
   def getFallbackBucketFromDataStore(remoteFallbackLayer: RemoteFallbackLayer, dataRequest: WebknossosDataRequest)(
       implicit ec: ExecutionContext,
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala
index e3bab6d6391..fd3604dc36f 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala
@@ -973,10 +973,10 @@ class VolumeTracingService @Inject()(
       implicit tc: TokenContext): Fox[Option[RemoteFallbackLayer]] =
     for {
       dataSource <- remoteWebknossosClient.getDataSourceForAnnotation(annotationId)
-      dataSourceId = dataSource.id
       layerWithFallbackOpt = dataSource.dataLayers.find(_.name == fallbackLayerName.getOrElse(""))
+      datasetId <- remoteWebknossosClient.getDatasetIdForAnnotation(annotationId)
       fallbackLayer <- Fox.runOptional(layerWithFallbackOpt) { layerWithFallback =>
-        RemoteFallbackLayer.fromDataLayerAndDataSource(layerWithFallback, dataSourceId).toFox
+        RemoteFallbackLayer.fromDataLayerAndDatasetId(layerWithFallback, datasetId).toFox
       }
     } yield fallbackLayer
diff --git a/webknossos-tracingstore/conf/tracingstore.versioned.routes b/webknossos-tracingstore/conf/tracingstore.versioned.routes
index 1faf9b948b1..aad61de028d 100644
--- a/webknossos-tracingstore/conf/tracingstore.versioned.routes
+++ b/webknossos-tracingstore/conf/tracingstore.versioned.routes
@@ -1,5 +1,6 @@
 # Note: keep this in sync with the reported version numbers in the com.scalableminds.util.mvc.ApiVersioning trait
+-> /v10/ tracingstore.latest.Routes
 -> /v9/ tracingstore.latest.Routes
 -> /v8/ tracingstore.latest.Routes
 -> /v7/ tracingstore.latest.Routes