From 9f89d383a944d02e16a88c2deb47e843df51b3d8 Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 23 Jun 2025 14:55:04 +0200 Subject: [PATCH 1/5] Explore remote datasets as virtual datasets --- app/controllers/DatasetController.scala | 8 ++-- .../WKRemoteDataStoreController.scala | 30 ++++++++++++- app/models/dataset/DataStore.scala | 3 +- app/models/dataset/DatasetService.scala | 37 ++++++++++++++-- .../dataset/WKRemoteDataStoreClient.scala | 12 ------ .../explore/WKExploreRemoteLayerService.scala | 23 ++++++---- conf/webknossos.latest.routes | 1 + .../controllers/DataSourceController.scala | 29 ++++--------- .../dataformats/layers/N5DataLayers.scala | 40 +++++++++++++++++- .../layers/PrecomputedDataLayers.scala | 40 +++++++++++++++++- .../dataformats/layers/WKWDataLayers.scala | 40 +++++++++++++++++- .../dataformats/layers/Zarr3DataLayers.scala | 40 +++++++++++++++++- .../dataformats/layers/ZarrDataLayers.scala | 42 +++++++++++++++++-- .../models/datasource/DataLayer.scala | 2 + .../services/DSRemoteWebknossosClient.scala | 21 +++++++++- .../services/DataSourceService.scala | 5 +++ .../EditableMappingLayer.scala | 3 ++ .../tracings/volume/VolumeTracingLayer.scala | 2 + 18 files changed, 313 insertions(+), 65 deletions(-) diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index 5858ab5a6e..9944a35769 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -145,10 +145,10 @@ class DatasetController @Inject()(userService: UserService, _ <- Fox.fromBool(dataSource.dataLayers.nonEmpty) ?~> "dataset.explore.zeroLayers" folderIdOpt <- Fox.runOptional(request.body.folderPath)(folderPath => folderService.getOrCreateFromPathLiteral(folderPath, request.identity._organization)) ?~> "dataset.explore.autoAdd.getFolder.failed" - _ <- wkExploreRemoteLayerService.addRemoteDatasource(dataSource, - request.body.datasetName, - request.identity, - folderIdOpt) ?~> "dataset.explore.autoAdd.failed" + _ <- wkExploreRemoteLayerService.addRemoteDatasourceToDatabase(dataSource, + request.body.datasetName, + request.identity, + folderIdOpt) ?~> "dataset.explore.autoAdd.failed" } yield Ok } diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index 37f0b70d2c..121f7dc2b6 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -9,7 +9,7 @@ import com.scalableminds.webknossos.datastore.helpers.{LayerMagLinkInfo, MagLink import com.scalableminds.webknossos.datastore.models.UnfinishedUpload import com.scalableminds.webknossos.datastore.models.datasource.DataSourceId import com.scalableminds.webknossos.datastore.models.datasource.inbox.{InboxDataSourceLike => InboxDataSource} -import com.scalableminds.webknossos.datastore.services.{DataSourcePathInfo, DataStoreStatus} +import com.scalableminds.webknossos.datastore.services.{DataSourcePathInfo, DataSourceRegistrationInfo, DataStoreStatus} import com.scalableminds.webknossos.datastore.services.uploading.{ LinkedLayerIdentifier, ReserveAdditionalInformation, @@ -270,6 +270,34 @@ class WKRemoteDataStoreController @Inject()( } + // Register a datasource from the datastore as a dataset in the database. + // This is called when adding remote virtual datasets (that should only exist in the database) + // by the data store after exploration. 
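+  // Illustrative request shape (the DataSourceRegistrationInfo body is defined
+  // in DSRemoteWebknossosClient.scala below; values here are placeholders):
+  //   POST /api/datastores/:name/datasources/:organizationId/:directoryName?key=<dataStoreKey>&token=<userToken>
+  //   { "dataSource": <GenericDataSource JSON>, "folderId": "<optional folder id>" }
+  // On success, responds with the ObjectId of the newly created dataset as a plain string.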
+ def registerDataSource(name: String, + key: String, + organizationId: String, + directoryName: String, + token: String): Action[DataSourceRegistrationInfo] = + Action.async(validateJson[DataSourceRegistrationInfo]) { implicit request => + dataStoreService.validateAccess(name, key) { dataStore => + for { + user <- bearerTokenService.userForToken(token) + organization <- organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> Messages( + "organization.notFound", + organizationId) ~> NOT_FOUND + _ <- Fox.fromBool(organization._id == user._organization) ?~> "notAllowed" ~> FORBIDDEN + dataset <- datasetService.createVirtualDataset( + directoryName, + organizationId, + dataStore, + request.body.dataSource, + request.body.folderId, + user + ) + } yield Ok(dataset._id.toString) + } + } + def jobExportProperties(name: String, key: String, jobId: ObjectId): Action[AnyContent] = Action.async { implicit request => dataStoreService.validateAccess(name, key) { _ => diff --git a/app/models/dataset/DataStore.scala b/app/models/dataset/DataStore.scala index 1672b253ce..6c5ddc6e69 100644 --- a/app/models/dataset/DataStore.scala +++ b/app/models/dataset/DataStore.scala @@ -79,8 +79,7 @@ class DataStoreService @Inject()(dataStoreDAO: DataStoreDAO, jobService: JobServ def validateAccess(name: String, key: String)(block: DataStore => Future[Result])( implicit m: MessagesProvider): Fox[Result] = - Fox.fromFuture((for { - dataStore <- dataStoreDAO.findOneByName(name)(GlobalAccessContext) + Fox.fromFuture((for {dataStore <- dataStoreDAO.findOneByName(name)(GlobalAccessContext) _ <- Fox.fromBool(key == dataStore.key) result <- Fox.fromFuture(block(dataStore)) } yield result).getOrElse(Forbidden(Json.obj("granted" -> false, "msg" -> Messages("dataStore.notFound"))))) diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index af2082f894..5a7de91f2e 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -1,6 +1,6 @@ package models.dataset -import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext} +import com.scalableminds.util.accesscontext.{AuthorizedAccessContext, DBAccessContext, GlobalAccessContext} import com.scalableminds.util.objectid.ObjectId import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.{Fox, FoxImplicits} @@ -23,6 +23,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ AbstractDataLayer, AbstractSegmentationLayer, DataFormat, + DataSource, DataSourceId, GenericDataSource, DataLayerLike => DataLayer @@ -36,6 +37,7 @@ import models.team._ import models.user.{User, UserService} import net.liftweb.common.Box.tryo import net.liftweb.common.{Empty, EmptyBox, Full} +import play.api.i18n.Messages import play.api.libs.json.{JsObject, Json} import security.RandomIDGenerator import utils.WkConf @@ -97,6 +99,34 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, } yield newDataset } + private def virtualRemoteDatasetStatus = "Virtual remote dataset" + + def createVirtualDataset(datasetName: String, + organizationId: String, + dataStore: DataStore, + dataSource: DataSource, + folderId: Option[String], + user: User): Fox[Dataset] = + for { + _ <- assertValidDatasetName(datasetName) + isDatasetNameAlreadyTaken <- datasetDAO.doesDatasetDirectoryExistInOrganization(datasetName, organizationId)( + GlobalAccessContext) + _ <- Fox.fromBool(!isDatasetNameAlreadyTaken) ?~> "dataset.name.alreadyTaken" + organization <- 
organizationDAO.findOne(organizationId)(GlobalAccessContext) ?~> "organization.notFound" + folderId <- ObjectId.fromString(folderId.getOrElse(organization._rootFolder.toString)) ?~> "dataset.upload.folderId.invalid" + _ <- folderDAO.assertUpdateAccess(folderId)(AuthorizedAccessContext(user)) ?~> "folder.noWriteAccess" + newDatasetId = ObjectId.generate + abstractDataSource = dataSource.copy(dataLayers = dataSource.dataLayers.map(_.asAbstractLayer)) + dataset <- createDataset(dataStore, + newDatasetId, + datasetName, + abstractDataSource, + status = Some(virtualRemoteDatasetStatus)) + datasetId = dataset._id + _ <- datasetDAO.updateFolder(datasetId, folderId)(GlobalAccessContext) + _ <- addUploader(dataset, user._id)(GlobalAccessContext) + } yield dataset + def getAllUnfinishedDatasetUploadsOfUser(userId: ObjectId, organizationId: String)( implicit ctx: DBAccessContext): Fox[List[DatasetCompactInfo]] = datasetDAO.findAllCompactWithSearch( @@ -114,7 +144,8 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, datasetId: ObjectId, datasetName: String, dataSource: InboxDataSource, - publication: Option[ObjectId] = None + publication: Option[ObjectId] = None, + status: Option[String] = None ): Fox[Dataset] = { implicit val ctx: DBAccessContext = GlobalAccessContext val metadata = @@ -147,7 +178,7 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, name = datasetName, voxelSize = dataSource.voxelSizeOpt, sharingToken = None, - status = dataSource.statusOpt.getOrElse(""), + status = status.orElse(dataSource.statusOpt).getOrElse(""), logoUrl = None, metadata = metadata ) diff --git a/app/models/dataset/WKRemoteDataStoreClient.scala b/app/models/dataset/WKRemoteDataStoreClient.scala index b593d21cbd..47c0b8c1a7 100644 --- a/app/models/dataset/WKRemoteDataStoreClient.scala +++ b/app/models/dataset/WKRemoteDataStoreClient.scala @@ -82,18 +82,6 @@ class WKRemoteDataStoreClient(dataStore: DataStore, rpc: RPC) extends LazyLoggin .silent .getWithJsonResponse[List[DirectoryStorageReport]] - def addDataSource(organizationId: String, - datasetName: String, - dataSource: GenericDataSource[DataLayer], - folderId: Option[ObjectId], - userToken: String): Fox[Unit] = - for { - _ <- rpc(s"${dataStore.url}/data/datasets/$organizationId/$datasetName") - .addQueryString("token" -> userToken) - .addQueryStringOptional("folderId", folderId.map(_.toString)) - .postJson(dataSource) - } yield () - def hasSegmentIndexFile(organizationId: String, datasetName: String, layerName: String)( implicit ec: ExecutionContext): Fox[Boolean] = { val cacheKey = (organizationId, datasetName, layerName) diff --git a/app/models/dataset/explore/WKExploreRemoteLayerService.scala b/app/models/dataset/explore/WKExploreRemoteLayerService.scala index 79bb249306..2072ad1f47 100644 --- a/app/models/dataset/explore/WKExploreRemoteLayerService.scala +++ b/app/models/dataset/explore/WKExploreRemoteLayerService.scala @@ -105,17 +105,22 @@ class WKExploreRemoteLayerService @Inject()(credentialService: CredentialService credentialId <- Fox.runOptional(credentialOpt)(c => credentialService.insertOne(c)) ?~> "dataVault.credential.insert.failed" } yield credentialId - def addRemoteDatasource(dataSource: GenericDataSource[DataLayer], - datasetName: String, - user: User, - folderId: Option[ObjectId])(implicit ctx: DBAccessContext): Fox[Unit] = + def addRemoteDatasourceToDatabase(dataSource: GenericDataSource[DataLayer], + datasetName: String, + user: User, + folderId: Option[ObjectId])(implicit ctx: DBAccessContext): Fox[Unit] 
= for { - organization <- organizationDAO.findOne(user._organization) dataStore <- dataStoreDAO.findOneWithUploadsAllowed + organizationId = user._organization _ <- datasetService.assertValidDatasetName(datasetName) - client = new WKRemoteDataStoreClient(dataStore, rpc) - userToken <- bearerTokenService.createAndInitDataStoreTokenForUser(user) - _ <- client.addDataSource(organization._id, datasetName, dataSource, folderId, userToken) - } yield () + datasetId <- datasetService.createVirtualDataset( + dataSource.id.directoryName, + organizationId, + dataStore, + dataSource, + folderId.map(_.toString), + user + ) + } yield datasetId } diff --git a/conf/webknossos.latest.routes b/conf/webknossos.latest.routes index d3c94188a7..805d641873 100644 --- a/conf/webknossos.latest.routes +++ b/conf/webknossos.latest.routes @@ -111,6 +111,7 @@ PUT /datastores/:name/datasources PUT /datastores/:name/datasources/paths controllers.WKRemoteDataStoreController.updatePaths(name: String, key: String) GET /datastores/:name/datasources/:organizationId/:directoryName/paths controllers.WKRemoteDataStoreController.getPaths(name: String, key: String, organizationId: String, directoryName: String) GET /datastores/:name/datasources/:datasetId controllers.WKRemoteDataStoreController.getDataSource(name: String, key: String, datasetId: ObjectId) +POST /datastores/:name/datasources/:organizationId/:directoryName controllers.WKRemoteDataStoreController.registerDataSource(name: String, key: String, organizationId: String, directoryName: String, token: String) PATCH /datastores/:name/status controllers.WKRemoteDataStoreController.statusUpdate(name: String, key: String) POST /datastores/:name/reserveUpload controllers.WKRemoteDataStoreController.reserveDatasetUpload(name: String, key: String, token: String) GET /datastores/:name/getUnfinishedUploadsForUser controllers.WKRemoteDataStoreController.getUnfinishedUploadsForUser(name: String, key: String, token: String, organizationName: String) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 86df450b00..46de84d519 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -387,32 +387,17 @@ class DataSourceController @Inject()( } } - // Stores a remote dataset in the database. 
+ // Called by the frontend after the user has set datasetName / FolderId of an explored dataSource + // Add this data source to the WK database def add(organizationId: String, datasetName: String, folderId: Option[String]): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => accessTokenService.validateAccessFromTokenContext(UserAccessRequest.administrateDataSources) { for { - reservedAdditionalInfo <- dsRemoteWebknossosClient.reserveDataSourceUpload( - ReserveUploadInformation( - uploadId = "", // Set by core backend - name = datasetName, - organization = organizationId, - totalFileCount = 1, - filePaths = None, - totalFileSizeInBytes = None, - layersToLink = None, - initialTeams = List.empty, - folderId = folderId, - requireUniqueName = Some(false), - ) - ) ?~> "dataset.upload.validation.failed" - datasourceId = DataSourceId(reservedAdditionalInfo.directoryName, organizationId) - _ <- dataSourceService.updateDataSource(request.body.copy(id = datasourceId), expectExisting = false) - uploadedDatasetId <- dsRemoteWebknossosClient.reportUpload(datasourceId, - 0L, - needsConversion = false, - viaAddRoute = true) ?~> "reportUpload.failed" - } yield Ok(Json.obj("newDatasetId" -> uploadedDatasetId)) + _ <- Fox.successful(()) + dataSourceId = DataSourceId(datasetName, organizationId) + dataSource = request.body.copy(id = dataSourceId) + datasetId <- dsRemoteWebknossosClient.registerDataSource(dataSource, dataSourceId, folderId) ?~> "dataset.add.failed" + } yield Ok(Json.obj("newDatasetId" -> datasetId)) } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala index fa689fe813..47760a0a9c 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/N5DataLayers.scala @@ -37,7 +37,24 @@ case class N5DataLayer( override val numChannels: Option[Int] = Some(1), additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None, -) extends N5Layer +) extends N5Layer { + override def asAbstractLayer: DataLayerLike = + AbstractDataLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object N5DataLayer { implicit val jsonFormat: OFormat[N5DataLayer] = Json.format[N5DataLayer] @@ -57,7 +74,26 @@ case class N5SegmentationLayer( additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None, ) extends SegmentationLayer - with N5Layer + with N5Layer { + override def asAbstractLayer: DataLayerLike = + AbstractSegmentationLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + largestSegmentId, + mappings, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object N5SegmentationLayer { implicit val jsonFormat: OFormat[N5SegmentationLayer] = Json.format[N5SegmentationLayer] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala index 1c6c0554ab..bd2baf7ef9 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/PrecomputedDataLayers.scala @@ -37,7 +37,24 @@ case class PrecomputedDataLayer( override val numChannels: Option[Int] = Some(1), additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None, -) extends PrecomputedLayer +) extends PrecomputedLayer { + override def asAbstractLayer: DataLayerLike = + AbstractDataLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object PrecomputedDataLayer { implicit val jsonFormat: OFormat[PrecomputedDataLayer] = Json.format[PrecomputedDataLayer] @@ -57,7 +74,26 @@ case class PrecomputedSegmentationLayer( additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None, ) extends SegmentationLayer - with PrecomputedLayer + with PrecomputedLayer { + override def asAbstractLayer: DataLayerLike = + AbstractSegmentationLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + largestSegmentId, + mappings, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object PrecomputedSegmentationLayer { implicit val jsonFormat: OFormat[PrecomputedSegmentationLayer] = Json.format[PrecomputedSegmentationLayer] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala index e892fd9952..96c3f7e181 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala @@ -46,7 +46,24 @@ case class WKWDataLayer( coordinateTransformations: Option[List[CoordinateTransformation]] = None, additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None, -) extends WKWLayer +) extends WKWLayer { + override def asAbstractLayer: DataLayerLike = + AbstractDataLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + None, + None, + Some(dataFormat) + ) +} object WKWDataLayer { implicit val jsonFormat: OFormat[WKWDataLayer] = Json.format[WKWDataLayer] @@ -65,7 +82,26 @@ case class WKWSegmentationLayer( additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None ) extends SegmentationLayer - with WKWLayer + with WKWLayer { + def asAbstractLayer: AbstractSegmentationLayer = + AbstractSegmentationLayer( + name, + Category.segmentation, + boundingBox, + resolutions, + elementClass, + largestSegmentId, + mappings, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + None, + None, + Some(dataFormat) + ) +} object WKWSegmentationLayer { implicit val 
jsonFormat: OFormat[WKWSegmentationLayer] = Json.format[WKWSegmentationLayer] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala index 4dff3a5864..7a5b3f7b61 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/Zarr3DataLayers.scala @@ -38,7 +38,24 @@ case class Zarr3DataLayer( override val numChannels: Option[Int] = Some(1), additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None -) extends Zarr3Layer +) extends Zarr3Layer { + override def asAbstractLayer: DataLayerLike = + AbstractDataLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object Zarr3DataLayer { implicit val jsonFormat: OFormat[Zarr3DataLayer] = Json.format[Zarr3DataLayer] @@ -58,7 +75,26 @@ case class Zarr3SegmentationLayer( additionalAxes: Option[Seq[AdditionalAxis]] = None, attachments: Option[DatasetLayerAttachments] = None ) extends SegmentationLayer - with Zarr3Layer + with Zarr3Layer { + override def asAbstractLayer: DataLayerLike = + AbstractSegmentationLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + largestSegmentId, + mappings, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object Zarr3SegmentationLayer { implicit val jsonFormat: OFormat[Zarr3SegmentationLayer] = Json.format[Zarr3SegmentationLayer] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala index d8439efc38..34244ca92b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala @@ -3,7 +3,7 @@ package com.scalableminds.webknossos.datastore.dataformats.layers import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.webknossos.datastore.dataformats.{DatasetArrayBucketProvider, MagLocator} -import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration +import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.{LayerViewConfiguration, empty} import com.scalableminds.webknossos.datastore.models.datasource.{DataFormat, _} import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService import play.api.libs.json.{Json, OFormat} @@ -37,7 +37,24 @@ case class ZarrDataLayer( override val additionalAxes: Option[Seq[AdditionalAxis]], attachments: Option[DatasetLayerAttachments] = None, override val dataFormat: DataFormat.Value, -) extends ZarrLayer +) extends ZarrLayer { + override def asAbstractLayer: DataLayerLike = + AbstractDataLayer( + name, + category, + boundingBox, + resolutions, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + 
coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object ZarrDataLayer { implicit val jsonFormat: OFormat[ZarrDataLayer] = Json.format[ZarrDataLayer] @@ -58,7 +75,26 @@ case class ZarrSegmentationLayer( attachments: Option[DatasetLayerAttachments] = None, override val dataFormat: DataFormat.Value, ) extends SegmentationLayer - with ZarrLayer + with ZarrLayer { + override def asAbstractLayer: AbstractSegmentationLayer = + AbstractSegmentationLayer( + name, + Category.segmentation, + boundingBox, + resolutions, + elementClass, + largestSegmentId, + mappings, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments, + Some(mags), + numChannels, + Some(dataFormat) + ) +} object ZarrSegmentationLayer { implicit val jsonFormat: OFormat[ZarrSegmentationLayer] = Json.format[ZarrSegmentationLayer] diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala index 4e216b4b5b..a8277e1043 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/models/datasource/DataLayer.scala @@ -367,6 +367,8 @@ trait DataLayer extends DataLayerLike { case _ => this } } + + def asAbstractLayer: DataLayerLike } object DataLayer { diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala index d730d01677..8cf537410d 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSRemoteWebknossosClient.scala @@ -12,7 +12,7 @@ import com.scalableminds.webknossos.datastore.controllers.JobExportProperties import com.scalableminds.webknossos.datastore.helpers.{IntervalScheduler, LayerMagLinkInfo} import com.scalableminds.webknossos.datastore.models.UnfinishedUpload import com.scalableminds.webknossos.datastore.models.annotation.AnnotationSource -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, GenericDataSource} +import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, DataSourceId, GenericDataSource} import com.scalableminds.webknossos.datastore.models.datasource.inbox.InboxDataSourceLike import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.uploading.{ @@ -49,6 +49,12 @@ object MagPathInfo { implicit val jsonFormat: OFormat[MagPathInfo] = Json.format[MagPathInfo] } +case class DataSourceRegistrationInfo(dataSource: DataSource, folderId: Option[String]) + +object DataSourceRegistrationInfo { + implicit val jsonFormat: OFormat[DataSourceRegistrationInfo] = Json.format[DataSourceRegistrationInfo] +} + trait RemoteWebknossosClient { def requestUserAccess(accessRequest: UserAccessRequest)(implicit tc: TokenContext): Fox[UserAccessAnswer] } @@ -135,6 +141,19 @@ class DSRemoteWebknossosClient @Inject()( .postJsonWithJsonResponse[ReserveUploadInformation, ReserveAdditionalInformation](info) } yield reserveUploadInfo + def registerDataSource(dataSource: DataSource, dataSourceId: DataSourceId, folderId: Option[String])( + 
implicit tc: TokenContext): Fox[String] = + for { + _ <- Fox.successful(()) + info = DataSourceRegistrationInfo(dataSource, folderId) + response <- rpc( + s"$webknossosUri/api/datastores/$dataStoreName/datasources/${dataSourceId.organizationId}/${dataSourceId.directoryName}") + .addQueryString("key" -> dataStoreKey) + .withTokenFromContext + .postJson[DataSourceRegistrationInfo](info) + datasetId = response.body + } yield datasetId + def deleteDataSource(id: DataSourceId): Fox[_] = rpc(s"$webknossosUri/api/datastores/$dataStoreName/deleteDataset") .addQueryString("key" -> dataStoreKey) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala index f7df183498..74b9d55439 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DataSourceService.scala @@ -266,6 +266,11 @@ class DataSourceService @Inject()( } } + def dataSourceShouldBeStoredOnDisk(dataSource: DataSource): Boolean = + // If all mags have a "path" defined, data is not stored in the datasource (but at the location of the path) + // and we do not need to store it on disk. + !dataSource.dataLayers.forall(layer => layer.mags.forall(mag => mag.path.isDefined)) + def updateDataSource(dataSource: DataSource, expectExisting: Boolean): Fox[Unit] = { val organizationDir = dataBaseDir.resolve(dataSource.id.organizationId) val dataSourcePath = organizationDir.resolve(dataSource.id.directoryName) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala index d822ab2f7e..6f20772791 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala @@ -15,6 +15,7 @@ import com.scalableminds.webknossos.datastore.models.datasource.{ CoordinateTransformation, DataFormat, DataLayer, + DataLayerLike, DataSourceId, DatasetLayerAttachments, ElementClass, @@ -111,4 +112,6 @@ case class EditableMappingLayer(name: String, // set to tracing id def version: Long = tracing.version def tracingId: String = name + + override def asAbstractLayer: DataLayerLike = ??? } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala index 67a4c8528e..5d3c8b543d 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala @@ -134,4 +134,6 @@ case class VolumeTracingLayer( lazy val expectedUncompressedBucketSize: Int = ElementClass.bytesPerElement(elementClass) * scala.math.pow(DataLayer.bucketLength, 3).intValue + + override def asAbstractLayer: DataLayerLike = ??? 
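+  // Tracing layers (volume and editable-mapping alike) live only in the
+  // tracingstore and are not registered as datasets in the WK database, so no
+  // abstract-layer conversion is provided here (calling this would throw a
+  // NotImplementedError).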
} From 42101a95a052bf106d151f4611959de680a49a50 Mon Sep 17 00:00:00 2001 From: frcroth Date: Mon, 23 Jun 2025 16:59:25 +0200 Subject: [PATCH 2/5] Do not have virtual remote datasets deleted --- app/models/dataset/DataStore.scala | 3 ++- app/models/dataset/DatasetService.scala | 7 +++---- app/models/dataset/WKRemoteDataStoreClient.scala | 2 -- .../dataset/explore/WKExploreRemoteLayerService.scala | 3 +-- 4 files changed, 6 insertions(+), 9 deletions(-) diff --git a/app/models/dataset/DataStore.scala b/app/models/dataset/DataStore.scala index 6c5ddc6e69..1672b253ce 100644 --- a/app/models/dataset/DataStore.scala +++ b/app/models/dataset/DataStore.scala @@ -79,7 +79,8 @@ class DataStoreService @Inject()(dataStoreDAO: DataStoreDAO, jobService: JobServ def validateAccess(name: String, key: String)(block: DataStore => Future[Result])( implicit m: MessagesProvider): Fox[Result] = - Fox.fromFuture((for {dataStore <- dataStoreDAO.findOneByName(name)(GlobalAccessContext) + Fox.fromFuture((for { + dataStore <- dataStoreDAO.findOneByName(name)(GlobalAccessContext) _ <- Fox.fromBool(key == dataStore.key) result <- Fox.fromFuture(block(dataStore)) } yield result).getOrElse(Forbidden(Json.obj("granted" -> false, "msg" -> Messages("dataStore.notFound"))))) diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index 5a7de91f2e..26590d2a57 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -37,7 +37,6 @@ import models.team._ import models.user.{User, UserService} import net.liftweb.common.Box.tryo import net.liftweb.common.{Empty, EmptyBox, Full} -import play.api.i18n.Messages import play.api.libs.json.{JsObject, Json} import security.RandomIDGenerator import utils.WkConf @@ -65,7 +64,9 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, with LazyLogging { private val unreportedStatus = datasetDAO.unreportedStatus private val notYetUploadedStatus = "Not yet fully uploaded." 
- private val inactiveStatusList = List(unreportedStatus, notYetUploadedStatus, datasetDAO.deletedByUserStatus) + private val virtualRemoteDatasetStatus = "Virtual remote dataset" // Virtual datasets should not be deleted when not reported + private val inactiveStatusList = + List(unreportedStatus, notYetUploadedStatus, datasetDAO.deletedByUserStatus, virtualRemoteDatasetStatus) def assertValidDatasetName(name: String): Fox[Unit] = for { @@ -99,8 +100,6 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, } yield newDataset } - private def virtualRemoteDatasetStatus = "Virtual remote dataset" - def createVirtualDataset(datasetName: String, organizationId: String, dataStore: DataStore, diff --git a/app/models/dataset/WKRemoteDataStoreClient.scala b/app/models/dataset/WKRemoteDataStoreClient.scala index 47c0b8c1a7..abecc28931 100644 --- a/app/models/dataset/WKRemoteDataStoreClient.scala +++ b/app/models/dataset/WKRemoteDataStoreClient.scala @@ -9,14 +9,12 @@ import com.scalableminds.webknossos.datastore.explore.{ ExploreRemoteLayerParameters } import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, RawCuboidRequest} -import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, GenericDataSource} import com.scalableminds.webknossos.datastore.rpc.RPC import com.scalableminds.webknossos.datastore.services.DirectoryStorageReport import com.typesafe.scalalogging.LazyLogging import controllers.RpcTokenHolder import play.api.libs.json.JsObject import play.utils.UriEncoding -import com.scalableminds.util.objectid.ObjectId import scala.concurrent.ExecutionContext import scala.concurrent.duration.DurationInt diff --git a/app/models/dataset/explore/WKExploreRemoteLayerService.scala b/app/models/dataset/explore/WKExploreRemoteLayerService.scala index 2072ad1f47..e4e186fd97 100644 --- a/app/models/dataset/explore/WKExploreRemoteLayerService.scala +++ b/app/models/dataset/explore/WKExploreRemoteLayerService.scala @@ -121,6 +121,5 @@ class WKExploreRemoteLayerService @Inject()(credentialService: CredentialService folderId.map(_.toString), user ) - } yield datasetId - + } yield () } From 18dfe9837742d4481adcbd906dff1dcffa4fc003 Mon Sep 17 00:00:00 2001 From: frcroth Date: Wed, 25 Jun 2025 10:27:38 +0200 Subject: [PATCH 3/5] Put mag in db --- app/models/dataset/Dataset.scala | 4 ++-- app/models/dataset/DatasetService.scala | 1 - 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index ece9e98edd..54bdb2edea 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -762,8 +762,8 @@ class DatasetMagsDAO @Inject()(sqlClient: SqlClient)(implicit ec: ExecutionConte layer.magsOpt match { case Some(mags) => mags.map(mag => { - q"""INSERT INTO webknossos.dataset_mags(_dataset, dataLayerName, mag, axisOrder, channelIndex, credentialId) - VALUES($datasetId, ${layer.name}, ${mag.mag}, ${mag.axisOrder + q"""INSERT INTO webknossos.dataset_mags(_dataset, dataLayerName, mag, path, axisOrder, channelIndex, credentialId) + VALUES($datasetId, ${layer.name}, ${mag.mag}, ${mag.path}, ${mag.axisOrder .map(Json.toJson(_))}, ${mag.channelIndex}, ${mag.credentialId}) """.asUpdate }) diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index 26590d2a57..496f94de15 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -51,7 +51,6 @@ class DatasetService @Inject()(organizationDAO: 
OrganizationDAO, datasetLastUsedTimesDAO: DatasetLastUsedTimesDAO, datasetDataLayerDAO: DatasetLayerDAO, datasetMagsDAO: DatasetMagsDAO, - datasetLayerAttachmentsDAO: DatasetLayerAttachmentsDAO, teamDAO: TeamDAO, folderDAO: FolderDAO, dataStoreService: DataStoreService, From 9c3cf7416493055494a991eaa6dececcf87dc7a9 Mon Sep 17 00:00:00 2001 From: frcroth Date: Wed, 25 Jun 2025 10:28:50 +0200 Subject: [PATCH 4/5] Add temporary front end for testing virtual datasets --- .../viewer/model/bucket_data_handling/wkstore_adapter.ts | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts index d484c9b2e9..c4a729e9b0 100644 --- a/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/viewer/model/bucket_data_handling/wkstore_adapter.ts @@ -97,15 +97,17 @@ export async function requestWithFallback( batch: Array, ): Promise | null | undefined>> { const state = Store.getState(); + const datasetId = state.dataset.id; const datasetDirectoryName = state.dataset.directoryName; const organization = state.dataset.owningOrganization; const dataStoreHost = state.dataset.dataStore.url; const tracingStoreHost = state.annotation.tracingStore.url; + // Prefer datasetId (id) if available, otherwise fall back to old method const getDataStoreUrl = (optLayerName?: string) => - `${dataStoreHost}/data/datasets/${organization}/${datasetDirectoryName}/layers/${ - optLayerName || layerInfo.name - }`; + datasetId + ? `${dataStoreHost}/data/wkDatasets/${datasetId}/layers/${optLayerName || layerInfo.name}` + : `${dataStoreHost}/data/datasets/${organization}/${datasetDirectoryName}/layers/${optLayerName || layerInfo.name}`; const getTracingStoreUrl = () => `${tracingStoreHost}/tracings/volume/${layerInfo.name}`; From 391227a2f057fe6b526c3f6fd22837276c49ea64 Mon Sep 17 00:00:00 2001 From: frcroth Date: Wed, 25 Jun 2025 12:05:28 +0200 Subject: [PATCH 5/5] Use mags for WKW datasets --- app/models/dataset/DatasetService.scala | 31 ++++++- .../dataformats/layers/WKWDataLayers.scala | 88 +++++++++++++++++-- .../dataformats/layers/ZarrDataLayers.scala | 2 +- 3 files changed, 108 insertions(+), 13 deletions(-) diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index 496f94de15..4ea101653f 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -9,6 +9,8 @@ import com.scalableminds.webknossos.datastore.dataformats.layers.{ N5SegmentationLayer, PrecomputedDataLayer, PrecomputedSegmentationLayer, + WKWDataLayer, + WKWSegmentationLayer, Zarr3DataLayer, Zarr3SegmentationLayer, ZarrDataLayer, @@ -360,8 +362,18 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, case Some(df) => df match { case DataFormat.wkw => - throw new NotImplementedError( - "WKW data format not supported in this context, only datasets with MagLocators are supported") + WKWDataLayer( + name, + category, + boundingBox, + mags, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachmentsOpt + ) case DataFormat.neuroglancerPrecomputed => PrecomputedDataLayer( name, @@ -442,8 +454,19 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, case Some(df) => df match { case DataFormat.wkw => - throw new NotImplementedError( - "WKW data format not supported in this context, only 
datasets with MagLocators are supported") + WKWSegmentationLayer( + name, + boundingBox, + mags, + elementClass, + mappings, + largestSegmentId, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachmentsOpt + ) case DataFormat.neuroglancerPrecomputed => PrecomputedSegmentationLayer( name, diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala index 96c3f7e181..991b96765e 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/WKWDataLayers.scala @@ -6,7 +6,7 @@ import com.scalableminds.webknossos.datastore.dataformats.{BucketProvider, Datas import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration import com.scalableminds.webknossos.datastore.models.datasource._ import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService -import play.api.libs.json.{Json, OFormat} +import play.api.libs.json.{Format, JsError, JsResult, JsSuccess, JsValue, Json, OFormat} import ucar.ma2.{Array => MultiArray} case class WKWResolution(resolution: Vec3Int, cubeLength: Int) @@ -26,12 +26,12 @@ trait WKWLayer extends DataLayer { def wkwResolutions: List[WKWResolution] - def mags: List[MagLocator] = wkwResolutions.map(wkwResolution => MagLocator(wkwResolution.resolution)) - def resolutions: List[Vec3Int] = wkwResolutions.map(_.resolution) + def defaultCubeSize = 32 + def lengthOfUnderlyingCubes(mag: Vec3Int): Int = - wkwResolutions.find(_.resolution == mag).map(_.cubeLength).getOrElse(0) + wkwResolutions.find(_.resolution == mag).map(_ => defaultCubeSize).getOrElse(0) } @@ -39,7 +39,7 @@ case class WKWDataLayer( name: String, category: Category.Value, boundingBox: BoundingBox, - wkwResolutions: List[WKWResolution], + mags: List[MagLocator], elementClass: ElementClass.Value, defaultViewConfiguration: Option[LayerViewConfiguration] = None, adminViewConfiguration: Option[LayerViewConfiguration] = None, @@ -63,16 +63,51 @@ case class WKWDataLayer( None, Some(dataFormat) ) + + override def wkwResolutions: List[WKWResolution] = mags.map(mag => WKWResolution(mag.mag, defaultCubeSize)) } object WKWDataLayer { - implicit val jsonFormat: OFormat[WKWDataLayer] = Json.format[WKWDataLayer] + implicit val jsonFormat: Format[WKWDataLayer] = new Format[WKWDataLayer] { + def reads(json: JsValue): JsResult[WKWDataLayer] = + for { + mag: List[MagLocator] <- (json \ "wkwResolutions").validate[List[WKWResolution]] match { + case JsSuccess(value, _) => JsSuccess(value.map(resolution => MagLocator(resolution.resolution))) + case JsError(_) => (json \ "mags").validate[List[MagLocator]] + } + name <- (json \ "name").validate[String] + category <- (json \ "category").validate[Category.Value] + boundingBox <- (json \ "boundingBox").validate[BoundingBox] + elementClass <- (json \ "elementClass").validate[ElementClass.Value] + defaultViewConfiguration <- (json \ "defaultViewConfiguration").validateOpt[LayerViewConfiguration] + adminViewConfiguration <- (json \ "adminViewConfiguration").validateOpt[LayerViewConfiguration] + coordinateTransformations <- (json \ "coordinateTransformations").validateOpt[List[CoordinateTransformation]] + additionalAxes <- (json \ 
"additionalAxes").validateOpt[Seq[AdditionalAxis]] + attachments <- (json \ "attachments").validateOpt[DatasetLayerAttachments] + } yield { + WKWDataLayer( + name, + category, + boundingBox, + mag, + elementClass, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments + ) + } + + def writes(layer: WKWDataLayer): JsValue = + Json.writes[WKWDataLayer].writes(layer) + } } case class WKWSegmentationLayer( name: String, boundingBox: BoundingBox, - wkwResolutions: List[WKWResolution], + mags: List[MagLocator], elementClass: ElementClass.Value, mappings: Option[Set[String]], largestSegmentId: Option[Long] = None, @@ -101,8 +136,45 @@ case class WKWSegmentationLayer( None, Some(dataFormat) ) + + override def wkwResolutions: List[WKWResolution] = mags.map(mag => WKWResolution(mag.mag, defaultCubeSize)) } object WKWSegmentationLayer { - implicit val jsonFormat: OFormat[WKWSegmentationLayer] = Json.format[WKWSegmentationLayer] + implicit val jsonFormat: Format[WKWSegmentationLayer] = new Format[WKWSegmentationLayer] { + def reads(json: JsValue): JsResult[WKWSegmentationLayer] = + for { + mag: List[MagLocator] <- (json \ "wkwResolutions").validate[List[WKWResolution]] match { + case JsSuccess(value, _) => JsSuccess(value.map(resolution => MagLocator(resolution.resolution))) + case JsError(_) => (json \ "mags").validate[List[MagLocator]] + } + name <- (json \ "name").validate[String] + boundingBox <- (json \ "boundingBox").validate[BoundingBox] + elementClass <- (json \ "elementClass").validate[ElementClass.Value] + largestSegmentId <- (json \ "largestSegmentId").validateOpt[Long] + mappings <- (json \ "mappings").validateOpt[Set[String]] + defaultViewConfiguration <- (json \ "defaultViewConfiguration").validateOpt[LayerViewConfiguration] + adminViewConfiguration <- (json \ "adminViewConfiguration").validateOpt[LayerViewConfiguration] + coordinateTransformations <- (json \ "coordinateTransformations").validateOpt[List[CoordinateTransformation]] + additionalAxes <- (json \ "additionalAxes").validateOpt[Seq[AdditionalAxis]] + attachments <- (json \ "attachments").validateOpt[DatasetLayerAttachments] + } yield { + WKWSegmentationLayer( + name, + boundingBox, + mag, + elementClass, + mappings, + largestSegmentId, + defaultViewConfiguration, + adminViewConfiguration, + coordinateTransformations, + additionalAxes, + attachments + ) + } + + def writes(layer: WKWSegmentationLayer): JsValue = + Json.writes[WKWSegmentationLayer].writes(layer) + } } diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala index 34244ca92b..b79ef3d1c6 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/layers/ZarrDataLayers.scala @@ -3,7 +3,7 @@ package com.scalableminds.webknossos.datastore.dataformats.layers import com.scalableminds.util.cache.AlfuCache import com.scalableminds.util.geometry.{BoundingBox, Vec3Int} import com.scalableminds.webknossos.datastore.dataformats.{DatasetArrayBucketProvider, MagLocator} -import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.{LayerViewConfiguration, empty} +import com.scalableminds.webknossos.datastore.models.datasource.LayerViewConfiguration.LayerViewConfiguration import 
com.scalableminds.webknossos.datastore.models.datasource.{DataFormat, _} import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService import play.api.libs.json.{Json, OFormat}
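
Note on the hand-written Format instances in PATCH 5/5: a plain Json.format would no longer accept the legacy "wkwResolutions" field once the case classes carry "mags", so reads tries wkwResolutions first and falls back to mags, keeping existing WKW datasource-properties.json files parseable. Below is a minimal, self-contained sketch of that fallback pattern; it uses simplified stand-in types (Mag, WkwResolution with List[Int] coordinates) instead of the real MagLocator/Vec3Int, whose JSON shapes carry more fields.

    import play.api.libs.json._

    // Stand-ins for MagLocator / WKWResolution (vectors assumed array-encoded).
    case class Mag(mag: List[Int])
    object Mag { implicit val format: OFormat[Mag] = Json.format[Mag] }

    case class WkwResolution(resolution: List[Int], cubeLength: Int)
    object WkwResolution { implicit val format: OFormat[WkwResolution] = Json.format[WkwResolution] }

    // Mirrors the patch: prefer the legacy "wkwResolutions" field, mapping each
    // resolution to a mag; otherwise read the new "mags" field directly.
    def readMags(json: JsValue): JsResult[List[Mag]] =
      (json \ "wkwResolutions").validate[List[WkwResolution]] match {
        case JsSuccess(resolutions, _) => JsSuccess(resolutions.map(r => Mag(r.resolution)))
        case JsError(_)                => (json \ "mags").validate[List[Mag]]
      }

    // Legacy on-disk properties still parse ...
    println(readMags(Json.parse("""{"wkwResolutions":[{"resolution":[1,1,1],"cubeLength":1024}]}""")))
    // ... and so does the new representation.
    println(readMags(Json.parse("""{"mags":[{"mag":[2,2,1]}]}""")))

Trying wkwResolutions first means the legacy field wins if both are present, which matches the migration direction of the series; note also that writes always emits mags, so a legacy layer is silently upgraded the next time it is serialized.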