Skip to content

In Data Loading, Clip to Layer BoundingBox (Redo #8551) #8573

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 5 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions CHANGELOG.unreleased.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
- Adjusted the names of custom model inference jobs and train model jobs to match the worker's naming. [#8524](https://github.com/scalableminds/webknossos/pull/8524)
- Updated screenshot tests to use `vitest` framework instead of `ava`. [#8553](https://github.com/scalableminds/webknossos/pull/8553)
- The mapping dropdown for segmentation is wider now so that mapping names are fully readable. [#8570](https://github.com/scalableminds/webknossos/pull/8570)
- When loading data from a data layer that has data stored beyond the bounding box specified in the datasource-properties.json, data outside of the bounding box is now zeroed (the layer is “clipped”). [#8551](https://github.com/scalableminds/webknossos/pull/8551)

### Fixed
- Fixed a bug in the trees tab where the color change of a tree would affect the tree on which the context menu was previously opened. [#8562](https://github.com/scalableminds/webknossos/pull/8562)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ import play.api.libs.json.{JsObject, Json}

case class BoundingBox(topLeft: Vec3Int, width: Int, height: Int, depth: Int) {

val bottomRight: Vec3Int = topLeft.move(width, height, depth)
lazy val bottomRight: Vec3Int = topLeft.move(width, height, depth)

def intersects(other: BoundingBox): Boolean =
math.max(topLeft.x, other.topLeft.x) < math.min(bottomRight.x, other.bottomRight.x) &&
Expand Down Expand Up @@ -45,6 +45,9 @@ case class BoundingBox(topLeft: Vec3Int, width: Int, height: Int, depth: Int) {
BoundingBox(Vec3Int(x, y, z), w, h, d)
}

// Whether this bounding box lies entirely within `other`.
// Implemented by clipping: if intersecting with `other` still contains this box, nothing was cut away.
// NOTE(review): relies on `intersection` and `contains(BoundingBox)` defined elsewhere in this class — confirm
// their behavior for empty/degenerate boxes matches this expectation.
def isFullyContainedIn(other: BoundingBox): Boolean =
this.intersection(other).contains(this)

/** True if this bounding box encloses no voxels, i.e. any of its dimensions is non-positive. */
def isEmpty: Boolean =
  Seq(width, height, depth).exists(_ <= 0)

Expand All @@ -61,6 +64,9 @@ case class BoundingBox(topLeft: Vec3Int, width: Int, height: Int, depth: Int) {
// Since floorDiv is used for topLeft, ceilDiv is used for the size to avoid voxels being lost at the border
BoundingBox(topLeft / that, ceilDiv(width, that.x), ceilDiv(height, that.y), ceilDiv(depth, that.z))

/** Returns this bounding box translated by `delta`; the size stays unchanged. */
def move(delta: Vec3Int): BoundingBox =
  BoundingBox(topLeft + delta, width, height, depth)

def toSql: List[Int] =
List(topLeft.x, topLeft.y, topLeft.z, width, height, depth)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,9 @@ case class Vec3Int(x: Int, y: Int, z: Int) {
def /(that: Vec3Int): Vec3Int =
Vec3Int(x / that.x, y / that.y, z / that.z)

/** Component-wise negation, enabling prefix syntax `-v` on a Vec3Int. */
def unary_- : Vec3Int =
  Vec3Int(x = -x, y = -y, z = -z)

def scale(s: Float): Vec3Int =
Vec3Int((x * s).toInt, (y * s).toInt, (z * s).toInt)

Expand Down Expand Up @@ -53,8 +56,6 @@ case class Vec3Int(x: Int, y: Int, z: Int) {
// Translates this vector by `other`, delegating to the component-wise `move(x, y, z)` overload
// (defined outside this excerpt).
def move(other: Vec3Int): Vec3Int =
move(other.x, other.y, other.z)

def negate: Vec3Int = Vec3Int(-x, -y, -z)

// All integer positions from this vector up to and including `bottomRight`
// (inclusive range via `_ to _`); `range` is a helper defined outside this excerpt.
def to(bottomRight: Vec3Int): Seq[Vec3Int] =
range(bottomRight, _ to _)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,7 @@ class BinaryDataController @Inject()(
(dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId,
datasetDirectoryName,
dataLayerName) ~> NOT_FOUND
(data, indices) <- requestData(dataSource, dataLayer, request.body)
(data, indices) <- requestData(dataSource.id, dataLayer, request.body)
duration = Instant.since(t)
_ = if (duration > (10 seconds))
logger.info(
Expand Down Expand Up @@ -110,7 +110,7 @@ class BinaryDataController @Inject()(
depth,
DataServiceRequestSettings(halfByte = halfByte, appliedAgglomerate = mappingName)
)
(data, indices) <- requestData(dataSource, dataLayer, dataRequest)
(data, indices) <- requestData(dataSource.id, dataLayer, dataRequest)
} yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*)
}
}
Expand All @@ -126,7 +126,7 @@ class BinaryDataController @Inject()(
(dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId,
datasetDirectoryName,
dataLayerName) ~> NOT_FOUND
(data, indices) <- requestData(dataSource, dataLayer, request.body)
(data, indices) <- requestData(dataSource.id, dataLayer, request.body)
} yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*)
}
}
Expand Down Expand Up @@ -154,7 +154,7 @@ class BinaryDataController @Inject()(
cubeSize,
cubeSize
)
(data, indices) <- requestData(dataSource, dataLayer, dataRequest)
(data, indices) <- requestData(dataSource.id, dataLayer, dataRequest)
} yield Ok(data).withHeaders(createMissingBucketsHeaders(indices): _*)
}
}
Expand Down Expand Up @@ -188,7 +188,7 @@ class BinaryDataController @Inject()(
depth = 1,
DataServiceRequestSettings(appliedAgglomerate = mappingName)
)
(data, _) <- requestData(dataSource, dataLayer, dataRequest)
(data, _) <- requestData(dataSource.id, dataLayer, dataRequest)
intensityRange: Option[(Double, Double)] = intensityMin.flatMap(min => intensityMax.map(max => (min, max)))
layerColor = color.flatMap(Color.fromHTML)
params = ImageCreatorParameters(
Expand Down Expand Up @@ -227,7 +227,7 @@ class BinaryDataController @Inject()(
datasetDirectoryName,
dataLayerName) ~> NOT_FOUND
segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> Messages("dataLayer.notFound")
mappingRequest = DataServiceMappingRequest(dataSource, segmentationLayer, mappingName)
mappingRequest = DataServiceMappingRequest(Some(dataSource.id), segmentationLayer, mappingName)
result <- mappingService.handleMappingRequest(mappingRequest)
} yield Ok(result)
}
Expand All @@ -248,7 +248,7 @@ class BinaryDataController @Inject()(
dataLayerName) ~> NOT_FOUND
segmentationLayer <- tryo(dataLayer.asInstanceOf[SegmentationLayer]).toFox ?~> "dataLayer.mustBeSegmentation"
adHocMeshRequest = AdHocMeshRequest(
Some(dataSource),
Some(dataSource.id),
segmentationLayer,
request.body.cuboid(dataLayer),
request.body.segmentId,
Expand Down Expand Up @@ -286,7 +286,7 @@ class BinaryDataController @Inject()(
(dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId,
datasetDirectoryName,
dataLayerName) ~> NOT_FOUND
positionAndMagOpt <- findDataService.findPositionWithData(dataSource, dataLayer)
positionAndMagOpt <- findDataService.findPositionWithData(dataSource.id, dataLayer)
} yield Ok(Json.obj("position" -> positionAndMagOpt.map(_._1), "mag" -> positionAndMagOpt.map(_._2)))
}
}
Expand All @@ -300,19 +300,19 @@ class BinaryDataController @Inject()(
datasetDirectoryName,
dataLayerName) ?~> Messages(
"dataSource.notFound") ~> NOT_FOUND ?~> Messages("histogram.layerMissing", dataLayerName)
listOfHistograms <- findDataService.createHistogram(dataSource, dataLayer) ?~> Messages("histogram.failed",
dataLayerName)
listOfHistograms <- findDataService.createHistogram(dataSource.id, dataLayer) ?~> Messages("histogram.failed",
dataLayerName)
} yield Ok(Json.toJson(listOfHistograms))
}
}

private def requestData(
dataSource: DataSource,
dataSourceId: DataSourceId,
dataLayer: DataLayer,
dataRequests: DataRequestCollection
)(implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = {
val requests =
dataRequests.map(r => DataServiceDataRequest(dataSource, dataLayer, r.cuboid(dataLayer), r.settings))
dataRequests.map(r => DataServiceDataRequest(Some(dataSourceId), dataLayer, r.cuboid(dataLayer), r.settings))
binaryDataService.handleDataRequests(requests)
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -276,7 +276,7 @@ class ZarrStreamingController @Inject()(
_ <- Fox.fromBool(dataLayer.containsMag(magParsed)) ?~> Messages("dataLayer.wrongMag", dataLayerName, mag) ~> NOT_FOUND
cubeSize = DataLayer.bucketLength
request = DataServiceDataRequest(
dataSource,
Some(dataSource.id),
dataLayer,
Cuboid(
topLeft = VoxelPosition(x * cubeSize * magParsed.x,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ class DatasetArrayBucketProvider(dataLayer: DataLayer,
case Some(remoteSourceDescriptorService: RemoteSourceDescriptorService) =>
for {
magPath: VaultPath <- remoteSourceDescriptorService.vaultPathFor(readInstruction.baseDir,
readInstruction.dataSource.id,
readInstruction.dataSourceId,
readInstruction.dataLayer.name,
magLocator)
chunkContentsCache <- sharedChunkContentsCacheOpt.toFox
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,8 @@ class MappingProvider(layer: SegmentationLayer) {

def load(readInstruction: MappingReadInstruction): Box[Array[Byte]] = {
val mappingFile = readInstruction.baseDir
.resolve(readInstruction.dataSource.id.organizationId)
.resolve(readInstruction.dataSource.id.directoryName)
.resolve(readInstruction.dataSourceId.organizationId)
.resolve(readInstruction.dataSourceId.directoryName)
.resolve(layer.name)
.resolve(MappingProvider.mappingsDir)
.resolve(s"${readInstruction.mapping}.${MappingProvider.mappingFileExtension}")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ import play.api.libs.json._
package object datasource {

case class DataSourceId(directoryName: String, organizationId: String) {
override def toString: String = s"DataSourceId($organizationId/$directoryName)"
override def toString: String = s"$organizationId/$directoryName"
}

object DataSourceId {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
package com.scalableminds.webknossos.datastore.models.requests

import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.geometry.{BoundingBox, Vec3Int}
import com.scalableminds.webknossos.datastore.models.{BucketPosition, VoxelPosition}

/**
Expand Down Expand Up @@ -51,4 +51,10 @@ case class Cuboid(topLeft: VoxelPosition, width: Int, height: Int, depth: Int) {
height * mag.y,
depth * mag.z
)

/** Bounding box of this cuboid expressed in its own mag's voxel coordinates. */
def toBoundingBoxInMag: BoundingBox = {
  val topLeftInMag = Vec3Int(topLeft.voxelXInMag, topLeft.voxelYInMag, topLeft.voxelZInMag)
  BoundingBox(topLeftInMag, width, height, depth)
}

// Bounding box of this cuboid in mag1 (global) voxel coordinates.
// `toMag1` is defined earlier in this class, outside this excerpt — presumably it scales
// topLeft and dimensions by the mag factors; confirm against the full definition.
def toMag1BoundingBox: BoundingBox =
toMag1.toBoundingBoxInMag
}
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
package com.scalableminds.webknossos.datastore.models.requests

import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.webknossos.datastore.models.{AdditionalCoordinate, BucketPosition}
import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, SegmentationLayer}
import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSourceId, SegmentationLayer}

import java.nio.file.Path

Expand All @@ -15,32 +16,41 @@ object DataServiceRequestSettings {
}

case class DataServiceDataRequest(
dataSource: DataSource, // null in VolumeTracings
dataSourceId: Option[DataSourceId], // None in case of volume tracings
dataLayer: DataLayer,
cuboid: Cuboid,
settings: DataServiceRequestSettings
) {
def isSingleBucket: Boolean = cuboid.isSingleBucket(DataLayer.bucketLength)
def mag: Vec3Int = cuboid.mag

// dataSourceId is None and unused for volume tracings. Insert dummy DataSourceId
// (also unused in that case, except for logging and bucket provider cache key)
def dataSourceIdOrVolumeDummy: DataSourceId = dataSourceId.getOrElse(DataSourceId("VolumeTracing", dataLayer.name))
}

case class DataReadInstruction(
baseDir: Path,
dataSource: DataSource,
dataSourceId: DataSourceId, // Dummy value in case of volume tracings
dataLayer: DataLayer,
bucket: BucketPosition,
version: Option[Long] = None
) {
def layerSummary: String = f"${dataSource.id.organizationId}/${dataSource.id.directoryName}/${dataLayer.name}"
def layerSummary: String = f"$dataSourceId/${dataLayer.name}"
}

case class DataServiceMappingRequest(
dataSource: DataSource,
dataSourceId: Option[DataSourceId], // None in case of volume tracings
dataLayer: SegmentationLayer,
mapping: String
)
) {
// dataSourceId is None and unused for volume tracings. Insert dummy DataSourceId
// (also unused in that case, except for logging and bucket provider cache key)
def dataSourceIdOrVolumeDummy: DataSourceId = dataSourceId.getOrElse(DataSourceId("VolumeTracing", dataLayer.name))
}

case class MappingReadInstruction(
baseDir: Path,
dataSource: DataSource,
dataSourceId: DataSourceId, // Dummy value in case of volume tracings
mapping: String
)
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ import org.apache.pekko.util.Timeout
import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int}
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate
import com.scalableminds.webknossos.datastore.models.datasource.{DataSource, ElementClass, SegmentationLayer}
import com.scalableminds.webknossos.datastore.models.datasource.{DataSourceId, ElementClass, SegmentationLayer}
import com.scalableminds.webknossos.datastore.models.requests.{
Cuboid,
DataServiceDataRequest,
Expand All @@ -26,7 +26,7 @@ import scala.concurrent.duration._
import scala.concurrent.{Await, ExecutionContext}
import scala.reflect.ClassTag

case class AdHocMeshRequest(dataSource: Option[DataSource],
case class AdHocMeshRequest(dataSourceId: Option[DataSourceId],
dataLayer: SegmentationLayer,
cuboid: Cuboid,
segmentId: Long,
Expand Down Expand Up @@ -102,7 +102,7 @@ class AdHocMeshService(binaryDataService: BinaryDataService,
request.mappingType match {
case Some("JSON") =>
mappingService.applyMapping(
DataServiceMappingRequest(request.dataSource.orNull, request.dataLayer, mappingName),
DataServiceMappingRequest(request.dataSourceId, request.dataLayer, mappingName),
data,
dataTypeFunctors.fromLong)
case _ => Fox.successful(data)
Expand All @@ -118,7 +118,7 @@ class AdHocMeshService(binaryDataService: BinaryDataService,
case Some("HDF5") =>
binaryDataService.agglomerateServiceOpt.map { agglomerateService =>
val dataRequest = DataServiceDataRequest(
request.dataSource.orNull,
request.dataSourceId,
request.dataLayer,
request.cuboid,
DataServiceRequestSettings(halfByte = false, request.mapping, None)
Expand Down Expand Up @@ -177,7 +177,7 @@ class AdHocMeshService(binaryDataService: BinaryDataService,
val cuboid = request.cuboid

val dataRequest = DataServiceDataRequest(
request.dataSource.orNull,
request.dataSourceId,
request.dataLayer,
cuboid,
DataServiceRequestSettings.default.copy(additionalCoordinates = request.additionalCoordinates)
Expand Down
Loading