diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md
index 271b85a03a2..e3b6267980b 100644
--- a/CHANGELOG.unreleased.md
+++ b/CHANGELOG.unreleased.md
@@ -18,6 +18,7 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released
 - Measurement tools are now accessible when viewing datasets outside of an annotation. [#8334](https://github.com/scalableminds/webknossos/pull/8334)

 ### Changed
+- When using a zarr link to a data layer as another layer's source, the user's token is used to access the data. [#8322](https://github.com/scalableminds/webknossos/pull/8322/)

 ### Fixed
 - Fixed a bug that lead to trees being dropped when merging to trees together. [#8359](https://github.com/scalableminds/webknossos/pull/8359)
diff --git a/test/backend/DataVaultTestSuite.scala b/test/backend/DataVaultTestSuite.scala
index a8f3ad86053..f8fdc2b879c 100644
--- a/test/backend/DataVaultTestSuite.scala
+++ b/test/backend/DataVaultTestSuite.scala
@@ -1,5 +1,6 @@
 package backend

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.tools.Fox
 import org.scalatestplus.play.PlaySpec

@@ -26,6 +27,7 @@ import scala.concurrent.ExecutionContext.{global => globalExecutionContext}
 class DataVaultTestSuite extends PlaySpec {

   val handleFoxJustification = "Handling Fox in Unit Test Context"
+  val tokenContext = TokenContext(None)

   "Data vault" when {
     "using Range requests" when {
       "return correct response" in {
         WsTestClient.withClient { ws =>
           val uri = new URI("http://storage.googleapis.com/")
-          val vaultPath = new VaultPath(uri, HttpsDataVault.create(RemoteSourceDescriptor(uri, None), ws))
+          val vaultPath =
+            new VaultPath(uri, HttpsDataVault.create(RemoteSourceDescriptor(uri, None), ws, "example.com"))
           val bytes = (vaultPath / s"neuroglancer-fafb-data/fafb_v14/fafb_v14_orig/$dataKey")
-            .readBytes(Some(range))(globalExecutionContext)
+            .readBytes(Some(range))(globalExecutionContext, tokenContext)
             .get(handleFoxJustification)

           assert(bytes.length == range.length)
@@ -53,7 +56,9 @@
         val vaultPath = new VaultPath(uri, GoogleCloudDataVault.create(RemoteSourceDescriptor(uri, None)))
         "return correct response" in {
-          val bytes = (vaultPath / dataKey).readBytes(Some(range))(globalExecutionContext).get(handleFoxJustification)
+          val bytes = (vaultPath / dataKey)
+            .readBytes(Some(range))(globalExecutionContext, tokenContext)
+            .get(handleFoxJustification)

           assert(bytes.length == range.length)
           assert(bytes.take(10).sameElements(Array(-1, -40, -1, -32, 0, 16, 74, 70, 73, 70)))
         }
@@ -63,7 +68,7 @@
       "requesting a non-existent object" in {
         val result = (vaultPath / s"non-existent-key${UUID.randomUUID}")
-          .readBytes()(globalExecutionContext)
+          .readBytes()(globalExecutionContext, tokenContext)
           .await(handleFoxJustification)
         assertBoxEmpty(result)
       }
@@ -71,7 +76,7 @@
     "return failure" when {
       "requesting invalid range" in {
         val result = (vaultPath / dataKey)
-          .readBytes(Some(Range.Long(-5, -10, 1)))(globalExecutionContext)
+          .readBytes(Some(Range.Long(-5, -10, 1)))(globalExecutionContext, tokenContext)
          .await(handleFoxJustification)
         assertBoxFailure(result)
       }
@@ -83,7 +88,7 @@
             uri, Some(GoogleServiceAccountCredential("name", JsString("secret"), "user", "org")))))
         val result = (vaultPath / dataKey)
-          .readBytes(Some(Range.Long(-10, 10, 1)))(globalExecutionContext)
+          .readBytes(Some(Range.Long(-10, 10, 1)))(globalExecutionContext, tokenContext)
           .await(handleFoxJustification)
         assertBoxFailure(result)
       }
@@ -97,7 +102,9 @@
           val vaultPath =
             new VaultPath(uri, S3DataVault.create(RemoteSourceDescriptor(uri, None), ws)(globalExecutionContext))
           val bytes =
-            (vaultPath / "s0/5/5/5").readBytes(Some(range))(globalExecutionContext).get(handleFoxJustification)
+            (vaultPath / "s0/5/5/5")
+              .readBytes(Some(range))(globalExecutionContext, tokenContext)
+              .get(handleFoxJustification)
           assert(bytes.length == range.length)
           assert(bytes.take(10).sameElements(Array(0, 0, 0, 3, 0, 0, 0, 64, 0, 0)))
         }
       }
@@ -113,9 +120,10 @@
       "return correct response" in {
         WsTestClient.withClient { ws =>
           val uri = new URI("http://storage.googleapis.com/")
-          val vaultPath = new VaultPath(uri, HttpsDataVault.create(RemoteSourceDescriptor(uri, None), ws))
+          val vaultPath =
+            new VaultPath(uri, HttpsDataVault.create(RemoteSourceDescriptor(uri, None), ws, "example.com"))
           val bytes = (vaultPath / s"neuroglancer-fafb-data/fafb_v14/fafb_v14_orig/$dataKey")
-            .readBytes()(globalExecutionContext)
+            .readBytes()(globalExecutionContext, tokenContext)
             .get(handleFoxJustification)

           assert(bytes.length == dataLength)
@@ -128,7 +136,8 @@
       "return correct response" in {
         val uri = new URI("gs://neuroglancer-fafb-data/fafb_v14/fafb_v14_orig")
         val vaultPath = new VaultPath(uri, GoogleCloudDataVault.create(RemoteSourceDescriptor(uri, None)))
-        val bytes = (vaultPath / dataKey).readBytes()(globalExecutionContext).get(handleFoxJustification)
+        val bytes =
+          (vaultPath / dataKey).readBytes()(globalExecutionContext, tokenContext).get(handleFoxJustification)

         assert(bytes.length == dataLength)
         assert(bytes.take(10).sameElements(Array(-1, -40, -1, -32, 0, 16, 74, 70, 73, 70)))
@@ -143,7 +152,7 @@
             new VaultPath(uri, S3DataVault.create(RemoteSourceDescriptor(uri, None), ws)(globalExecutionContext))
           val bytes = (vaultPath / "33792-34304_29696-30208_3216-3232")
-            .readBytes()(globalExecutionContext)
+            .readBytes()(globalExecutionContext, tokenContext)
             .get(handleFoxJustification)
           assert(bytes.take(10).sameElements(Array(-87, -95, -85, -94, -101, 124, 115, 100, 113, 111)))
         }
@@ -155,7 +164,7 @@
         WsTestClient.withClient { ws =>
           val s3DataVault = S3DataVault.create(RemoteSourceDescriptor(uri, None), ws)(globalExecutionContext)
           val vaultPath = new VaultPath(uri, s3DataVault)
-          val result = vaultPath.readBytes()(globalExecutionContext).await(handleFoxJustification)
+          val result = vaultPath.readBytes()(globalExecutionContext, tokenContext).await(handleFoxJustification)
           assertBoxEmpty(result)
         }
       }
@@ -167,7 +176,7 @@
        WsTestClient.withClient { ws =>
          val s3DataVault = S3DataVault.create(RemoteSourceDescriptor(uri, None), ws)(globalExecutionContext)
          val vaultPath = new VaultPath(uri, s3DataVault)
-         val result = vaultPath.readBytes()(globalExecutionContext).await(handleFoxJustification)
+         val result = vaultPath.readBytes()(globalExecutionContext, tokenContext).await(handleFoxJustification)
          assertBoxEmpty(result)
        }
      }
@@ -207,7 +216,8 @@
   "using vault path" when {
     class MockDataVault extends DataVault {
       override def readBytesAndEncoding(path: VaultPath, range: RangeSpecifier)(
-          implicit ec: ExecutionContext): Fox[(Array[Byte], Encoding.Value)] = ???
+          implicit ec: ExecutionContext,
+          tc: TokenContext): Fox[(Array[Byte], Encoding.Value)] = ???

       override def listDirectory(path: VaultPath, maxItems: Int)(implicit ec: ExecutionContext): Fox[List[VaultPath]] =
         ???
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala
index a01d0072e08..665e8be095c 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/BinaryDataController.scala
@@ -1,6 +1,7 @@
 package com.scalableminds.webknossos.datastore.controllers

 import com.google.inject.Inject
+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.geometry.Vec3Int
 import com.scalableminds.util.image.{Color, JPEGWriter}
 import com.scalableminds.util.time.Instant
@@ -94,7 +95,7 @@
       // If true, use lossy compression by sending only half-bytes of the data
       halfByte: Boolean,
       mappingName: Option[String]
-  ): Action[AnyContent] = Action.async { implicit request =>
+  ): Action[AnyContent] = Action.async { implicit r =>
     accessTokenService.validateAccessFromTokenContext(
       UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) {
       for {
@@ -140,7 +141,7 @@
       x: Int,
       y: Int,
       z: Int,
-      cubeSize: Int): Action[AnyContent] = Action.async { implicit request =>
+      cubeSize: Int): Action[AnyContent] = Action.async { implicit r =>
     accessTokenService.validateAccessFromTokenContext(
       UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) {
       for {
@@ -171,7 +172,7 @@
       intensityMin: Option[Double],
       intensityMax: Option[Double],
       color: Option[String],
-      invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit request =>
+      invertColor: Option[Boolean]): Action[RawBuffer] = Action.async(parse.raw) { implicit r =>
     accessTokenService.validateAccessFromTokenContext(
       UserAccessRequest.readDataSources(DataSourceId(datasetDirectoryName, organizationId))) {
       for {
@@ -252,10 +253,11 @@
           request.body.cuboid(dataLayer),
           request.body.segmentId,
           request.body.voxelSizeFactorInUnit,
+          tokenContextForRequest(request),
           request.body.mapping,
           request.body.mappingType,
           request.body.additionalCoordinates,
-          request.body.findNeighbors
+          request.body.findNeighbors,
         )
         // The client expects the ad-hoc mesh as a flat float-array. Three consecutive floats form a 3D point, three
         // consecutive 3D points (i.e., nine floats) form a triangle.
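The comment above pins down the ad-hoc mesh wire format that requestAdHocMesh returns. A minimal decoding sketch of that format, assuming the response bytes have already been converted to floats; Vec3 and Triangle are hypothetical helper types for illustration, not webknossos classes:

    final case class Vec3(x: Float, y: Float, z: Float)
    final case class Triangle(a: Vec3, b: Vec3, c: Vec3)

    def trianglesFromFlatFloats(floats: Array[Float]): Seq[Triangle] = {
      require(floats.length % 9 == 0, "expected nine floats (three 3D points) per triangle")
      floats
        .grouped(3) // three consecutive floats form one 3D point
        .map { case Array(x, y, z) => Vec3(x, y, z) }
        .grouped(3) // three consecutive points form one triangle
        .map { case Seq(a, b, c) => Triangle(a, b, c) }
        .toSeq
    }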
@@ -308,7 +310,7 @@ class BinaryDataController @Inject()(
       dataSource: DataSource,
       dataLayer: DataLayer,
       dataRequests: DataRequestCollection
-  ): Fox[(Array[Byte], List[Int])] = {
+  )(implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = {
     val requests =
       dataRequests.map(r => DataServiceDataRequest(dataSource, dataLayer, r.cuboid(dataLayer), r.settings))
     binaryDataService.handleDataRequests(requests)
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala
index a7de2562831..f59b2d20f2c 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/ZarrStreamingController.scala
@@ -261,7 +261,7 @@ class ZarrStreamingController @Inject()(
       dataLayerName: String,
       mag: String,
       coordinates: String,
-  )(implicit m: MessagesProvider): Fox[Result] =
+  )(implicit m: MessagesProvider, tc: TokenContext): Fox[Result] =
     for {
       (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId,
                                                                                 datasetDirectoryName,
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala
index d4a1c2974fd..970b76539bb 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/BucketProvider.scala
@@ -1,9 +1,11 @@
 package com.scalableminds.webknossos.datastore.dataformats

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.webknossos.datastore.models.requests.DataReadInstruction
+
 import scala.concurrent.ExecutionContext

 trait BucketProvider {
-  def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[Array[Byte]]
+  def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Array[Byte]]
 }
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala
index 4638466d316..08671a9464a 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/dataformats/DatasetArrayBucketProvider.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.dataformats

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.cache.AlfuCache
 import com.scalableminds.util.geometry.Vec3Int
 import com.scalableminds.util.time.Instant
@@ -16,6 +17,7 @@ import com.scalableminds.webknossos.datastore.models.requests.DataReadInstructio
 import com.scalableminds.webknossos.datastore.storage.RemoteSourceDescriptorService
 import com.typesafe.scalalogging.LazyLogging
 import net.liftweb.common.Empty
+
 import scala.concurrent.duration._
 import ucar.ma2.{Array => MultiArray}
@@ -32,7 +34,7 @@ class DatasetArrayBucketProvider(dataLayer: DataLayer,
   // Cache the DatasetArrays of all mags of this layer
   private lazy val datasetArrayCache = AlfuCache[Vec3Int, DatasetArray](maxCapacity = 50)

-  def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[Array[Byte]] =
+  def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Array[Byte]] =
     for {
       datasetArray <- datasetArrayCache.getOrLoad(readInstruction.bucket.mag,
                                                   _ => openDatasetArrayWithTimeLogging(readInstruction))
@@ -45,8 +47,8 @@
                                       dataLayer.elementClass == ElementClass.uint24)
     } yield bucketData

-  private def openDatasetArrayWithTimeLogging(readInstruction: DataReadInstruction)(
-      implicit ec: ExecutionContext): Fox[DatasetArray] = {
+  private def openDatasetArrayWithTimeLogging(
+      readInstruction: DataReadInstruction)(implicit ec: ExecutionContext, tc: TokenContext): Fox[DatasetArray] = {
     val before = Instant.now
     for {
       result <- openDatasetArray(readInstruction).futureBox
@@ -59,8 +61,8 @@
     } yield result
   }

-  private def openDatasetArray(readInstruction: DataReadInstruction)(
-      implicit ec: ExecutionContext): Fox[DatasetArray] = {
+  private def openDatasetArray(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext,
+                                                                     tc: TokenContext): Fox[DatasetArray] = {
     val magLocatorOpt: Option[MagLocator] =
       dataLayer.mags.find(_.mag == readInstruction.bucket.mag)
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkReader.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkReader.scala
index 2a414a01510..0160d345568 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkReader.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/ChunkReader.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.datareaders

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.util.tools.Fox.box2Fox
 import com.scalableminds.webknossos.datastore.datavault.VaultPath
@@ -18,7 +19,7 @@ class ChunkReader(header: DatasetHeader) {
   def read(path: VaultPath,
            chunkShapeFromMetadata: Array[Int],
            range: Option[NumericRange[Long]],
-           useSkipTypingShortcut: Boolean)(implicit ec: ExecutionContext): Fox[MultiArray] =
+           useSkipTypingShortcut: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[MultiArray] =
     for {
       chunkBytesAndShapeBox: Box[(Array[Byte], Option[Array[Int]])] <- readChunkBytesAndShape(path, range).futureBox
       chunkShape: Array[Int] = chunkBytesAndShapeBox.toOption.flatMap(_._2).getOrElse(chunkShapeFromMetadata)
@@ -39,7 +40,8 @@
   // Returns bytes (optional, Fox.empty may later be replaced with fill value)
   // and chunk shape (optional, only for data formats where each chunk reports its own shape, e.g. N5)
   protected def readChunkBytesAndShape(path: VaultPath, range: Option[NumericRange[Long]])(
-      implicit ec: ExecutionContext): Fox[(Array[Byte], Option[Array[Int]])] =
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[(Array[Byte], Option[Array[Int]])] =
     for {
       bytes <- path.readBytes(range)
       decompressed <- tryo(header.compressorImpl.decompress(bytes)).toFox ?~> "chunk.decompress.failed"
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala
index b04fa57ee80..4c64e13f34b 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/DatasetArray.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.datareaders

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.cache.AlfuCache
 import com.scalableminds.util.geometry.Vec3Int
 import com.scalableminds.util.tools.Fox
@@ -64,10 +65,11 @@ class DatasetArray(vaultPath: VaultPath,
           chunkShape // irregular shaped chunk indexes are currently not supported for 2d datasets
         }

-  def readBytesWithAdditionalCoordinates(shapeXYZ: Vec3Int,
-                                         offsetXYZ: Vec3Int,
-                                         additionalCoordinatesOpt: Option[Seq[AdditionalCoordinate]],
-                                         shouldReadUint24: Boolean)(implicit ec: ExecutionContext): Fox[Array[Byte]] =
+  def readBytesWithAdditionalCoordinates(
+      shapeXYZ: Vec3Int,
+      offsetXYZ: Vec3Int,
+      additionalCoordinatesOpt: Option[Seq[AdditionalCoordinate]],
+      shouldReadUint24: Boolean)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Array[Byte]] =
     for {
       (shapeArray, offsetArray) <- tryo(constructShapeAndOffsetArrays(
         shapeXYZ,
@@ -114,7 +116,8 @@
   }

   // returns byte array in fortran-order with little-endian values
-  private def readBytes(shape: Array[Int], offset: Array[Int])(implicit ec: ExecutionContext): Fox[Array[Byte]] =
+  private def readBytes(shape: Array[Int], offset: Array[Int])(implicit ec: ExecutionContext,
+                                                               tc: TokenContext): Fox[Array[Byte]] =
     for {
       typedMultiArray <- readAsFortranOrder(shape, offset)
       asBytes <- BytesConverter.toByteArray(typedMultiArray, header.resolvedDataType, ByteOrder.LITTLE_ENDIAN)
@@ -147,8 +150,8 @@
   // The local variables like chunkIndices are also in this order unless explicitly named.
   // Loading data adapts to the array's axis order so that …CXYZ data in fortran-order is
   // returned, regardless of the array’s internal storage.
-  private def readAsFortranOrder(shape: Array[Int], offset: Array[Int])(
-      implicit ec: ExecutionContext): Fox[MultiArray] = {
+  private def readAsFortranOrder(shape: Array[Int], offset: Array[Int])(implicit ec: ExecutionContext,
+                                                                        tc: TokenContext): Fox[MultiArray] = {
     val totalOffset: Array[Int] = offset.zip(header.voxelOffset).map { case (o, v) => o - v }.padTo(offset.length, 0)
     val chunkIndices = ChunkUtils.computeChunkIndices(datasetShape.map(fullAxisOrder.permuteIndicesArrayToWk),
                                                       fullAxisOrder.permuteIndicesArrayToWk(chunkShape),
@@ -185,19 +188,22 @@
         s"Copying data from dataset chunk failed. Chunk shape (F): ${printAsOuterF(sourceChunk.getShape)}, target shape (F): ${printAsOuterF(
           target.getShape)}, offsetInChunk: ${printAsOuterF(offsetInChunk)}. Axis order (C): $fullAxisOrder (outer: ${fullAxisOrder.toStringWk})"

-  protected def getShardedChunkPathAndRange(chunkIndex: Array[Int])(
-      implicit ec: ExecutionContext): Fox[(VaultPath, NumericRange[Long])] = ??? // Defined in subclass
+  protected def getShardedChunkPathAndRange(
+      chunkIndex: Array[Int])(implicit ec: ExecutionContext, tc: TokenContext): Fox[(VaultPath, NumericRange[Long])] =
+    ??? // Defined in subclass

   private def chunkContentsCacheKey(chunkIndex: Array[Int]): String =
     s"${dataSourceId}__${layerName}__${vaultPath}__chunk_${chunkIndex.mkString(",")}"

   private def getSourceChunkDataWithCache(chunkIndex: Array[Int], useSkipTypingShortcut: Boolean = false)(
-      implicit ec: ExecutionContext): Fox[MultiArray] =
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[MultiArray] =
     sharedChunkContentsCache.getOrLoad(chunkContentsCacheKey(chunkIndex),
                                        _ => readSourceChunkData(chunkIndex, useSkipTypingShortcut))

   private def readSourceChunkData(chunkIndex: Array[Int], useSkipTypingShortcut: Boolean)(
-      implicit ec: ExecutionContext): Fox[MultiArray] =
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[MultiArray] =
     if (header.isSharded) {
       for {
         (shardPath, chunkRange) <- getShardedChunkPathAndRange(chunkIndex) ?~> "chunk.getShardedPathAndRange.failed"
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala
index 27541d75dd1..7dd6038d8b8 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5Array.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.datareaders.n5

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.tools.{Fox, JsonHelper}
 import com.scalableminds.util.cache.AlfuCache
 import com.scalableminds.webknossos.datastore.datareaders.{AxisOrder, ChunkReader, DatasetArray, DatasetHeader}
@@ -21,7 +22,8 @@ object N5Array extends LazyLogging {
            axisOrderOpt: Option[AxisOrder],
            channelIndex: Option[Int],
            additionalAxes: Option[Seq[AdditionalAxis]],
-           sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[N5Array] =
+           sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext,
+                                                                    tc: TokenContext): Fox[N5Array] =
     for {
       headerBytes <- (path / N5Header.FILENAME_ATTRIBUTES_JSON)
         .readBytes() ?~> s"Could not read header at ${N5Header.FILENAME_ATTRIBUTES_JSON}"
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5ChunkReader.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5ChunkReader.scala
index 02f9e9cfb36..8133f06bc1d 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5ChunkReader.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/n5/N5ChunkReader.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.datareaders.n5

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.util.tools.Fox.box2Fox
 import com.scalableminds.webknossos.datastore.datareaders.{ChunkReader, DatasetHeader}
@@ -21,7 +22,8 @@ class N5ChunkReader(header: DatasetHeader) extends ChunkReader(header) with Lazy
   private val dataExtractor: N5DataExtractor = new N5DataExtractor

   override protected def readChunkBytesAndShape(path: VaultPath, range: Option[NumericRange[Long]])(
-      implicit ec: ExecutionContext): Fox[(Array[Byte], Option[Array[Int]])] = {
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[(Array[Byte], Option[Array[Int]])] = {

     def processBytes(bytes: Array[Byte], expectedElementCount: Int): Box[Array[Byte]] =
       for {
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala
index f7cc98ef8d1..4558031c0f5 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/precomputed/PrecomputedArray.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.datareaders.precomputed

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.cache.AlfuCache
 import com.scalableminds.util.io.ZipIO
 import com.scalableminds.util.tools.{Fox, FoxImplicits, JsonHelper}
@@ -19,14 +20,14 @@
 import net.liftweb.common.Box
 import ucar.ma2.{Array => MultiArray}

 object PrecomputedArray extends LazyLogging {
-  def open(
-      magPath: VaultPath,
-      dataSourceId: DataSourceId,
-      layerName: String,
-      axisOrderOpt: Option[AxisOrder],
-      channelIndex: Option[Int],
-      additionalAxes: Option[Seq[AdditionalAxis]],
-      sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[PrecomputedArray] =
+  def open(magPath: VaultPath,
+           dataSourceId: DataSourceId,
+           layerName: String,
+           axisOrderOpt: Option[AxisOrder],
+           channelIndex: Option[Int],
+           additionalAxes: Option[Seq[AdditionalAxis]],
+           sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext,
+                                                                    tc: TokenContext): Fox[PrecomputedArray] =
     for {
       headerBytes <- (magPath.parent / PrecomputedHeader.FILENAME_INFO)
         .readBytes() ?~> s"Could not read header at ${PrecomputedHeader.FILENAME_INFO}"
@@ -136,10 +137,10 @@
       case _ => bytes
     }

-  private def getShardIndex(shardPath: VaultPath)(implicit ec: ExecutionContext): Fox[Array[Byte]] =
+  private def getShardIndex(shardPath: VaultPath)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Array[Byte]] =
     shardIndexCache.getOrLoad(shardPath, readShardIndex)

-  private def readShardIndex(shardPath: VaultPath)(implicit ec: ExecutionContext): Fox[Array[Byte]] =
+  private def readShardIndex(shardPath: VaultPath)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Array[Byte]] =
     shardPath.readBytes(Some(shardIndexRange))

   private def parseShardIndex(index: Array[Byte]): Seq[(Long, Long)] =
@@ -227,11 +228,13 @@
   }

   private def getMinishardIndex(shardPath: VaultPath, minishardNumber: Int)(
-      implicit ec: ExecutionContext): Fox[Array[(Long, Long, Long)]] =
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[Array[(Long, Long, Long)]] =
     minishardIndexCache.getOrLoad((shardPath, minishardNumber), readMinishardIndex)

   private def readMinishardIndex(vaultPathAndMinishardNumber: (VaultPath, Int))(
-      implicit ec: ExecutionContext): Fox[Array[(Long, Long, Long)]] = {
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[Array[(Long, Long, Long)]] = {
     val (vaultPath, minishardNumber) = vaultPathAndMinishardNumber
     for {
       index <- getShardIndex(vaultPath)
@@ -250,8 +253,8 @@ class PrecomputedArray(vaultPath: VaultPath,
       chunkEnd = (shardIndexRange.end) + chunkSpecification._2 + chunkSpecification._3
     } yield Range.Long(chunkStart, chunkEnd, 1)

-  override def getShardedChunkPathAndRange(chunkIndex: Array[Int])(
-      implicit ec: ExecutionContext): Fox[(VaultPath, NumericRange[Long])] = {
+  override def getShardedChunkPathAndRange(
+      chunkIndex: Array[Int])(implicit ec: ExecutionContext, tc: TokenContext): Fox[(VaultPath, NumericRange[Long])] = {
     val chunkIdentifier = getHashForChunk(chunkIndex)
     val minishardInfo = getMinishardInfo(chunkIdentifier)
     val shardPath = getPathForShard(minishardInfo._1)
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala
index c29e42d399f..43716f4b2e3 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/wkw/WKWArray.scala
@@ -1,6 +1,7 @@
 package com.scalableminds.webknossos.datastore.datareaders.wkw

 import com.google.common.io.LittleEndianDataInputStream
+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.cache.AlfuCache
 import com.scalableminds.util.tools.Fox.box2Fox
 import com.scalableminds.util.tools.Fox
@@ -21,7 +22,8 @@ object WKWArray extends WKWDataFormatHelper {
   def open(path: VaultPath,
            dataSourceId: DataSourceId,
            layerName: String,
-           sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[WKWArray] =
+           sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext,
+                                                                    tc: TokenContext): Fox[WKWArray] =
     for {
       headerBytes <- (path / FILENAME_HEADER_WKW).readBytes() ?~> s"Could not read header at ${FILENAME_HEADER_WKW}"
       dataInputStream = new LittleEndianDataInputStream(new ByteArrayInputStream(headerBytes))
@@ -58,8 +60,8 @@ class WKWArray(vaultPath: VaultPath,
   private val parsedShardIndexCache: AlfuCache[VaultPath, Array[Long]] = AlfuCache()

-  override protected def getShardedChunkPathAndRange(chunkIndex: Array[Int])(
-      implicit ec: ExecutionContext): Fox[(VaultPath, NumericRange[Long])] =
+  override protected def getShardedChunkPathAndRange(
+      chunkIndex: Array[Int])(implicit ec: ExecutionContext, tc: TokenContext): Fox[(VaultPath, NumericRange[Long])] =
     for {
       shardCoordinates <- Fox.option2Fox(chunkIndexToShardIndex(chunkIndex).headOption)
       shardFilename = getChunkFilename(shardCoordinates)
@@ -76,7 +78,8 @@
     else shardIndex(0) + header.numBytesPerChunk.toLong * chunkIndexInShardIndex.toLong

-  private def readAndParseShardIndex(shardPath: VaultPath)(implicit ec: ExecutionContext): Fox[Array[Long]] = {
+  private def readAndParseShardIndex(shardPath: VaultPath)(implicit ec: ExecutionContext,
+                                                           tc: TokenContext): Fox[Array[Long]] = {
     val skipBytes = 8 // First 8 bytes of header are other metadata
     val bytesPerShardIndexEntry = 8
     val numEntriesToRead = if (header.isCompressed) 1 + header.numChunksPerShard else 1
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala
index dbc1b5af04d..752643d1c0e 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr/ZarrArray.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.datareaders.zarr

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.tools.Fox.box2Fox
 import com.scalableminds.util.tools.{Fox, JsonHelper}
 import com.scalableminds.util.cache.AlfuCache
@@ -20,7 +21,8 @@ object ZarrArray extends LazyLogging {
            axisOrderOpt: Option[AxisOrder],
            channelIndex: Option[Int],
            additionalAxes: Option[Seq[AdditionalAxis]],
-           sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[ZarrArray] =
+           sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext,
+                                                                    tc: TokenContext): Fox[ZarrArray] =
     for {
       headerBytes <- (path / ZarrHeader.FILENAME_DOT_ZARRAY)
         .readBytes() ?~> s"Could not read header at ${ZarrHeader.FILENAME_DOT_ZARRAY}"
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala
index 874dd1d38fd..c0b640f026e 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3Array.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.datareaders.zarr3

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.cache.AlfuCache
 import com.scalableminds.util.tools.Fox.box2Fox
 import com.scalableminds.util.tools.{Fox, JsonHelper}
@@ -21,7 +22,8 @@ object Zarr3Array extends LazyLogging {
           axisOrderOpt: Option[AxisOrder],
           channelIndex: Option[Int],
           additionalAxes: Option[Seq[AdditionalAxis]],
-          sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext): Fox[Zarr3Array] =
+          sharedChunkContentsCache: AlfuCache[String, MultiArray])(implicit ec: ExecutionContext,
+                                                                   tc: TokenContext): Fox[Zarr3Array] =
     for {
       headerBytes <- (path / Zarr3ArrayHeader.FILENAME_ZARR_JSON)
         .readBytes() ?~> s"Could not read header at ${Zarr3ArrayHeader.FILENAME_ZARR_JSON}"
@@ -116,7 +118,8 @@
       .sum
   }

-  private def readAndParseShardIndex(shardPath: VaultPath)(implicit ec: ExecutionContext): Fox[Array[(Long, Long)]] =
+  private def readAndParseShardIndex(shardPath: VaultPath)(implicit ec: ExecutionContext,
+                                                           tc: TokenContext): Fox[Array[(Long, Long)]] =
     for {
       shardIndexRaw <- readShardIndex(shardPath)
       parsed = parseShardIndex(shardIndexRaw)
@@ -131,7 +134,7 @@
   }

   private def getShardIndexSize = shardIndexEntryLength * chunksPerShard + shardIndexChecksumLength

-  private def readShardIndex(shardPath: VaultPath)(implicit ec: ExecutionContext) =
+  private def readShardIndex(shardPath: VaultPath)(implicit ec: ExecutionContext, tc: TokenContext) =
     shardingCodec match {
       case Some(codec) if codec.index_location == IndexLocationSetting.start =>
         shardPath.readBytes(Some(Range.Long(0, getShardIndexSize.toLong, 1)))
@@ -166,8 +169,8 @@
       chunkIndex.zip(header.chunkShape).map { case (i, s) => i * s }
     )

-  override protected def getShardedChunkPathAndRange(chunkIndex: Array[Int])(
-      implicit ec: ExecutionContext): Fox[(VaultPath, NumericRange[Long])] =
+  override protected def getShardedChunkPathAndRange(
+      chunkIndex: Array[Int])(implicit ec: ExecutionContext, tc: TokenContext): Fox[(VaultPath, NumericRange[Long])] =
     for {
       shardCoordinates <- Fox.option2Fox(chunkIndexToShardIndex(chunkIndex).headOption)
       shardFilename = getChunkFilename(shardCoordinates)
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3ChunkReader.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3ChunkReader.scala
index 3190cfe378a..975781ccec9 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3ChunkReader.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datareaders/zarr3/Zarr3ChunkReader.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.datareaders.zarr3

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.webknossos.datastore.datareaders.{ChunkReader, DatasetHeader}
 import com.scalableminds.webknossos.datastore.datavault.VaultPath
@@ -10,7 +11,8 @@
 import scala.concurrent.ExecutionContext

 class Zarr3ChunkReader(header: DatasetHeader, array: Zarr3Array) extends ChunkReader(header) {
   override protected def readChunkBytesAndShape(path: VaultPath, range: Option[NumericRange[Long]])(
-      implicit ec: ExecutionContext): Fox[(Array[Byte], Option[Array[Int]])] =
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[(Array[Byte], Option[Array[Int]])] =
     for {
       bytes <- path.readBytes(range)
       decoded = array.codecs.foldRight(bytes)((c, bytes) =>
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/DataVault.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/DataVault.scala
index 2fcb3475d7b..7ef95b2e305 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/DataVault.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/DataVault.scala
@@ -1,12 +1,13 @@
 package com.scalableminds.webknossos.datastore.datavault

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.tools.Fox

 import scala.concurrent.ExecutionContext

 trait DataVault {
-  def readBytesAndEncoding(path: VaultPath, range: RangeSpecifier)(
-      implicit ec: ExecutionContext): Fox[(Array[Byte], Encoding.Value)]
+  def readBytesAndEncoding(path: VaultPath, range: RangeSpecifier)(implicit ec: ExecutionContext,
+                                                                   tc: TokenContext): Fox[(Array[Byte], Encoding.Value)]

   def listDirectory(path: VaultPath, maxItems: Int)(implicit ec: ExecutionContext): Fox[List[VaultPath]]
 }
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala
index 0d42244c6a0..c6192bc99e1 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/FileSystemDataVault.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.datavault

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.util.tools.Fox.bool2Fox
 import com.scalableminds.webknossos.datastore.storage.DataVaultService
@@ -16,7 +17,8 @@ import scala.jdk.CollectionConverters._

 class FileSystemDataVault extends DataVault {

   override def readBytesAndEncoding(path: VaultPath, range: RangeSpecifier)(
-      implicit ec: ExecutionContext): Fox[(Array[Byte], Encoding.Value)] =
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[(Array[Byte], Encoding.Value)] =
     for {
       localPath <- vaultPathToLocalPath(path)
       bytes <- readBytesLocal(localPath, range)
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/GoogleCloudDataVault.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/GoogleCloudDataVault.scala
index 4d17f793c3a..919fb4f0562 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/GoogleCloudDataVault.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/GoogleCloudDataVault.scala
@@ -2,6 +2,7 @@ package com.scalableminds.webknossos.datastore.datavault

 import com.google.auth.oauth2.ServiceAccountCredentials
 import com.google.cloud.storage.{BlobId, BlobInfo, Storage, StorageException, StorageOptions}
+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.webknossos.datastore.storage.{GoogleServiceAccountCredential, RemoteSourceDescriptor}
 import net.liftweb.common.Box.tryo
@@ -33,7 +34,8 @@ class GoogleCloudDataVault(uri: URI, credential: Option[GoogleServiceAccountCred
   private lazy val bucket: String = uri.getAuthority

   override def readBytesAndEncoding(path: VaultPath, range: RangeSpecifier)(
-      implicit ec: ExecutionContext): Fox[(Array[Byte], Encoding.Value)] = {
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[(Array[Byte], Encoding.Value)] = {
     val objName = path.toUri.getPath.tail
     val blobId = BlobId.of(bucket, objName)
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/HttpsDataVault.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/HttpsDataVault.scala
index dbe4f58ccf4..8ef7290aacd 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/HttpsDataVault.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/HttpsDataVault.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.datavault

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.cache.AlfuCache
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.util.tools.Fox.{box2Fox, future2Fox}
@@ -12,22 +13,27 @@ import com.scalableminds.webknossos.datastore.storage.{
 import com.typesafe.scalalogging.LazyLogging
 import org.apache.commons.lang3.builder.HashCodeBuilder
 import play.api.http.Status
-import play.api.libs.ws.{WSAuthScheme, WSClient, WSResponse}
+import play.api.libs.ws.{WSAuthScheme, WSClient, WSRequest, WSResponse}

 import java.net.URI
 import scala.concurrent.duration.DurationInt
 import scala.collection.immutable.NumericRange
 import scala.concurrent.ExecutionContext

-class HttpsDataVault(credential: Option[DataVaultCredential], ws: WSClient) extends DataVault with LazyLogging {
+class HttpsDataVault(credential: Option[DataVaultCredential], ws: WSClient, dataStoreHost: String)
+    extends DataVault
+    with LazyLogging {

   private val readTimeout = 10 minutes

   // This will be set after the first completed range request by looking at the response headers of a HEAD request and the response headers of a GET request
   private var supportsRangeRequests: Option[Boolean] = None

+  private lazy val dataStoreAuthority = new URI(dataStoreHost).getAuthority
+
   override def readBytesAndEncoding(path: VaultPath, range: RangeSpecifier)(
-      implicit ec: ExecutionContext): Fox[(Array[Byte], Encoding.Value)] = {
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[(Array[Byte], Encoding.Value)] = {
     val uri = path.toUri
     for {
       response <- range match {
@@ -61,21 +67,23 @@
     }
   )

-  private def getWithRange(uri: URI, range: NumericRange[Long])(implicit ec: ExecutionContext): Fox[WSResponse] =
+  private def getWithRange(uri: URI, range: NumericRange[Long])(implicit ec: ExecutionContext,
+                                                                tc: TokenContext): Fox[WSResponse] =
     for {
       _ <- ensureRangeRequestsSupported(uri)
       response <- buildRequest(uri).withHttpHeaders("Range" -> s"bytes=${range.start}-${range.end - 1}").get().toFox
       _ = updateRangeRequestsSupportedForResponse(response)
     } yield response

-  private def getWithSuffixRange(uri: URI, length: Long)(implicit ec: ExecutionContext): Fox[WSResponse] =
+  private def getWithSuffixRange(uri: URI, length: Long)(implicit ec: ExecutionContext,
+                                                         tc: TokenContext): Fox[WSResponse] =
     for {
       _ <- ensureRangeRequestsSupported(uri)
       response <- buildRequest(uri).withHttpHeaders("Range" -> s"bytes=-$length").get().toFox
       _ = updateRangeRequestsSupportedForResponse(response)
     } yield response

-  private def getComplete(uri: URI)(implicit ec: ExecutionContext): Fox[WSResponse] =
+  private def getComplete(uri: URI)(implicit ec: ExecutionContext, tc: TokenContext): Fox[WSResponse] =
     buildRequest(uri).get().toFox

   private def ensureRangeRequestsSupported(uri: URI)(implicit ec: ExecutionContext): Fox[Unit] =
@@ -103,13 +111,19 @@
     supportsRangeRequests = Some(response.header("Content-Range").isDefined)
   }

-  private def buildRequest(uri: URI) = {
+  private def buildRequest(uri: URI)(implicit tc: TokenContext): WSRequest = {
     val request = ws.url(uri.toString).withRequestTimeout(readTimeout)
-    getBasicAuthCredential match {
-      case Some(credential) =>
-        request.withAuth(credential.username, credential.password, WSAuthScheme.BASIC)
-      case None => request
+    tc.userTokenOpt match {
+      case Some(token) if uri.getAuthority == dataStoreAuthority =>
+        request.withHttpHeaders("X-Auth-Token" -> token)
+      case _ =>
+        getBasicAuthCredential match {
+          case Some(credential) =>
+            request.withAuth(credential.username, credential.password, WSAuthScheme.BASIC)
+          case None => request
+        }
     }
+  }

   private def getBasicAuthCredential: Option[HttpBasicAuthCredential] =
@@ -133,6 +147,6 @@
 }

 object HttpsDataVault {
-  def create(remoteSourceDescriptor: RemoteSourceDescriptor, ws: WSClient): HttpsDataVault =
-    new HttpsDataVault(remoteSourceDescriptor.credential, ws)
+  def create(remoteSourceDescriptor: RemoteSourceDescriptor, ws: WSClient, dataStoreHost: String): HttpsDataVault =
+    new HttpsDataVault(remoteSourceDescriptor.credential, ws, dataStoreHost)
 }
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/S3DataVault.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/S3DataVault.scala
index e37765b7cfc..32530aed1a5 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/S3DataVault.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/S3DataVault.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.datavault

+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.util.tools.Fox.{box2Fox, future2Fox}
 import com.scalableminds.webknossos.datastore.storage.{
@@ -102,7 +103,8 @@ class S3DataVault(s3AccessKeyCredential: Option[S3AccessKeyCredential],
   }

   override def readBytesAndEncoding(path: VaultPath, range: RangeSpecifier)(
-      implicit ec: ExecutionContext): Fox[(Array[Byte], Encoding.Value)] =
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[(Array[Byte], Encoding.Value)] =
     for {
       objectKey <- Fox.box2Fox(S3DataVault.objectKeyFromUri(path.toUri))
       request = range match {
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/VaultPath.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/VaultPath.scala
index b0d66b98e28..f39dcd93473 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/VaultPath.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/datavault/VaultPath.scala
@@ -2,6 +2,7 @@ package com.scalableminds.webknossos.datastore.datavault

 import com.aayushatharva.brotli4j.Brotli4jLoader
 import com.aayushatharva.brotli4j.decoder.BrotliInputStream
+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.io.ZipIO
 import com.scalableminds.util.tools.{Fox, JsonHelper}
 import com.scalableminds.util.tools.Fox.box2Fox
@@ -18,13 +19,14 @@ import scala.concurrent.ExecutionContext

 class VaultPath(uri: URI, dataVault: DataVault) extends LazyLogging {

-  def readBytes(range: Option[NumericRange[Long]] = None)(implicit ec: ExecutionContext): Fox[Array[Byte]] =
+  def readBytes(range: Option[NumericRange[Long]] = None)(implicit ec: ExecutionContext,
+                                                          tc: TokenContext): Fox[Array[Byte]] =
     for {
       bytesAndEncoding <- dataVault.readBytesAndEncoding(this, RangeSpecifier.fromRangeOpt(range)) ?=> "Failed to read from vault path"
       decoded <- decode(bytesAndEncoding) ?~> s"Failed to decode ${bytesAndEncoding._2}-encoded response."
     } yield decoded

-  def readLastBytes(byteCount: Int)(implicit ec: ExecutionContext): Fox[Array[Byte]] =
+  def readLastBytes(byteCount: Int)(implicit ec: ExecutionContext, tc: TokenContext): Fox[Array[Byte]] =
     for {
       bytesAndEncoding <- dataVault.readBytesAndEncoding(this, SuffixLength(byteCount)) ?=> "Failed to read from vault path"
       decoded <- decode(bytesAndEncoding) ?~> s"Failed to decode ${bytesAndEncoding._2}-encoded response."
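Every method on the read path now takes an implicit TokenContext next to the ExecutionContext, so the requesting user's token travels from the controller down to the vault without explicit plumbing at each call site. A minimal, self-contained sketch of this pattern; SimplifiedVault, InMemoryVault and the token value are illustrative stand-ins, not actual datastore code:

    import scala.concurrent.{ExecutionContext, Future}

    // Illustrative stand-in for com.scalableminds.util.accesscontext.TokenContext.
    final case class TokenContext(userTokenOpt: Option[String])

    trait SimplifiedVault {
      // Each layer declares the same implicit pair, so the token context is
      // forwarded automatically from caller to callee.
      def readBytes(key: String)(implicit ec: ExecutionContext, tc: TokenContext): Future[Array[Byte]]
    }

    object InMemoryVault extends SimplifiedVault {
      def readBytes(key: String)(implicit ec: ExecutionContext, tc: TokenContext): Future[Array[Byte]] =
        Future {
          // A real vault would attach tc.userTokenOpt as an X-Auth-Token header here.
          println(s"reading $key as ${tc.userTokenOpt.getOrElse("anonymous")}")
          Array.emptyByteArray
        }
    }

    object Demo {
      def main(args: Array[String]): Unit = {
        implicit val ec: ExecutionContext = ExecutionContext.global
        implicit val tc: TokenContext = TokenContext(Some("user-token")) // or TokenContext(None)
        InMemoryVault.readBytes("some/chunk") // both implicits resolved from the enclosing scope
      }
    }

This is the mechanism that lets HttpsDataVault.buildRequest (above) decide per request whether to forward the user's token as an X-Auth-Token header, when the target authority matches the datastore's own host, or fall back to the configured basic-auth credential.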
@@ -93,7 +95,7 @@ class VaultPath(uri: URI, dataVault: DataVault) extends LazyLogging { override def hashCode(): Int = new HashCodeBuilder(17, 31).append(uri.toString).append(dataVault).toHashCode - def parseAsJson[T: Reads](implicit ec: ExecutionContext): Fox[T] = + def parseAsJson[T: Reads](implicit ec: ExecutionContext, tc: TokenContext): Fox[T] = for { fileBytes <- this.readBytes().toFox fileAsString <- tryo(new String(fileBytes, StandardCharsets.UTF_8)).toFox diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreLocalLayerService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreLocalLayerService.scala index 97a9e2a827a..84d3f64c0b1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreLocalLayerService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreLocalLayerService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.explore +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.io.PathUtils import com.scalableminds.util.tools.{Fox, FoxImplicits} @@ -51,7 +52,7 @@ class ExploreLocalLayerService @Inject()(dataVaultService: DataVaultService) mag <- Fox .option2Fox(Vec3Int.fromMagLiteral(dir.getFileName.toString, allowScalar = true)) ?~> s"invalid mag: ${dir.getFileName}" vaultPath <- dataVaultService.getVaultPath(remoteSourceDescriptor) ?~> "dataVault.setup.failed" - layersWithVoxelSizes <- new ZarrArrayExplorer(mag).explore(vaultPath, None) + layersWithVoxelSizes <- new ZarrArrayExplorer(mag).explore(vaultPath, None)(TokenContext(None)) } yield layersWithVoxelSizes)) (layers, voxelSize) <- adaptLayersAndVoxelSize(layersWithVoxelSizes.flatten, None) relativeLayers = layers.map(selectLastTwoDirectories) @@ -130,7 +131,7 @@ class ExploreLocalLayerService @Inject()(dataVaultService: DataVaultService) fullPath <- Fox.successful(path.resolve(layer)) remoteSourceDescriptor <- Fox.successful(RemoteSourceDescriptor(fullPath.toUri, None)) vaultPath <- dataVaultService.getVaultPath(remoteSourceDescriptor) ?~> "dataVault.setup.failed" - layersWithVoxelSizes <- explorer.explore(vaultPath, None) + layersWithVoxelSizes <- explorer.explore(vaultPath, None)(TokenContext(None)) (layers, voxelSize) <- adaptLayersAndVoxelSize(layersWithVoxelSizes, None) relativeLayers = makeLayersRelative(layers) dataSource = new DataSourceWithMagLocators(dataSourceId, relativeLayers, voxelSize) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreRemoteLayerService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreRemoteLayerService.scala index 3daef569eb9..14cf24d007b 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreRemoteLayerService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ExploreRemoteLayerService.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.explore +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.DataStoreConfig @@ -52,7 +53,8 @@ class ExploreRemoteLayerService @Inject()(dataVaultService: DataVaultService, with LazyLogging { def exploreRemoteDatasource(parameters: List[ExploreRemoteLayerParameters], reportMutable: 
ListBuffer[String])( - implicit ec: ExecutionContext): Fox[GenericDataSource[DataLayer]] = + implicit ec: ExecutionContext, + tc: TokenContext): Fox[GenericDataSource[DataLayer]] = for { exploredLayersNested <- Fox.serialCombined(parameters)( parameters => @@ -75,7 +77,8 @@ class ExploreRemoteLayerService @Inject()(dataVaultService: DataVaultService, private def exploreRemoteLayersForOneUri(layerUri: String, credentialId: Option[String], reportMutable: ListBuffer[String])( - implicit ec: ExecutionContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] = + implicit ec: ExecutionContext, + tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] = for { uri <- tryo(new URI(removeNeuroglancerPrefixesFromUri(removeHeaderFileNamesFromUriSuffix(layerUri)))) ?~> s"Received invalid URI: $layerUri" _ <- bool2Fox(uri.getScheme != null) ?~> s"Received invalid URI: $layerUri" @@ -116,7 +119,8 @@ class ExploreRemoteLayerService @Inject()(dataVaultService: DataVaultService, credentialId: Option[String], explorers: List[RemoteLayerExplorer], reportMutable: ListBuffer[String])( - implicit ec: ExecutionContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] = + implicit ec: ExecutionContext, + tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] = remotePathsWithDepth match { case Nil => Fox.empty @@ -139,7 +143,8 @@ class ExploreRemoteLayerService @Inject()(dataVaultService: DataVaultService, explorers: List[RemoteLayerExplorer], credentialId: Option[String], reportMutable: ListBuffer[String])( - implicit ec: ExecutionContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] = + implicit ec: ExecutionContext, + tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] = Fox .sequence(explorers.map { explorer => { @@ -179,7 +184,8 @@ class ExploreRemoteLayerService @Inject()(dataVaultService: DataVaultService, credentialId: Option[String], explorers: List[RemoteLayerExplorer], reportMutable: ListBuffer[String])( - implicit ec: ExecutionContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] = + implicit ec: ExecutionContext, + tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] = explorationResultOfPath match { case Full(layersWithVoxelSizes) => Fox.successful(layersWithVoxelSizes) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/N5ArrayExplorer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/N5ArrayExplorer.scala index ac26c14b253..efd112792dc 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/N5ArrayExplorer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/N5ArrayExplorer.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.explore +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.dataformats.MagLocator @@ -16,7 +17,8 @@ class N5ArrayExplorer(implicit val ec: ExecutionContext) extends RemoteLayerExpl override def name: String = "N5 Array" - override def explore(remotePath: VaultPath, credentialId: Option[String]): Fox[List[(N5Layer, VoxelSize)]] = + override def explore(remotePath: VaultPath, credentialId: Option[String])( + implicit tc: TokenContext): Fox[List[(N5Layer, VoxelSize)]] = for { headerPath <- Fox.successful(remotePath / N5Header.FILENAME_ATTRIBUTES_JSON) name = guessNameFromPath(remotePath) diff --git 
a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/N5MultiscalesExplorer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/N5MultiscalesExplorer.scala index 1e9aafba57e..c527b5c7837 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/N5MultiscalesExplorer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/N5MultiscalesExplorer.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.explore +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.util.tools.{Fox, FoxImplicits} import com.scalableminds.webknossos.datastore.dataformats.MagLocator @@ -17,16 +18,18 @@ class N5MultiscalesExplorer(implicit val ec: ExecutionContext) extends RemoteLay override def name: String = "N5 Multiscales" - override def explore(remotePath: VaultPath, credentialId: Option[String]): Fox[List[(N5Layer, VoxelSize)]] = + override def explore(remotePath: VaultPath, credentialId: Option[String])( + implicit tc: TokenContext): Fox[List[(N5Layer, VoxelSize)]] = for { metadataPath <- Fox.successful(remotePath / N5Metadata.FILENAME_ATTRIBUTES_JSON) n5Metadata <- metadataPath.parseAsJson[N5Metadata] ?~> s"Failed to read N5 header at $metadataPath" layers <- Fox.serialCombined(n5Metadata.multiscales)(layerFromN5MultiscalesItem(_, remotePath, credentialId)) } yield layers - private def layerFromN5MultiscalesItem(multiscalesItem: N5MultiscalesItem, - remotePath: VaultPath, - credentialId: Option[String]): Fox[(N5Layer, VoxelSize)] = + private def layerFromN5MultiscalesItem( + multiscalesItem: N5MultiscalesItem, + remotePath: VaultPath, + credentialId: Option[String])(implicit tc: TokenContext): Fox[(N5Layer, VoxelSize)] = for { voxelSizeNanometers <- extractVoxelSize(multiscalesItem.datasets.map(_.transform)) magsWithAttributes <- Fox.serialCombined(multiscalesItem.datasets)(d => @@ -99,7 +102,7 @@ class N5MultiscalesExplorer(implicit val ec: ExecutionContext) extends RemoteLay private def n5MagFromDataset(n5Dataset: N5MultiscalesDataset, layerPath: VaultPath, voxelSize: Vec3Double, - credentialId: Option[String]): Fox[MagWithAttributes] = + credentialId: Option[String])(implicit tc: TokenContext): Fox[MagWithAttributes] = for { axisOrder <- extractAxisOrder(n5Dataset.transform.axes) ?~> "Could not extract XYZ axis order mapping. Does the data have x, y and z axes, stated in multiscales metadata?" 
mag <- magFromTransform(voxelSize, n5Dataset.transform) ?~> "Could not extract mag from transforms" diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NeuroglancerUriExplorer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NeuroglancerUriExplorer.scala index 01f21a076ee..8f850237bd1 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NeuroglancerUriExplorer.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NeuroglancerUriExplorer.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.explore +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.Vec3Int import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.datavault.VaultPath @@ -18,8 +19,8 @@ class NeuroglancerUriExplorer(dataVaultService: DataVaultService)(implicit val e with ExploreLayerUtils { override def name: String = "Neuroglancer URI Explorer" - override def explore(remotePath: VaultPath, - credentialId: Option[String]): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] = + override def explore(remotePath: VaultPath, credentialId: Option[String])( + implicit tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] = for { _ <- Fox.successful(()) uriFragment <- tryo(remotePath.toUri.getFragment.drop(1)) ?~> "URI has no matching fragment part" @@ -32,7 +33,8 @@ class NeuroglancerUriExplorer(dataVaultService: DataVaultService)(implicit val e renamedLayers = makeLayerNamesUnique(layers.map(_._1)) } yield renamedLayers.zip(layers.map(_._2)) - private def exploreNeuroglancerLayer(layerSpec: JsValue): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] = + private def exploreNeuroglancerLayer(layerSpec: JsValue)( + implicit tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] = for { _ <- Fox.successful(()) obj <- layerSpec.validate[JsObject].toFox @@ -47,9 +49,8 @@ class NeuroglancerUriExplorer(dataVaultService: DataVaultService)(implicit val e layerWithViewConfiguration <- assignViewConfiguration(layer, viewConfiguration) } yield layerWithViewConfiguration - private def exploreLayer(layerType: String, - remotePath: VaultPath, - name: String): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] = + private def exploreLayer(layerType: String, remotePath: VaultPath, name: String)( + implicit tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] = layerType match { case "n5" => Fox.firstSuccess( diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffExplorationUtils.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffExplorationUtils.scala index 3c7fb18082c..388b1904eeb 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffExplorationUtils.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffExplorationUtils.scala @@ -1,5 +1,6 @@ package com.scalableminds.webknossos.datastore.explore +import com.scalableminds.util.accesscontext.TokenContext import com.scalableminds.util.geometry.{Vec3Double, Vec3Int} import com.scalableminds.util.image.Color import com.scalableminds.util.tools.TextUtils.normalizeStrong @@ -182,7 +183,7 @@ trait NgffExplorationUtils extends FoxImplicits { Vec3Double(xFactors.product, yFactors.product, zFactors.product) } - protected def getShape(dataset: NgffDataset, path: VaultPath): Fox[Array[Int]] + protected def getShape(dataset: 
+  protected def getShape(dataset: NgffDataset, path: VaultPath)(implicit tc: TokenContext): Fox[Array[Int]]
 
   protected def createAdditionalAxis(name: String, index: Int, bounds: Array[Int]): Box[AdditionalAxis] =
     for {
@@ -191,7 +192,8 @@ trait NgffExplorationUtils extends FoxImplicits {
     } yield AdditionalAxis(normalizedName, bounds, index)
 
   protected def getAdditionalAxes(multiscale: NgffMultiscalesItem, remotePath: VaultPath)(
-      implicit ec: ExecutionContext): Fox[Seq[AdditionalAxis]] = {
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[Seq[AdditionalAxis]] = {
     val defaultAxes = List("c", "x", "y", "z")
     for {
       // Selecting shape of first mag, assuming no mags for additional coordinates
@@ -209,7 +211,8 @@ trait NgffExplorationUtils extends FoxImplicits {
   }
 
   protected def getNgffMultiscaleChannelCount(multiscale: NgffMultiscalesItem, remotePath: VaultPath)(
-      implicit ec: ExecutionContext): Fox[Int] =
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[Int] =
     for {
       firstDataset <- multiscale.datasets.headOption.toFox
       shape <- getShape(firstDataset, remotePath)
@@ -228,7 +231,7 @@ trait NgffExplorationUtils extends FoxImplicits {
                                   datasetName: String,
                                   voxelSizeInAxisUnits: Vec3Double,
                                   axisOrder: AxisOrder,
-                                  isSegmentation: Boolean): Fox[DataLayerWithMagLocators]
+                                  isSegmentation: Boolean)(implicit tc: TokenContext): Fox[DataLayerWithMagLocators]
 
   protected def layersFromNgffMultiscale(multiscale: NgffMultiscalesItem,
                                          remotePath: VaultPath,
@@ -236,7 +239,8 @@ trait NgffExplorationUtils extends FoxImplicits {
                                          channelCount: Int,
                                          channelAttributes: Option[Seq[ChannelAttributes]] = None,
                                          isSegmentation: Boolean = false)(
-      implicit ec: ExecutionContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] =
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] =
     for {
       axisOrder <- extractAxisOrder(multiscale.axes) ?~> "Could not extract XYZ axis order mapping. Does the data have x, y and z axes, stated in multiscales metadata?"
       unifiedAxisUnit <- selectAxisUnit(multiscale.axes, axisOrder)
@@ -301,12 +305,12 @@ trait NgffExplorationUtils extends FoxImplicits {
     }
   }
 
-  protected def layersForLabel(remotePath: VaultPath,
-                               labelPath: String,
-                               credentialId: Option[String]): Fox[List[(DataLayerWithMagLocators, VoxelSize)]]
+  protected def layersForLabel(remotePath: VaultPath, labelPath: String, credentialId: Option[String])(
+      implicit tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]]
 
   protected def exploreLabelLayers(remotePath: VaultPath, credentialId: Option[String])(
-      implicit ec: ExecutionContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] =
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] =
     for {
       labelDescriptionPath <- Fox.successful(remotePath / NgffLabelsGroup.LABEL_PATH)
       labelGroup <- labelDescriptionPath.parseAsJson[NgffLabelsGroup]
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffV0_4Explorer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffV0_4Explorer.scala
index 3b4ca3cef80..26ca242ab6e 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffV0_4Explorer.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffV0_4Explorer.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.explore
 
+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.geometry.Vec3Double
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.webknossos.datastore.dataformats.MagLocator
@@ -17,8 +18,8 @@ class NgffV0_4Explorer(implicit val ec: ExecutionContext) extends RemoteLayerExp
 
   override def name: String = "OME NGFF Zarr v0.4"
 
-  override def explore(remotePath: VaultPath,
-                       credentialId: Option[String]): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] =
+  override def explore(remotePath: VaultPath, credentialId: Option[String])(
+      implicit tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] =
     for {
       zattrsPath <- Fox.successful(remotePath / NgffMetadata.FILENAME_DOT_ZATTRS)
       ngffHeader <- zattrsPath.parseAsJson[NgffMetadata] ?~> s"Failed to read OME NGFF header at $zattrsPath"
@@ -44,7 +45,7 @@ class NgffV0_4Explorer(implicit val ec: ExecutionContext) extends RemoteLayerExp
                               datasetName: String,
                               voxelSizeInAxisUnits: Vec3Double,
                               axisOrder: AxisOrder,
-                              isSegmentation: Boolean): Fox[(ZarrLayer)] =
+                              isSegmentation: Boolean)(implicit tc: TokenContext): Fox[(ZarrLayer)] =
     for {
       magsWithAttributes <- Fox.serialCombined(multiscale.datasets)(d =>
         zarrMagFromNgffDataset(d, remotePath, voxelSizeInAxisUnits, axisOrder, credentialId, Some(channelIndex)))
@@ -85,7 +86,7 @@ class NgffV0_4Explorer(implicit val ec: ExecutionContext) extends RemoteLayerExp
       )
     } yield layer
 
-  private def getZarrHeader(ngffDataset: NgffDataset, layerPath: VaultPath) = {
+  private def getZarrHeader(ngffDataset: NgffDataset, layerPath: VaultPath)(implicit tc: TokenContext) = {
     val magPath = layerPath / ngffDataset.path
     val zarrayPath = magPath / ZarrHeader.FILENAME_DOT_ZARRAY
     for {
@@ -98,12 +99,13 @@ class NgffV0_4Explorer(implicit val ec: ExecutionContext) extends RemoteLayerExp
     } yield header
   }
 
-  private def zarrMagFromNgffDataset(ngffDataset: NgffDataset,
-                                     layerPath: VaultPath,
-                                     voxelSizeInAxisUnits: Vec3Double,
-                                     axisOrder: AxisOrder,
-                                     credentialId: Option[String],
-                                     channelIndex: Option[Int])(implicit ec: ExecutionContext): Fox[MagWithAttributes] =
+  private def zarrMagFromNgffDataset(
+      ngffDataset: NgffDataset,
+      layerPath: VaultPath,
+      voxelSizeInAxisUnits: Vec3Double,
+      axisOrder: AxisOrder,
+      credentialId: Option[String],
+      channelIndex: Option[Int])(implicit ec: ExecutionContext, tc: TokenContext): Fox[MagWithAttributes] =
     for {
       mag <- magFromTransforms(ngffDataset.coordinateTransformations, voxelSizeInAxisUnits, axisOrder) ?~> "Could not extract mag from scale transforms"
       magPath = layerPath / ngffDataset.path
@@ -118,15 +120,14 @@ class NgffV0_4Explorer(implicit val ec: ExecutionContext) extends RemoteLayerExp
                          elementClass,
                          boundingBox)
 
-  protected def getShape(dataset: NgffDataset, path: VaultPath): Fox[Array[Int]] =
+  protected def getShape(dataset: NgffDataset, path: VaultPath)(implicit tc: TokenContext): Fox[Array[Int]] =
     for {
       zarrHeader <- getZarrHeader(dataset, path)
      shape = zarrHeader.shape
    } yield shape
 
-  protected def layersForLabel(remotePath: VaultPath,
-                               labelPath: String,
-                               credentialId: Option[String]): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] =
+  protected def layersForLabel(remotePath: VaultPath, labelPath: String, credentialId: Option[String])(
+      implicit tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] =
     for {
       fullLabelPath <- Fox.successful(remotePath / "labels" / labelPath)
       zattrsPath = fullLabelPath / NgffMetadata.FILENAME_DOT_ZATTRS
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffV0_5Explorer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffV0_5Explorer.scala
index 8b97eacaed0..d3a3f66f03f 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffV0_5Explorer.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/NgffV0_5Explorer.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.explore
 
+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.geometry.Vec3Double
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.webknossos.datastore.dataformats.MagLocator
@@ -18,8 +19,8 @@ class NgffV0_5Explorer(implicit val ec: ExecutionContext) extends RemoteLayerExp
 
   override def name: String = "OME NGFF Zarr v0.5"
 
-  override def explore(remotePath: VaultPath,
-                       credentialId: Option[String]): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] =
+  override def explore(remotePath: VaultPath, credentialId: Option[String])(
+      implicit tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] =
     for {
       zarrJsonPath <- Fox.successful(remotePath / Zarr3ArrayHeader.FILENAME_ZARR_JSON)
       groupHeader <- zarrJsonPath.parseAsJson[Zarr3GroupHeader] ?~> s"Failed to read OME NGFF header at $zarrJsonPath"
@@ -46,7 +47,7 @@ class NgffV0_5Explorer(implicit val ec: ExecutionContext) extends RemoteLayerExp
                               datasetName: String,
                               voxelSizeInAxisUnits: Vec3Double,
                               axisOrder: AxisOrder,
-                              isSegmentation: Boolean): Fox[DataLayerWithMagLocators] =
+                              isSegmentation: Boolean)(implicit tc: TokenContext): Fox[DataLayerWithMagLocators] =
     for {
       magsWithAttributes <- Fox.serialCombined(multiscale.datasets)(d =>
         zarrMagFromNgffDataset(d, remotePath, voxelSizeInAxisUnits, axisOrder, credentialId, Some(channelIndex)))
@@ -86,7 +87,8 @@ class NgffV0_5Explorer(implicit val ec: ExecutionContext) extends RemoteLayerExp
       )
     } yield layer
 
-  private def getZarrHeader(ngffDataset: NgffDataset, layerPath: VaultPath): Fox[Zarr3ArrayHeader] = {
+  private def getZarrHeader(ngffDataset: NgffDataset, layerPath: VaultPath)(
+      implicit tc: TokenContext): Fox[Zarr3ArrayHeader] = {
     val magPath = layerPath / ngffDataset.path
     val zarrJsonPath = magPath / Zarr3ArrayHeader.FILENAME_ZARR_JSON
     for {
@@ -94,12 +96,13 @@ class NgffV0_5Explorer(implicit val ec: ExecutionContext) extends RemoteLayerExp
     } yield parsedHeader
   }
 
-  private def zarrMagFromNgffDataset(ngffDataset: NgffDataset,
-                                     layerPath: VaultPath,
-                                     voxelSizeInAxisUnits: Vec3Double,
-                                     axisOrder: AxisOrder,
-                                     credentialId: Option[String],
-                                     channelIndex: Option[Int])(implicit ec: ExecutionContext): Fox[MagWithAttributes] =
+  private def zarrMagFromNgffDataset(
+      ngffDataset: NgffDataset,
+      layerPath: VaultPath,
+      voxelSizeInAxisUnits: Vec3Double,
+      axisOrder: AxisOrder,
+      credentialId: Option[String],
+      channelIndex: Option[Int])(implicit ec: ExecutionContext, tc: TokenContext): Fox[MagWithAttributes] =
     for {
       mag <- magFromTransforms(ngffDataset.coordinateTransformations, voxelSizeInAxisUnits, axisOrder) ?~> "Could not extract mag from scale transforms"
       magPath = layerPath / ngffDataset.path
@@ -114,15 +117,14 @@ class NgffV0_5Explorer(implicit val ec: ExecutionContext) extends RemoteLayerExp
                          elementClass,
                          boundingBox)
 
-  protected def getShape(dataset: NgffDataset, path: VaultPath): Fox[Array[Int]] =
+  protected def getShape(dataset: NgffDataset, path: VaultPath)(implicit tc: TokenContext): Fox[Array[Int]] =
     for {
       zarrHeader <- getZarrHeader(dataset, path)
       shape = zarrHeader.shape
     } yield shape
 
-  protected def layersForLabel(remotePath: VaultPath,
-                               labelPath: String,
-                               credentialId: Option[String]): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] =
+  protected def layersForLabel(remotePath: VaultPath, labelPath: String, credentialId: Option[String])(
+      implicit tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] =
     for {
       fullLabelPath <- Fox.successful(remotePath / "labels" / labelPath)
       zarrJsonPath = fullLabelPath / Zarr3ArrayHeader.FILENAME_ZARR_JSON
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/PrecomputedExplorer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/PrecomputedExplorer.scala
index b32dc8695a9..6b47daa896c 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/PrecomputedExplorer.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/PrecomputedExplorer.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.explore
 
+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.geometry.{BoundingBox, Vec3Double, Vec3Int}
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.webknossos.datastore.dataformats.MagLocator
@@ -19,7 +20,8 @@ import scala.concurrent.ExecutionContext
 class PrecomputedExplorer(implicit val ec: ExecutionContext) extends RemoteLayerExplorer {
   override def name: String = "Neuroglancer Precomputed"
 
-  override def explore(remotePath: VaultPath, credentialId: Option[String]): Fox[List[(PrecomputedLayer, VoxelSize)]] =
+  override def explore(remotePath: VaultPath, credentialId: Option[String])(
+      implicit tc: TokenContext): Fox[List[(PrecomputedLayer, VoxelSize)]] =
     for {
       infoPath <- Fox.successful(remotePath / PrecomputedHeader.FILENAME_INFO)
       precomputedHeader <- infoPath
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/RemoteLayerExplorer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/RemoteLayerExplorer.scala
index f800febaa10..efe70dda8a4 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/RemoteLayerExplorer.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/RemoteLayerExplorer.scala
@@ -2,6 +2,7 @@ package com.scalableminds.webknossos.datastore.explore
 
 import com.scalableminds.util.geometry.BoundingBox
 import collections.SequenceUtils
+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.tools.{Fox, FoxImplicits}
 import com.scalableminds.webknossos.datastore.dataformats.MagLocator
 import com.scalableminds.webknossos.datastore.datavault.VaultPath
@@ -19,7 +20,8 @@ trait RemoteLayerExplorer extends FoxImplicits {
 
   implicit def ec: ExecutionContext
 
-  def explore(remotePath: VaultPath, credentialId: Option[String]): Fox[List[(DataLayerWithMagLocators, VoxelSize)]]
+  def explore(remotePath: VaultPath, credentialId: Option[String])(
+      implicit tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]]
 
   def name: String
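The hunk above is the central contract change of this section: the abstract explore method on RemoteLayerExplorer gains a second, implicit parameter list carrying the requester's TokenContext, which is why every concrete explorer in this changeset only had to touch signatures. As an illustration of the mechanism, a minimal, self-contained sketch with simplified stand-ins (TokenContext here is a one-field case class, ToyExplorer/ToyZarrExplorer are hypothetical, and Future stands in for the real Fox):

import scala.concurrent.{ExecutionContext, Future}

// Simplified stand-in for com.scalableminds.util.accesscontext.TokenContext
final case class TokenContext(userTokenOpt: Option[String])

trait ToyExplorer {
  implicit def ec: ExecutionContext

  // Second, implicit parameter list: the token context is resolved once at
  // the outermost call site and flows through nested calls automatically.
  def explore(path: String)(implicit tc: TokenContext): Future[List[String]]
}

class ToyZarrExplorer(implicit val ec: ExecutionContext) extends ToyExplorer {
  override def explore(path: String)(implicit tc: TokenContext): Future[List[String]] =
    readJson(s"$path/.zattrs") // tc is passed on implicitly, no manual plumbing

  // Private helpers declare the same implicit and receive it for free.
  private def readJson(path: String)(implicit tc: TokenContext): Future[List[String]] =
    Future.successful(List(s"$path (token: ${tc.userTokenOpt.getOrElse("<anonymous>")})"))
}

The payoff of this design is that one signature change at the trait propagates the user's token to every remote read an explorer performs, instead of threading an extra argument through each helper by hand.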
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/WebknossosZarrExplorer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/WebknossosZarrExplorer.scala
index f9381e4e5c7..1c30212e1dc 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/WebknossosZarrExplorer.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/WebknossosZarrExplorer.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.explore
 
+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.webknossos.datastore.dataformats.MagLocator
 import com.scalableminds.webknossos.datastore.dataformats.layers.{
@@ -24,8 +25,8 @@ class WebknossosZarrExplorer(implicit val ec: ExecutionContext) extends RemoteLa
 
   override def name: String = "WEBKNOSSOS-based Zarr"
 
-  override def explore(remotePath: VaultPath,
-                       credentialId: Option[String]): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] =
+  override def explore(remotePath: VaultPath, credentialId: Option[String])(
+      implicit tc: TokenContext): Fox[List[(DataLayerWithMagLocators, VoxelSize)]] =
     for {
       dataSourcePropertiesPath <- Fox.successful(remotePath / GenericDataSource.FILENAME_DATASOURCE_PROPERTIES_JSON)
       dataSource <- dataSourcePropertiesPath.parseAsJson[DataSource]
@@ -54,15 +55,14 @@ class WebknossosZarrExplorer(implicit val ec: ExecutionContext) extends RemoteLa
   private def adaptMags(mags: List[MagLocator],
                         remoteLayerPath: VaultPath,
                         headerFilename: String,
-                        credentialId: Option[String]): Fox[List[MagLocator]] =
+                        credentialId: Option[String])(implicit tc: TokenContext): Fox[List[MagLocator]] =
     Fox.serialCombined(mags)(m =>
       for {
         magPath <- fixRemoteMagPath(m, remoteLayerPath, headerFilename)
       } yield m.copy(path = magPath, credentialId = credentialId))
 
-  private def fixRemoteMagPath(mag: MagLocator,
-                               remoteLayerPath: VaultPath,
-                               headerFilename: String): Fox[Option[String]] =
+  private def fixRemoteMagPath(mag: MagLocator, remoteLayerPath: VaultPath, headerFilename: String)(
+      implicit tc: TokenContext): Fox[Option[String]] =
     mag.path match {
       case Some(path) => Fox.successful(Some(path))
       case None       =>
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/Zarr3ArrayExplorer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/Zarr3ArrayExplorer.scala
index f12f452998a..cd2a07a1629 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/Zarr3ArrayExplorer.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/Zarr3ArrayExplorer.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.explore
 
+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.geometry.{Vec3Double, Vec3Int}
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.webknossos.datastore.dataformats.MagLocator
@@ -16,7 +17,8 @@ class Zarr3ArrayExplorer(implicit val ec: ExecutionContext) extends RemoteLayerE
 
   override def name: String = "Zarr v3 Array"
 
-  override def explore(remotePath: VaultPath, credentialId: Option[String]): Fox[List[(Zarr3Layer, VoxelSize)]] =
+  override def explore(remotePath: VaultPath, credentialId: Option[String])(
+      implicit tc: TokenContext): Fox[List[(Zarr3Layer, VoxelSize)]] =
     for {
       zarrayPath <- Fox.successful(remotePath / Zarr3ArrayHeader.FILENAME_ZARR_JSON)
       name = guessNameFromPath(remotePath)
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ZarrArrayExplorer.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ZarrArrayExplorer.scala
index a60a4bbba68..415ab53aceb 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ZarrArrayExplorer.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/explore/ZarrArrayExplorer.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.explore
 
+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.geometry.{Vec3Double, Vec3Int}
 import com.scalableminds.util.tools.Fox
 import com.scalableminds.webknossos.datastore.dataformats.MagLocator
@@ -16,7 +17,8 @@ class ZarrArrayExplorer(mag: Vec3Int = Vec3Int.ones)(implicit val ec: ExecutionC
 
   override def name: String = "Zarr Array"
 
-  override def explore(remotePath: VaultPath, credentialId: Option[String]): Fox[List[(ZarrLayer, VoxelSize)]] =
+  override def explore(remotePath: VaultPath, credentialId: Option[String])(
+      implicit tc: TokenContext): Fox[List[(ZarrLayer, VoxelSize)]] =
     for {
       zarrayPath <- Fox.successful(remotePath / ZarrHeader.FILENAME_DOT_ZARRAY)
       name = guessNameFromPath(remotePath)
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshService.scala
index c51b25e7bb8..0b307145450 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshService.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AdHocMeshService.scala
@@ -1,5 +1,7 @@
 package com.scalableminds.webknossos.datastore.services
 
+import com.scalableminds.util.accesscontext.TokenContext
+
 import java.nio._
 import org.apache.pekko.actor.{Actor, ActorRef, ActorSystem, Props}
 import org.apache.pekko.pattern.ask
@@ -29,6 +31,7 @@ case class AdHocMeshRequest(dataSource: Option[DataSource],
                             cuboid: Cuboid,
                             segmentId: Long,
                             voxelSizeFactor: Vec3Double, // assumed to be in dataset’s unit
+                            tokenContext: TokenContext,
                             mapping: Option[String] = None,
                             mappingType: Option[String] = None,
                             additionalCoordinates: Option[Seq[AdditionalCoordinate]] = None,
@@ -44,7 +47,7 @@ class AdHocMeshActor(val service: AdHocMeshService, val timeout: FiniteDuration)
 
   def receive: Receive = {
     case request: AdHocMeshRequest =>
-      sender() ! Await.result(service.requestAdHocMesh(request).futureBox, timeout)
+      sender() ! Await.result(service.requestAdHocMesh(request)(request.tokenContext).futureBox, timeout)
     case _ =>
       sender() ! Failure("Unexpected message sent to AdHocMeshActor.")
   }
@@ -68,7 +71,7 @@ class AdHocMeshService(binaryDataService: BinaryDataService,
     case e: Exception => Failure(e.getMessage)
   }
 
-  def requestAdHocMesh(request: AdHocMeshRequest): Fox[(Array[Float], List[Int])] =
+  def requestAdHocMesh(request: AdHocMeshRequest)(implicit tc: TokenContext): Fox[(Array[Float], List[Int])] =
     request.dataLayer.elementClass match {
       case ElementClass.uint8 =>
         generateAdHocMeshImpl[Byte, ByteBuffer](request,
@@ -87,7 +90,7 @@ class AdHocMeshService(binaryDataService: BinaryDataService,
 
   private def generateAdHocMeshImpl[T: ClassTag, B <: Buffer](
       request: AdHocMeshRequest,
-      dataTypeFunctors: DataTypeFunctors[T, B]): Fox[(Array[Float], List[Int])] = {
+      dataTypeFunctors: DataTypeFunctors[T, B])(implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = {
 
     def applyMapping(data: Array[T]): Fox[Array[T]] =
       request.mapping match {
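Unlike the explorers, AdHocMeshRequest carries the token context as an explicit field rather than an implicit parameter. The reason: implicit parameters cannot cross an actor message boundary, so the sender must capture the implicit into the message and AdHocMeshActor re-applies it explicitly via requestAdHocMesh(request)(request.tokenContext). A stripped-down sketch of that hand-off (hypothetical names, no Pekko dependency, simplified TokenContext):

// Simplified stand-in for the real TokenContext
final case class TokenContext(userTokenOpt: Option[String])

// The message carries the context as plain data, like AdHocMeshRequest does.
final case class ToyMeshRequest(segmentId: Long, tokenContext: TokenContext)

object ToyMeshProtocol {
  // Sender side: capture the ambient implicit into the message.
  def buildRequest(segmentId: Long)(implicit tc: TokenContext): ToyMeshRequest =
    ToyMeshRequest(segmentId, tc)

  // Receiver side: re-apply the carried context explicitly.
  def handle(request: ToyMeshRequest): String =
    loadSegment(request.segmentId)(request.tokenContext)

  private def loadSegment(segmentId: Long)(implicit tc: TokenContext): String =
    s"segment $segmentId read as ${tc.userTokenOpt.getOrElse("<anonymous>")}"
}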
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala
index 9efb26e993b..93aaf088716 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.services
 
+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.cache.AlfuCache
 import com.scalableminds.util.geometry.Vec3Int
 import com.scalableminds.util.tools.ExtendedTypes.ExtendedArraySeq
@@ -32,7 +33,7 @@ class BinaryDataService(val dataBaseDir: Path,
 
   private lazy val bucketProviderCache = new BucketProviderCache(maxEntries = 5000)
 
-  def handleDataRequest(request: DataServiceDataRequest): Fox[Array[Byte]] = {
+  def handleDataRequest(request: DataServiceDataRequest)(implicit tc: TokenContext): Fox[Array[Byte]] = {
     val bucketQueue = request.cuboid.allBucketsInCuboid
 
     if (!request.cuboid.hasValidDimensions) {
@@ -51,7 +52,8 @@ class BinaryDataService(val dataBaseDir: Path,
     }
   }
 
-  def handleDataRequests(requests: List[DataServiceDataRequest]): Fox[(Array[Byte], List[Int])] = {
+  def handleDataRequests(requests: List[DataServiceDataRequest])(
+      implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = {
     def convertIfNecessary(isNecessary: Boolean,
                            inputArray: Array[Byte],
                            conversionFunc: Array[Byte] => Fox[Array[Byte]],
@@ -88,7 +90,8 @@ class BinaryDataService(val dataBaseDir: Path,
     }
   }
 
-  private def handleBucketRequest(request: DataServiceDataRequest, bucket: BucketPosition): Fox[Array[Byte]] =
+  private def handleBucketRequest(request: DataServiceDataRequest, bucket: BucketPosition)(
+      implicit tc: TokenContext): Fox[Array[Byte]] =
     if (request.dataLayer.doesContainBucket(bucket) && request.dataLayer.containsMag(bucket.mag)) {
       val readInstruction =
         DataReadInstruction(dataBaseDir, request.dataSource, request.dataLayer, bucket, request.settings.version)
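With handleDataRequest and handleDataRequests now requiring a TokenContext, call sites have two options: let the compiler pick up an implicit already in scope (the common case in request handlers), or apply one explicitly, for example an empty context where no user is involved. A sketch of the two call styles, using simplified types and a hypothetical toyHandleDataRequest rather than the real service:

// Simplified stand-in for the real TokenContext
final case class TokenContext(userTokenOpt: Option[String])

object CallStyles {
  def toyHandleDataRequest(requestId: Int)(implicit tc: TokenContext): String =
    s"request $requestId served as ${tc.userTokenOpt.getOrElse("<anonymous>")}"

  // Style 1: implicit resolution, e.g. inside a handler that has already
  // derived the requesting user's context.
  def withUser(): String = {
    implicit val userContext: TokenContext = TokenContext(Some("user-token"))
    toyHandleDataRequest(1)
  }

  // Style 2: explicit application, e.g. background jobs or tests that act
  // without a user.
  def anonymous(): String =
    toyHandleDataRequest(2)(TokenContext(None))
}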
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala
index 4da043fb9f2..086567c359f 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/DSFullMeshService.scala
@@ -62,11 +62,12 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository,
       case None => loadFullMeshFromAdHoc(organizationId, datasetDirectoryName, dataLayerName, fullMeshRequest)
     }
 
-  private def loadFullMeshFromAdHoc(
-      organizationId: String,
-      datasetName: String,
-      dataLayerName: String,
-      fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext, m: MessagesProvider): Fox[Array[Byte]] =
+  private def loadFullMeshFromAdHoc(organizationId: String,
+                                    datasetName: String,
+                                    dataLayerName: String,
+                                    fullMeshRequest: FullMeshRequest)(implicit ec: ExecutionContext,
+                                                                      m: MessagesProvider,
+                                                                      tc: TokenContext): Fox[Array[Byte]] =
     for {
       mag <- fullMeshRequest.mag.toFox ?~> "mag.neededForAdHoc"
       seedPosition <- fullMeshRequest.seedPosition.toFox ?~> "seedPosition.neededForAdHoc"
@@ -92,13 +93,15 @@ class DSFullMeshService @Inject()(dataSourceRepository: DataSourceRepository,
                                  topLeft: VoxelPosition,
                                  chunkSize: Vec3Int,
                                  visited: collection.mutable.Set[VoxelPosition] = collection.mutable.Set[VoxelPosition]())(
-      implicit ec: ExecutionContext): Fox[List[Array[Float]]] = {
+      implicit ec: ExecutionContext,
+      tc: TokenContext): Fox[List[Array[Float]]] = {
     val adHocMeshRequest = AdHocMeshRequest(
       Some(dataSource),
       segmentationLayer,
       Cuboid(topLeft, chunkSize.x + 1, chunkSize.y + 1, chunkSize.z + 1),
       fullMeshRequest.segmentId,
       dataSource.scale.factor,
+      tc,
       fullMeshRequest.mappingName,
       fullMeshRequest.mappingType,
       fullMeshRequest.additionalCoordinates
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FindDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FindDataService.scala
index aa712788421..7325a75dcbb 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FindDataService.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/FindDataService.scala
@@ -1,6 +1,7 @@
 package com.scalableminds.webknossos.datastore.services
 
 import com.google.inject.Inject
+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.geometry.Vec3Int
 import com.scalableminds.util.tools.{Fox, FoxImplicits}
 import com.scalableminds.webknossos.datastore.models.datasource.{DataLayer, DataSource, ElementClass}
@@ -22,10 +23,8 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp
     with FoxImplicits {
 
   val binaryDataService: BinaryDataService = dataServicesHolder.binaryDataService
 
-  private def getDataFor(dataSource: DataSource,
-                         dataLayer: DataLayer,
-                         position: Vec3Int,
-                         mag: Vec3Int): Fox[Array[Byte]] = {
+  private def getDataFor(dataSource: DataSource, dataLayer: DataLayer, position: Vec3Int, mag: Vec3Int)(
+      implicit tc: TokenContext): Fox[Array[Byte]] = {
     val request = DataRequest(
       VoxelPosition(position.x, position.y, position.z, mag),
       DataLayer.bucketLength,
@@ -46,7 +45,7 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp
   private def getConcatenatedDataFor(dataSource: DataSource,
                                      dataLayer: DataLayer,
                                      positions: List[Vec3Int],
-                                     mag: Vec3Int) =
+                                     mag: Vec3Int)(implicit tc: TokenContext) =
     for {
       dataBucketWise: Seq[Array[Byte]] <- Fox
         .sequenceOfFulls(positions.map(getDataFor(dataSource, dataLayer, _, mag)))
@@ -97,8 +96,8 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp
     positions.map(_.alignWithGridFloor(Vec3Int.full(DataLayer.bucketLength))).distinct
   }
 
-  private def checkAllPositionsForData(dataSource: DataSource,
-                                       dataLayer: DataLayer): Fox[Option[(Vec3Int, Vec3Int)]] = {
+  private def checkAllPositionsForData(dataSource: DataSource, dataLayer: DataLayer)(
+      implicit tc: TokenContext): Fox[Option[(Vec3Int, Vec3Int)]] = {
 
     def searchPositionIter(positions: List[Vec3Int], mag: Vec3Int): Fox[Option[Vec3Int]] =
       positions match {
@@ -132,12 +131,13 @@ class FindDataService @Inject()(dataServicesHolder: BinaryDataServiceHolder)(imp
     magIter(createPositions(dataLayer).distinct, dataLayer.resolutions.sortBy(_.maxDim))
   }
 
-  def findPositionWithData(dataSource: DataSource, dataLayer: DataLayer): Fox[Option[(Vec3Int, Vec3Int)]] =
+  def findPositionWithData(dataSource: DataSource, dataLayer: DataLayer)(
+      implicit tc: TokenContext): Fox[Option[(Vec3Int, Vec3Int)]] =
     for {
       positionAndMagOpt <- checkAllPositionsForData(dataSource, dataLayer)
     } yield positionAndMagOpt
 
-  def createHistogram(dataSource: DataSource, dataLayer: DataLayer): Fox[List[Histogram]] = {
+  def createHistogram(dataSource: DataSource, dataLayer: DataLayer)(implicit tc: TokenContext): Fox[List[Histogram]] = {
 
     def calculateHistogramValues(data: Array[_ >: UByte with UShort with UInt with ULong with Float],
                                  bytesPerElement: Int,
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala
index 23e7b2eb467..cf221b097dc 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/SegmentIndexFileService.scala
@@ -1,5 +1,6 @@
 package com.scalableminds.webknossos.datastore.services
 
+import com.scalableminds.util.accesscontext.TokenContext
 import com.scalableminds.util.geometry.{BoundingBox, Vec3Int}
 import com.scalableminds.util.io.PathUtils
 import com.scalableminds.util.tools.{Fox, FoxImplicits}
@@ -123,7 +124,7 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig,
                        dataLayerName: String,
                        segmentId: Long,
                        mag: Vec3Int,
-                       mappingName: Option[String])(implicit m: MessagesProvider): Fox[Long] =
+                       mappingName: Option[String])(implicit m: MessagesProvider, tc: TokenContext): Fox[Long] =
     calculateSegmentVolume(
       segmentId,
       mag,
@@ -132,12 +133,13 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig,
       getTypedDataForBucketPosition(organizationId, datasetDirectoryName, dataLayerName, mappingName)
     )
 
-  def getSegmentBoundingBox(organizationId: String,
-                            datasetDirectoryName: String,
-                            dataLayerName: String,
-                            segmentId: Long,
-                            mag: Vec3Int,
-                            mappingName: Option[String])(implicit m: MessagesProvider): Fox[BoundingBox] =
+  def getSegmentBoundingBox(
+      organizationId: String,
+      datasetDirectoryName: String,
+      dataLayerName: String,
+      segmentId: Long,
+      mag: Vec3Int,
+      mappingName: Option[String])(implicit m: MessagesProvider, tc: TokenContext): Fox[BoundingBox] =
     for {
       bb <- calculateSegmentBoundingBox(
@@ -160,7 +162,7 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig,
                                             mappingName: Option[String])(
       bucketPosition: Vec3Int,
       mag: Vec3Int,
-      additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit m: MessagesProvider) =
+      additionalCoordinates: Option[Seq[AdditionalCoordinate]])(implicit m: MessagesProvider, tc: TokenContext) =
     for {
       // Additional coordinates parameter ignored, see #7556
      (dataSource, dataLayer) <- dataSourceRepository.getDataSourceAndDataLayer(organizationId,
@@ -231,7 +233,7 @@ class SegmentIndexFileService @Inject()(config: DataStoreConfig,
                               dataLayer: DataLayer,
                               mag: Vec3Int,
                               mag1BucketPositions: Seq[Vec3Int],
-                              mappingName: Option[String]): Fox[Array[Byte]] = {
+                              mappingName: Option[String])(implicit tc: TokenContext): Fox[Array[Byte]] = {
     val dataRequests = mag1BucketPositions.map { position =>
       DataServiceDataRequest(
         dataSource = dataSource,
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/DataVaultService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/DataVaultService.scala
index c65c9c8e5fc..9c27031514e 100644
--- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/DataVaultService.scala
+++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/storage/DataVaultService.scala
@@ -2,6 +2,7 @@ package com.scalableminds.webknossos.datastore.storage
 
 import com.scalableminds.util.cache.AlfuCache
 import com.scalableminds.util.tools.Fox
+import com.scalableminds.webknossos.datastore.DataStoreConfig
 import com.scalableminds.webknossos.datastore.datavault.{
   DataVault,
   FileSystemDataVault,
@@ -28,7 +29,7 @@ object DataVaultService {
     List(schemeS3, schemeHttps, schemeHttp, schemeGS).contains(uriScheme)
 }
 
-class DataVaultService @Inject()(ws: WSClient) extends LazyLogging {
+class DataVaultService @Inject()(ws: WSClient, config: DataStoreConfig) extends LazyLogging {
 
   private val vaultCache: AlfuCache[RemoteSourceDescriptor, DataVault] =
     AlfuCache(maxCapacity = 100)
@@ -49,7 +50,7 @@ class DataVaultService @Inject()(ws: WSClient) extends LazyLogging {
     } else if (scheme == DataVaultService.schemeS3) {
       S3DataVault.create(remoteSource, ws)
     } else if (scheme == DataVaultService.schemeHttps || scheme == DataVaultService.schemeHttp) {
-      HttpsDataVault.create(remoteSource, ws)
+      HttpsDataVault.create(remoteSource, ws, config.Http.uri)
    } else if (scheme == DataVaultService.schemeFile) {
      FileSystemDataVault.create
    } else {
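HttpsDataVault.create now also receives the datastore's own public URI (config.Http.uri). The diff does not show how the vault uses it, but a plausible reading, consistent with this changeset's goal of using the user's token when a layer links to zarr data, is that the vault attaches the token only to requests aimed back at this WEBKNOSSOS instance, so the token is never leaked to third-party hosts. A speculative sketch of such a check (all names, and the header in particular, are assumptions, not the actual HttpsDataVault code):

import java.net.URI

// Simplified stand-in for the real TokenContext
final case class TokenContext(userTokenOpt: Option[String])

// Speculative: attach the user's token only when the target host matches
// this datastore's own URI; other hosts get no credentials.
class ToyHttpsVault(ownUri: String) {
  private val ownHost: Option[String] = Option(new URI(ownUri).getHost)

  def authHeadersFor(target: URI)(implicit tc: TokenContext): Map[String, String] =
    tc.userTokenOpt match {
      case Some(token) if ownHost.isDefined && Option(target.getHost) == ownHost =>
        Map("X-Auth-Token" -> token) // header name is an assumption
      case _ =>
        Map.empty
    }
}

Usage would then look like new ToyHttpsVault("https://my-wk.example").authHeadersFor(new URI("https://my-wk.example/data/zarr/...")) with the requester's TokenContext in implicit scope.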
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala
index b1140ca2857..85f7d4de912 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingLayer.scala
@@ -28,7 +28,8 @@ import scala.concurrent.ExecutionContext
 
 class EditableMappingBucketProvider(layer: EditableMappingLayer) extends BucketProvider with ProtoGeometryImplicits {
 
-  override def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[Array[Byte]] = {
+  override def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext,
+                                                          tc: TokenContext): Fox[Array[Byte]] = {
     val bucket: BucketPosition = readInstruction.bucket
     for {
       tracingId <- Fox.successful(layer.name)
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala
index 33f9cc81a6d..c4b0801474a 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala
@@ -192,8 +192,8 @@ class EditableMappingService @Inject()(
       voxelAsLong <- voxelAsLongArray.headOption
     } yield voxelAsLong
 
-  def volumeData(editableMappingLayer: EditableMappingLayer,
-                 dataRequests: DataRequestCollection): Fox[(Array[Byte], List[Int])] = {
+  def volumeData(editableMappingLayer: EditableMappingLayer, dataRequests: DataRequestCollection)(
+      implicit tc: TokenContext): Fox[(Array[Byte], List[Int])] = {
     val requests = dataRequests.map(
       r =>
         DataServiceDataRequest(null,
@@ -368,14 +368,15 @@ class EditableMappingService @Inject()(
       bytes = UnsignedIntegerArray.toByteArray(unsignedIntArray, elementClass)
     } yield bytes
 
-  def createAdHocMesh(editableMappingLayer: EditableMappingLayer,
-                      request: WebknossosAdHocMeshRequest): Fox[(Array[Float], List[Int])] = {
+  def createAdHocMesh(editableMappingLayer: EditableMappingLayer, request: WebknossosAdHocMeshRequest)(
+      implicit tc: TokenContext): Fox[(Array[Float], List[Int])] = {
     val adHocMeshRequest = AdHocMeshRequest(
       dataSource = None,
       dataLayer = editableMappingLayer,
       cuboid = request.cuboid(editableMappingLayer),
       segmentId = request.segmentId,
       voxelSizeFactor = request.voxelSizeFactorInUnit,
+      tokenContext = tc,
       mapping = None,
       mappingType = None,
       findNeighbors = request.findNeighbors
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala
index 8dc36aa9945..6585dd549a6 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingLayer.scala
@@ -30,7 +30,8 @@ class VolumeTracingBucketProvider(layer: VolumeTracingLayer)(implicit val ec: Ex
   val volumeDataStore: FossilDBClient = layer.volumeDataStore
   val temporaryTracingService: TemporaryTracingService = layer.temporaryTracingService
 
-  override def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[Array[Byte]] =
+  override def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext,
+                                                          tc: TokenContext): Fox[Array[Byte]] =
     loadBucket(layer, readInstruction.bucket, readInstruction.version)
 
   override def bucketStream(version: Option[Long] = None): Iterator[(BucketPosition, Array[Byte])] =
@@ -46,7 +47,8 @@ class TemporaryVolumeTracingBucketProvider(layer: VolumeTracingLayer)(implicit v
   val volumeDataStore: FossilDBClient = layer.volumeDataStore
   val temporaryTracingService: TemporaryTracingService = layer.temporaryTracingService
 
-  override def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext): Fox[Array[Byte]] =
+  override def load(readInstruction: DataReadInstruction)(implicit ec: ExecutionContext,
+                                                          tc: TokenContext): Fox[Array[Byte]] =
     for {
       _ <- temporaryTracingService.assertTracingStillPresent(layer.name)
       data <- loadBucket(layer, readInstruction.bucket, readInstruction.version)
diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala
index 2abc2fba980..8fe46f03b1a 100644
--- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala
+++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala
@@ -623,6 +623,7 @@ class VolumeTracingService @Inject()(
         request.cuboid(volumeLayer),
         request.segmentId,
         request.voxelSizeFactorInUnit,
+        tc,
         None,
         None,
         request.additionalCoordinates,