diff --git a/CHANGELOG.unreleased.md b/CHANGELOG.unreleased.md index 273dd508a54..18f5000d79c 100644 --- a/CHANGELOG.unreleased.md +++ b/CHANGELOG.unreleased.md @@ -20,10 +20,12 @@ For upgrade instructions, please check the [migration guide](MIGRATIONS.released - Upgraded backend dependencies for improved performance and stability. [#7922](https://github.com/scalableminds/webknossos/pull/7922) - It is now saved whether segment groups are collapsed or expanded, so this information doesn't get lost e.g. upon page reload. [#7928](https://github.com/scalableminds/webknossos/pull/7928/) - The context menu entry "Focus in Segment List" expands all necessary segment groups in the segments tab to show the highlighted segment. [#7950](https://github.com/scalableminds/webknossos/pull/7950) +- In the proofreading mode, you can enable/disable that only the active segment and the hovered segment are rendered. [#7654](https://github.com/scalableminds/webknossos/pull/7654) ### Changed - The warning about a mismatch between the scale of a pre-computed mesh and the dataset scale's factor now also considers all supported mags of the active segmentation layer. This reduces the false posive rate regarding this warning. [#7921](https://github.com/scalableminds/webknossos/pull/7921/) - It is no longer allowed to edit annotations of other organizations, even if they are set to public and to others-may-edit. [#7923](https://github.com/scalableminds/webknossos/pull/7923) +- When proofreading segmentations, the user can now interact with super-voxels directly in the data viewports. Additionally, proofreading is significantly faster because the segmentation data doesn't have to be re-downloaded after each merge/split operation. [#7654](https://github.com/scalableminds/webknossos/pull/7654) ### Fixed - Fixed a bug that allowed the default newly created bounding box to appear outside the dataset. In case the whole bounding box would be outside it is created regardless. 
[#7892](https://github.com/scalableminds/webknossos/pull/7892) diff --git a/app/models/annotation/nml/NmlWriter.scala b/app/models/annotation/nml/NmlWriter.scala index 33af7551f28..2ab7037ce36 100644 --- a/app/models/annotation/nml/NmlWriter.scala +++ b/app/models/annotation/nml/NmlWriter.scala @@ -263,7 +263,7 @@ class NmlWriter @Inject()(implicit ec: ExecutionContext) extends FoxImplicits { case Right(volumeTracing) => volumeTracing.fallbackLayer.foreach(writer.writeAttribute("fallbackLayer", _)) volumeTracing.largestSegmentId.foreach(id => writer.writeAttribute("largestSegmentId", id.toString)) - if (!volumeTracing.mappingIsEditable.getOrElse(false)) { + if (!volumeTracing.hasEditableMapping.getOrElse(false)) { volumeTracing.mappingName.foreach { mappingName => writer.writeAttribute("mappingName", mappingName) } diff --git a/frontend/javascripts/admin/admin_rest_api.ts b/frontend/javascripts/admin/admin_rest_api.ts index c2974a36e6d..77f1cf2d854 100644 --- a/frontend/javascripts/admin/admin_rest_api.ts +++ b/frontend/javascripts/admin/admin_rest_api.ts @@ -77,13 +77,19 @@ import type { MappingType, VolumeTracing, UserConfiguration, + Mapping, + NumberLike, } from "oxalis/store"; import type { NewTask, TaskCreationResponseContainer } from "admin/task/task_create_bulk_view"; import type { QueryObject } from "admin/task/task_search_form"; import { V3 } from "libs/mjs"; import type { Versions } from "oxalis/view/version_view"; import { enforceValidatedDatasetViewConfiguration } from "types/schemas/dataset_view_configuration_defaults"; -import { parseProtoTracing } from "oxalis/model/helpers/proto_helpers"; +import { + parseProtoListOfLong, + parseProtoTracing, + serializeProtoListOfLong, +} from "oxalis/model/helpers/proto_helpers"; import type { RequestOptions } from "libs/request"; import Request from "libs/request"; import type { Message } from "libs/toast"; @@ -886,7 +892,7 @@ export async function getTracingForAnnotationType( ): Promise { const { tracingId, typ 
} = annotationLayerDescriptor; const version = extractVersion(versions, tracingId, typ); - const tracingType = typ.toLowerCase(); + const tracingType = typ.toLowerCase() as "skeleton" | "volume"; const possibleVersionString = version != null ? `&version=${version}` : ""; const tracingArrayBuffer = await doWithToken((token) => Request.receiveArraybuffer( @@ -1599,23 +1605,6 @@ export function getEditableMappingInfo( ); } -export function getAgglomerateIdForSegmentId( - tracingStoreUrl: string, - tracingId: string, - segmentId: number, -): Promise { - return doWithToken(async (token) => { - const urlParams = new URLSearchParams({ - token, - segmentId: `${segmentId}`, - }); - const { agglomerateId } = await Request.receiveJSON( - `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomerateIdForSegmentId?${urlParams.toString()}`, - ); - return agglomerateId; - }); -} - export function getPositionForSegmentInAgglomerate( datastoreUrl: string, datasetId: APIDatasetId, @@ -2068,6 +2057,67 @@ export function getAgglomerateSkeleton( ); } +export async function getAgglomeratesForSegmentsFromDatastore( + dataStoreUrl: string, + datasetId: APIDatasetId, + layerName: string, + mappingId: string, + segmentIds: Array, +): Promise { + const segmentIdBuffer = serializeProtoListOfLong(segmentIds); + const listArrayBuffer: ArrayBuffer = await doWithToken((token) => + Request.receiveArraybuffer( + `${dataStoreUrl}/data/datasets/${datasetId.owningOrganization}/${datasetId.name}/layers/${layerName}/agglomerates/${mappingId}/agglomeratesForSegments?token=${token}`, + { + method: "POST", + body: segmentIdBuffer, + headers: { + "Content-Type": "application/octet-stream", + }, + }, + ), + ); + // Ensure that the values are bigint if the keys are bigint + const adaptToType = Utils.isBigInt(segmentIds[0]) + ? 
(el: NumberLike) => BigInt(el) + : (el: NumberLike) => el; + const keyValues = _.zip(segmentIds, parseProtoListOfLong(listArrayBuffer).map(adaptToType)); + // @ts-ignore + return new Map(keyValues); +} + +export async function getAgglomeratesForSegmentsFromTracingstore( + tracingStoreUrl: string, + tracingId: string, + segmentIds: Array, +): Promise { + const segmentIdBuffer = serializeProtoListOfLong( + // The tracing store expects the ids to be sorted + segmentIds.sort((a: T, b: T) => Number(a - b)), + ); + const listArrayBuffer: ArrayBuffer = await doWithToken((token) => + Request.receiveArraybuffer( + `${tracingStoreUrl}/tracings/mapping/${tracingId}/agglomeratesForSegments?token=${token}`, + { + method: "POST", + body: segmentIdBuffer, + headers: { + "Content-Type": "application/octet-stream", + }, + }, + ), + ); + + // Ensure that the values are bigint if the keys are bigint + const adaptToType = Utils.isBigInt(segmentIds[0]) + ? (el: NumberLike) => BigInt(el) + : (el: NumberLike) => el; + + const keyValues = _.zip(segmentIds, parseProtoListOfLong(listArrayBuffer).map(adaptToType)); + // @ts-ignore + return new Map(keyValues); +} + export function getEditableAgglomerateSkeleton( tracingStoreUrl: string, tracingId: string, @@ -2228,10 +2278,10 @@ export async function getEdgesForAgglomerateMinCut( tracingStoreUrl: string, tracingId: string, segmentsInfo: { - segmentId1: number; - segmentId2: number; + segmentId1: NumberLike; + segmentId2: NumberLike; mag: Vector3; - agglomerateId: number; + agglomerateId: NumberLike; editableMappingId: string; }, ): Promise> { @@ -2239,7 +2289,13 @@ export async function getEdgesForAgglomerateMinCut( Request.sendJSONReceiveJSON( `${tracingStoreUrl}/tracings/volume/${tracingId}/agglomerateGraphMinCut?token=${token}`, { - data: segmentsInfo, + data: { + ...segmentsInfo, + // TODO: Proper 64 bit support (#6921) + segmentId1: Number(segmentsInfo.segmentId1), + segmentId2: Number(segmentsInfo.segmentId2), + agglomerateId: 
Number(segmentsInfo.agglomerateId), + }, }, ), ); @@ -2254,9 +2310,9 @@ export async function getNeighborsForAgglomerateNode( tracingStoreUrl: string, tracingId: string, segmentInfo: { - segmentId: number; + segmentId: NumberLike; mag: Vector3; - agglomerateId: number; + agglomerateId: NumberLike; editableMappingId: string; }, ): Promise { @@ -2264,7 +2320,12 @@ export async function getNeighborsForAgglomerateNode( Request.sendJSONReceiveJSON( `${tracingStoreUrl}/tracings/volume/${tracingId}/agglomerateGraphNeighbors?token=${token}`, { - data: segmentInfo, + data: { + ...segmentInfo, + // TODO: Proper 64 bit support (#6921) + segmentId: Number(segmentInfo.segmentId), + agglomerateId: Number(segmentInfo.agglomerateId), + }, }, ), ); diff --git a/frontend/javascripts/libs/async/debounced_abortable_saga.ts b/frontend/javascripts/libs/async/debounced_abortable_saga.ts index beab5188acd..971ea7d0e20 100644 --- a/frontend/javascripts/libs/async/debounced_abortable_saga.ts +++ b/frontend/javascripts/libs/async/debounced_abortable_saga.ts @@ -2,6 +2,9 @@ import { call, type Saga } from "oxalis/model/sagas/effect-generators"; import { buffers, Channel, channel, runSaga } from "redux-saga"; import { delay, race, take } from "redux-saga/effects"; +// biome-ignore lint/complexity/noBannedTypes: This is copied from redux-saga because it cannot be imported. +type NotUndefined = {} | null; + /* * This function takes a saga and a debounce threshold * and returns a function F that will trigger the given saga @@ -15,7 +18,7 @@ import { delay, race, take } from "redux-saga/effects"; * is slower than a standard _.debounce. Also see * debounced_abortable_saga.spec.ts for a small benchmark. 
*/ -export function createDebouncedAbortableCallable( +export function createDebouncedAbortableCallable( fn: (param1: T) => Saga, debounceThreshold: number, context: C, @@ -56,7 +59,7 @@ export function createDebouncedAbortableParameterlessCallable( }; } -function* debouncedAbortableSagaRunner( +function* debouncedAbortableSagaRunner( debounceThreshold: number, triggerChannel: Channel, abortableFn: (param: T) => Saga, diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/abstract_cuckoo_table.ts b/frontend/javascripts/libs/cuckoo/abstract_cuckoo_table.ts similarity index 58% rename from frontend/javascripts/oxalis/model/bucket_data_handling/abstract_cuckoo_table.ts rename to frontend/javascripts/libs/cuckoo/abstract_cuckoo_table.ts index 58ea79e6853..c6308eb9640 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/abstract_cuckoo_table.ts +++ b/frontend/javascripts/libs/cuckoo/abstract_cuckoo_table.ts @@ -2,43 +2,70 @@ import * as THREE from "three"; import UpdatableTexture from "libs/UpdatableTexture"; import { getRenderer } from "oxalis/controller/renderer"; import { createUpdatableTexture } from "oxalis/geometries/materials/plane_material_factory_helpers"; +import _ from "lodash"; -const TEXTURE_CHANNEL_COUNT = 4; -const DEFAULT_LOAD_FACTOR = 0.25; +const DEFAULT_LOAD_FACTOR = 0.9; export const EMPTY_KEY_VALUE = 2 ** 32 - 1; +const REHASH_THRESHOLD = 20; export type SeedSubscriberFn = (seeds: number[]) => void; let cachedNullTexture: UpdatableTexture | undefined; export abstract class AbstractCuckooTable { - static ELEMENTS_PER_ENTRY = 4; entryCapacity: number; - table!: Uint32Array; - seeds!: number[]; - seedSubscribers: Array = []; + protected table!: Uint32Array; + protected seeds!: number[]; + protected seedSubscribers: Array = []; _texture: UpdatableTexture; - textureWidth: number; + protected textureWidth: number; + protected autoTextureUpdate: boolean = true; + + static getTextureChannelCount() { + return 4; + } + + static 
getElementsPerEntry() { + return 4; + } + + static getTextureType() { + return THREE.UnsignedIntType; + } + + static getTextureFormat() { + return THREE.RGBAIntegerFormat; + } + + static getInternalFormat(): THREE.PixelFormatGPU { + return "RGBA32UI"; + } + + getClass(): typeof AbstractCuckooTable { + const thisConstructor = this.constructor as typeof AbstractCuckooTable; + return thisConstructor; + } constructor(textureWidth: number) { this.textureWidth = textureWidth; this._texture = createUpdatableTexture( textureWidth, textureWidth, - TEXTURE_CHANNEL_COUNT, - THREE.UnsignedIntType, + this.getClass().getTextureChannelCount(), + this.getClass().getTextureType(), getRenderer(), - THREE.RGBAIntegerFormat, + this.getClass().getTextureFormat(), ); // The internal format has to be set manually, since ThreeJS does not // derive this value by itself. // See https://webgl2fundamentals.org/webgl/lessons/webgl-data-textures.html // for a reference of the internal formats. - this._texture.internalFormat = "RGBA32UI"; + this._texture.internalFormat = this.getClass().getInternalFormat(); this.entryCapacity = Math.floor( - (textureWidth ** 2 * TEXTURE_CHANNEL_COUNT) / AbstractCuckooTable.ELEMENTS_PER_ENTRY, + (textureWidth ** 2 * this.getClass().getTextureChannelCount()) / + this.getClass().getElementsPerEntry(), ); this.initializeTableArray(); @@ -46,10 +73,18 @@ export abstract class AbstractCuckooTable { this.flushTableToTexture(); } + getCriticalCapacity(): number { + /* + * Returns the capacity at which inserts can become very expensive due + * to increased likelihood of collisions. 
+ */ + return Math.floor(this.entryCapacity * DEFAULT_LOAD_FACTOR); + } + static computeTextureWidthFromCapacity(requestedCapacity: number): number { const capacity = requestedCapacity / DEFAULT_LOAD_FACTOR; const textureWidth = Math.ceil( - Math.sqrt((capacity * TEXTURE_CHANNEL_COUNT) / AbstractCuckooTable.ELEMENTS_PER_ENTRY), + Math.sqrt((capacity * this.getTextureChannelCount()) / this.getElementsPerEntry()), ); return textureWidth; } @@ -62,18 +97,18 @@ export abstract class AbstractCuckooTable { // Use 1x1 texture to avoid WebGL warnings. 1, 1, - TEXTURE_CHANNEL_COUNT, - THREE.UnsignedIntType, + this.getTextureChannelCount(), + this.getTextureType(), getRenderer(), - THREE.RGBAIntegerFormat, + this.getTextureFormat(), ); - cachedNullTexture.internalFormat = "RGBA32UI"; + cachedNullTexture.internalFormat = this.getInternalFormat(); return cachedNullTexture; } private initializeTableArray() { - this.table = new Uint32Array(AbstractCuckooTable.ELEMENTS_PER_ENTRY * this.entryCapacity).fill( + this.table = new Uint32Array(this.getClass().getElementsPerEntry() * this.entryCapacity).fill( EMPTY_KEY_VALUE, ); @@ -109,8 +144,8 @@ export abstract class AbstractCuckooTable { getUniformValues() { return { CUCKOO_ENTRY_CAPACITY: this.entryCapacity, - CUCKOO_ELEMENTS_PER_ENTRY: AbstractCuckooTable.ELEMENTS_PER_ENTRY, - CUCKOO_ELEMENTS_PER_TEXEL: TEXTURE_CHANNEL_COUNT, + CUCKOO_ELEMENTS_PER_ENTRY: this.getClass().getElementsPerEntry(), + CUCKOO_ELEMENTS_PER_TEXEL: this.getClass().getTextureChannelCount(), CUCKOO_TWIDTH: this.textureWidth, }; } @@ -119,38 +154,66 @@ export abstract class AbstractCuckooTable { this._texture.update(this.table, 0, 0, this.textureWidth, this.textureWidth); } + disableAutoTextureUpdate() { + this.autoTextureUpdate = false; + } + + enableAutoTextureUpdateAndFlush() { + this.autoTextureUpdate = true; + this.flushTableToTexture(); + } + /* Should throw an error if the provided key is not valid (e.g., because it contains reserved values). 
*/ abstract checkValidKey(key: K): void; - set(pendingKey: K, pendingValue: V, rehashAttempt: number = 0) { + set(pendingKey: K, pendingValue: V) { + const newDisplacedEntry = this.internalSet(pendingKey, pendingValue, !this.autoTextureUpdate); + if (newDisplacedEntry == null) { + // Success + return; + } + const oldTable = this.table; + for (let rehashAttempt = 1; rehashAttempt <= REHASH_THRESHOLD; rehashAttempt++) { + if (this.rehash(oldTable, true)) { + if (this.internalSet(newDisplacedEntry[0], newDisplacedEntry[1], true) == null) { + // Since a rehash was performed, the incremental texture updates were + // skipped. Update the entire texture if configured. + if (this.autoTextureUpdate) { + this.flushTableToTexture(); + } + return; + } + } + } + throw new Error( + `Cannot rehash, since ${REHASH_THRESHOLD} attempts were exceeded. Is the capacity exceeded?`, + ); + } + + private internalSet( + pendingKey: K, + pendingValue: V, + skipTextureUpdate: boolean, + ): Entry | undefined | null { this.checkValidKey(pendingKey); let displacedEntry; let currentAddress; let iterationCounter = 0; - const ITERATION_THRESHOLD = 40; - const REHASH_THRESHOLD = 100; - - if (rehashAttempt >= REHASH_THRESHOLD) { - throw new Error( - `Cannot rehash, since this is already the ${rehashAttempt}th attempt. Is the capacity exceeded?`, - ); - } - const existingValueWithAddress = this.getWithAddress(pendingKey); if (existingValueWithAddress) { // The key already exists. We only have to overwrite // the corresponding value. 
const [, address] = existingValueWithAddress; - this.writeEntryAtAddress(pendingKey, pendingValue, address, rehashAttempt > 0); - return; + this.writeEntryAtAddress(pendingKey, pendingValue, address, skipTextureUpdate); + return null; } let seedIndex = Math.floor(Math.random() * this.seeds.length); - while (iterationCounter++ < ITERATION_THRESHOLD) { + while (iterationCounter++ < this.entryCapacity) { const seed = this.seeds[seedIndex]; currentAddress = this._hashKeyToAddress(seed, pendingKey); @@ -159,11 +222,11 @@ export abstract class AbstractCuckooTable { pendingKey, pendingValue, currentAddress, - rehashAttempt > 0, + skipTextureUpdate, ); if (this.canDisplacedEntryBeIgnored(displacedEntry[0], pendingKey)) { - return; + return null; } [pendingKey, pendingValue] = displacedEntry; @@ -172,12 +235,8 @@ export abstract class AbstractCuckooTable { seedIndex = (seedIndex + Math.floor(Math.random() * (this.seeds.length - 1)) + 1) % this.seeds.length; } - this.rehash(rehashAttempt + 1); - this.set(pendingKey, pendingValue, rehashAttempt + 1); - // Since a rehash was performed, the incremental texture updates were - // skipped. Update the entire texture: - this.flushTableToTexture(); + return displacedEntry; } unset(key: K) { @@ -186,7 +245,12 @@ export abstract class AbstractCuckooTable { const value = this.getValueAtAddress(key, hashedAddress); if (value != null) { - this.writeEntryAtAddress(this.getEmptyKey(), this.getEmptyValue(), hashedAddress, false); + this.writeEntryAtAddress( + this.getEmptyKey(), + this.getEmptyValue(), + hashedAddress, + !this.autoTextureUpdate, + ); return; } } @@ -204,25 +268,29 @@ export abstract class AbstractCuckooTable { */ abstract getEmptyValue(): V; - private rehash(rehashAttempt: number): void { - const oldTable = this.table; - + private rehash(oldTable: Uint32Array, skipTextureUpdate: boolean): boolean { + // Theoretically, one could avoid allocating a new table on repeated rehashes, + // but these are likely not a bottleneck. 
this.initializeTableArray(); for ( let offset = 0; - offset < this.entryCapacity * AbstractCuckooTable.ELEMENTS_PER_ENTRY; - offset += AbstractCuckooTable.ELEMENTS_PER_ENTRY + offset < this.entryCapacity * this.getClass().getElementsPerEntry(); + offset += this.getClass().getElementsPerEntry() ) { if (oldTable[offset] === EMPTY_KEY_VALUE) { continue; } const [key, value] = this.getEntryAtAddress( - offset / AbstractCuckooTable.ELEMENTS_PER_ENTRY, + offset / this.getClass().getElementsPerEntry(), oldTable, ); - this.set(key, value, rehashAttempt); + if (this.internalSet(key, value, skipTextureUpdate) != null) { + // Rehash did not work + return false; + } } + return true; } get(key: K): V | null { @@ -260,20 +328,20 @@ export abstract class AbstractCuckooTable { abstract writeEntryToTable(key: K, value: V, hashedAddress: number): void; - writeEntryAtAddress(key: K, value: V, hashedAddress: number, isRehashing: boolean): Entry { + writeEntryAtAddress(key: K, value: V, hashedAddress: number, skipTextureUpdate: boolean): Entry { const displacedEntry: Entry = this.getEntryAtAddress(hashedAddress); this.writeEntryToTable(key, value, hashedAddress); - if (!isRehashing) { + if (!skipTextureUpdate) { // Only partially update if we are not rehashing. Otherwise, it makes more // sense to flush the entire texture content after the rehashing is done. 
- const offset = hashedAddress * AbstractCuckooTable.ELEMENTS_PER_ENTRY; - const texelOffset = offset / TEXTURE_CHANNEL_COUNT; + const offset = hashedAddress * this.getClass().getElementsPerEntry(); + const texelOffset = offset / this.getClass().getTextureChannelCount(); this._texture.update( - this.table.subarray(offset, offset + AbstractCuckooTable.ELEMENTS_PER_ENTRY), + this.table.subarray(offset, offset + this.getClass().getElementsPerEntry()), texelOffset % this.textureWidth, Math.floor(texelOffset / this.textureWidth), - AbstractCuckooTable.ELEMENTS_PER_ENTRY / TEXTURE_CHANNEL_COUNT, + this.getClass().getElementsPerEntry() / this.getClass().getTextureChannelCount(), 1, ); } @@ -307,4 +375,23 @@ export abstract class AbstractCuckooTable { } abstract _hashKeyToAddress(seed: number, key: K): number; + + getDiminishedEntryCapacity(): number { + // Important: + // This method is only needed for CuckooTable subclasses that + // use a single 32-bit key. + // We pretend that the entryCapacity has one + // slot less than it actually has. This is a shortcut to + // avoid that a single _hashCombine call in combination with + // a power-of-two-modulo operation does not have good enough + // hash properties. Without this, filling the table up to 90% + // will not work reliably (unit tests well, too). As an + // alternative, one could also use the fmix finalize step by Murmur3, + // but this requires more bit operations on CPU and GPU. + // The downside of this approach is that we waste one slot of the + // hash table. + // Other cuckootable implementations don't need this trick, because + // they call _hashCombine multiple times. 
+ return this.entryCapacity - 1; + } } diff --git a/frontend/javascripts/libs/cuckoo/cuckoo_table_uint32.ts b/frontend/javascripts/libs/cuckoo/cuckoo_table_uint32.ts new file mode 100644 index 00000000000..d8ceee4bbb2 --- /dev/null +++ b/frontend/javascripts/libs/cuckoo/cuckoo_table_uint32.ts @@ -0,0 +1,87 @@ +import * as THREE from "three"; +import { AbstractCuckooTable, EMPTY_KEY_VALUE } from "./abstract_cuckoo_table"; +import { NumberLike } from "oxalis/store"; + +const EMPTY_KEY = EMPTY_KEY_VALUE; +const EMPTY_VALUE = EMPTY_KEY_VALUE; + +// This module defines a cuckoo table that can map from a 32-bit key to 32-bit value. +type Key = number; +type Value = number; +type Entry = [Key, Value]; + +export class CuckooTableUint32 extends AbstractCuckooTable { + static getElementsPerEntry() { + return 2; + } + static getTextureChannelCount() { + return 2; + } + static getTextureFormat() { + return THREE.RGIntegerFormat; + } + static getInternalFormat(): THREE.PixelFormatGPU { + return "RG32UI"; + } + static fromCapacity(requestedCapacity: number): CuckooTableUint32 { + return new CuckooTableUint32(this.computeTextureWidthFromCapacity(requestedCapacity)); + } + + getEmptyKey(): Key { + return EMPTY_KEY; + } + + getEmptyValue(): Value { + return EMPTY_VALUE; + } + + getEntryAtAddress(hashedAddress: number, optTable?: Uint32Array): Entry { + const table = optTable || this.table; + const offset = hashedAddress * this.getClass().getElementsPerEntry(); + return [table[offset], table[offset + 1]]; + } + + canDisplacedEntryBeIgnored(displacedKey: Key, newKey: Key): boolean { + return ( + // Either, the slot is empty... 
(the value of EMPTY_KEY is not allowed as a key) + this._areKeysEqual(displacedKey, EMPTY_KEY) || + // or the slot already refers to the key + this._areKeysEqual(displacedKey, newKey) + ); + } + + checkValidKey(key: Key) { + if (this._areKeysEqual(key, EMPTY_KEY)) { + throw new Error(`The key ${EMPTY_KEY} is not allowed for the CuckooTable.`); + } + } + + _areKeysEqual(key1: Key, key2: Key): boolean { + return key1 === key2; + } + + writeEntryToTable(key: Key, value: Value, hashedAddress: number) { + const offset = hashedAddress * this.getClass().getElementsPerEntry(); + this.table[offset] = key; + this.table[offset + 1] = value; + } + + _hashKeyToAddress(seed: number, key: Key): number { + let state = this._hashCombine(seed, key); + return state % this.getDiminishedEntryCapacity(); + } + + setNumberLike(key: NumberLike, value: NumberLike) { + if (typeof key !== "number" || typeof value !== "number") { + throw new Error("Key and Value must be Number."); + } + this.set(key, value); + } + + unsetNumberLike(key: NumberLike) { + if (typeof key !== "number") { + throw new Error("Key must be Number."); + } + this.unset(key); + } +} diff --git a/frontend/javascripts/libs/cuckoo/cuckoo_table_uint64.ts b/frontend/javascripts/libs/cuckoo/cuckoo_table_uint64.ts new file mode 100644 index 00000000000..3872820a7c3 --- /dev/null +++ b/frontend/javascripts/libs/cuckoo/cuckoo_table_uint64.ts @@ -0,0 +1,81 @@ +import { convertNumberTo64BitTuple } from "libs/utils"; +import { AbstractCuckooTable, EMPTY_KEY_VALUE } from "./abstract_cuckoo_table"; +import { NumberLike } from "oxalis/store"; + +const EMPTY_KEY = [EMPTY_KEY_VALUE, EMPTY_KEY_VALUE] as Value; +const EMPTY_VALUE = [EMPTY_KEY_VALUE, EMPTY_KEY_VALUE] as Value; + +// This module defines a cuckoo table that can map from a 64-bit key to 64-bit value. 
+// Both key and value are stored as a tuple of: [High-32-Bits, Low-32-Bits] +type Key = [number, number]; +type Value = [number, number]; +type Entry = [Key, Value]; + +export class CuckooTableUint64 extends AbstractCuckooTable { + static fromCapacity(requestedCapacity: number): CuckooTableUint64 { + return new CuckooTableUint64(this.computeTextureWidthFromCapacity(requestedCapacity)); + } + + getEmptyKey(): Key { + return EMPTY_KEY; + } + + getEmptyValue(): Value { + return EMPTY_VALUE; + } + + getEntryAtAddress(hashedAddress: number, optTable?: Uint32Array): Entry { + const table = optTable || this.table; + const offset = hashedAddress * this.getClass().getElementsPerEntry(); + return [ + [table[offset], table[offset + 1]], + [table[offset + 2], table[offset + 3]], + ]; + } + + canDisplacedEntryBeIgnored(displacedKey: Key, newKey: Key): boolean { + return ( + // Either, the slot is empty... (the value of EMPTY_KEY is not allowed as a key) + this._areKeysEqual(displacedKey, EMPTY_KEY) || + // or the slot already refers to the key + this._areKeysEqual(displacedKey, newKey) + ); + } + + checkValidKey(key: Key) { + if (this._areKeysEqual(key, EMPTY_KEY)) { + throw new Error(`The key ${EMPTY_KEY} is not allowed for the CuckooTable.`); + } + } + + _areKeysEqual(key1: Key, key2: Key): boolean { + return key1[0] === key2[0] && key1[1] === key2[1]; + } + + writeEntryToTable(key: Key, value: Value, hashedAddress: number) { + const offset = hashedAddress * this.getClass().getElementsPerEntry(); + this.table[offset] = key[0]; + this.table[offset + 1] = key[1]; + this.table[offset + 2] = value[0]; + this.table[offset + 3] = value[1]; + } + + _hashKeyToAddress(seed: number, key: Key): number { + let state = this._hashCombine(seed, key[0]); + state = this._hashCombine(state, key[1]); + + return state % this.entryCapacity; + } + + setNumberLike(key: NumberLike, value: NumberLike) { + const keyTuple = convertNumberTo64BitTuple(key); + const valueTuple = 
convertNumberTo64BitTuple(value); + + this.set(keyTuple, valueTuple); + } + + unsetNumberLike(key: NumberLike) { + const keyTuple = convertNumberTo64BitTuple(key); + this.unset(keyTuple); + } +} diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/cuckoo_table.ts b/frontend/javascripts/libs/cuckoo/cuckoo_table_vec3.ts similarity index 67% rename from frontend/javascripts/oxalis/model/bucket_data_handling/cuckoo_table.ts rename to frontend/javascripts/libs/cuckoo/cuckoo_table_vec3.ts index 7ed668024a4..edb29db27c3 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/cuckoo_table.ts +++ b/frontend/javascripts/libs/cuckoo/cuckoo_table_vec3.ts @@ -8,9 +8,9 @@ type Key = number; type Value = Vector3; type Entry = [Key, Value]; -export class CuckooTable extends AbstractCuckooTable { - static fromCapacity(requestedCapacity: number): CuckooTable { - return new CuckooTable(this.computeTextureWidthFromCapacity(requestedCapacity)); +export class CuckooTableVec3 extends AbstractCuckooTable { + static fromCapacity(requestedCapacity: number): CuckooTableVec3 { + return new CuckooTableVec3(this.computeTextureWidthFromCapacity(requestedCapacity)); } getEmptyKey(): Key { @@ -23,21 +23,21 @@ export class CuckooTable extends AbstractCuckooTable { getEntryAtAddress(hashedAddress: number, optTable?: Uint32Array): Entry { const table = optTable || this.table; - const offset = hashedAddress * AbstractCuckooTable.ELEMENTS_PER_ENTRY; + const offset = hashedAddress * this.getClass().getElementsPerEntry(); return [table[offset], [table[offset + 1], table[offset + 2], table[offset + 3]]]; } canDisplacedEntryBeIgnored(displacedKey: Key, newKey: Key): boolean { return ( // Either, the slot is empty... 
(the value of EMPTY_KEY is not allowed as a key) - displacedKey === EMPTY_KEY || + this._areKeysEqual(displacedKey, EMPTY_KEY) || // or the slot already refers to the key - displacedKey === newKey + this._areKeysEqual(displacedKey, newKey) ); } checkValidKey(key: Key) { - if (key === EMPTY_KEY) { + if (this._areKeysEqual(key, EMPTY_KEY)) { throw new Error(`The key ${EMPTY_KEY} is not allowed for the CuckooTable.`); } } @@ -47,16 +47,15 @@ export class CuckooTable extends AbstractCuckooTable { } writeEntryToTable(key: Key, value: Value, hashedAddress: number) { - const offset = hashedAddress * AbstractCuckooTable.ELEMENTS_PER_ENTRY; + const offset = hashedAddress * this.getClass().getElementsPerEntry(); this.table[offset] = key; this.table[offset + 1] = value[0]; this.table[offset + 2] = value[1]; this.table[offset + 3] = value[2]; } - _hashKeyToAddress(seed: number, key: number): number { + _hashKeyToAddress(seed: number, key: Key): number { const state = this._hashCombine(seed, key); - - return state % this.entryCapacity; + return state % this.getDiminishedEntryCapacity(); } } diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/cuckoo_table_vec5.ts b/frontend/javascripts/libs/cuckoo/cuckoo_table_vec5.ts similarity index 92% rename from frontend/javascripts/oxalis/model/bucket_data_handling/cuckoo_table_vec5.ts rename to frontend/javascripts/libs/cuckoo/cuckoo_table_vec5.ts index 15518d1adfb..eae502b58f7 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/cuckoo_table_vec5.ts +++ b/frontend/javascripts/libs/cuckoo/cuckoo_table_vec5.ts @@ -23,7 +23,7 @@ type Entry = [Key, Value]; - x, y and z are constrained to be smaller than ~4.29 billion each - 32 different mags are supported per layer - 64 layers are supported - - ~2 million different bucket can be addressed on the GPU. + - ~2 million different buckets can be addressed on the GPU. 
*/ type CompressedEntry = Vector4; @@ -56,7 +56,7 @@ export class CuckooTableVec5 extends AbstractCuckooTable { } getEntryAtAddress(hashedAddress: number, optTable?: Uint32Array): Entry { - const offset = hashedAddress * AbstractCuckooTable.ELEMENTS_PER_ENTRY; + const offset = hashedAddress * this.getClass().getElementsPerEntry(); return this.readDecompressedEntry(offset, optTable); } @@ -81,7 +81,7 @@ export class CuckooTableVec5 extends AbstractCuckooTable { readDecompressedEntry(offset: number, optTable?: Uint32Array) { const table = optTable || this.table; return this.decompressEntry( - table.slice(offset, offset + AbstractCuckooTable.ELEMENTS_PER_ENTRY) as unknown as Vector4, + table.slice(offset, offset + this.getClass().getElementsPerEntry()) as unknown as Vector4, ); } @@ -108,7 +108,7 @@ export class CuckooTableVec5 extends AbstractCuckooTable { writeEntryToTable(key: Key, value: Value, hashedAddress: number) { const compressedEntry = this.compressEntry(key, value); - const offset = hashedAddress * AbstractCuckooTable.ELEMENTS_PER_ENTRY; + const offset = hashedAddress * this.getClass().getElementsPerEntry(); for (let i = 0; i < compressedEntry.length; i++) { this.table[offset + i] = compressedEntry[i]; } diff --git a/frontend/javascripts/libs/request.ts b/frontend/javascripts/libs/request.ts index 0aef5b80a4e..b5dd852089e 100644 --- a/frontend/javascripts/libs/request.ts +++ b/frontend/javascripts/libs/request.ts @@ -16,6 +16,7 @@ const compress = createWorker(CompressWorker); type method = "GET" | "POST" | "DELETE" | "HEAD" | "OPTIONS" | "PUT" | "PATCH"; export type RequestOptionsBase = { + body?: ReadableStream | Blob | BufferSource | FormData | URLSearchParams | string; compress?: boolean; doNotInvestigate?: boolean; extractHeaders?: boolean; diff --git a/frontend/javascripts/libs/utils.ts b/frontend/javascripts/libs/utils.ts index 1daec09389e..5048a9670a2 100644 --- a/frontend/javascripts/libs/utils.ts +++ b/frontend/javascripts/libs/utils.ts @@ -3,7 
+3,7 @@ import _ from "lodash"; // @ts-expect-error ts-migrate(7016) FIXME: Could not find a declaration file for module 'java... Remove this comment to see the full error message import naturalSort from "javascript-natural-sort"; import type { APIDataset, APIUser } from "types/api_flow_types"; -import type { BoundingBoxObject } from "oxalis/store"; +import type { BoundingBoxObject, NumberLike } from "oxalis/store"; import type { Vector3, Vector4, @@ -118,6 +118,18 @@ export function unique(array: Array): Array { return [...new Set(array)]; } +export function union(iterables: Array>): Set { + const set: Set = new Set(); + + for (const iterable of iterables) { + for (const item of iterable) { + set.add(item); + } + } + + return set; +} + export function enforce(fn: (arg0: A) => B): (arg0: A | null | undefined) => B { return (nullableA: A | null | undefined) => { if (nullableA == null) { @@ -625,6 +637,26 @@ export function diffArrays( }; } +export function diffMaps( + stateA: Map, + stateB: Map, +): { + changed: Iterable; + onlyA: Iterable; + onlyB: Iterable; +} { + const keysOfA = Array.from(stateA.keys()); + const keysOfB = Array.from(stateB.keys()); + const changed = keysOfA.filter((x) => stateB.has(x) && stateB.get(x) !== stateA.get(x)); + const onlyA = keysOfA.filter((x) => !stateB.has(x)); + const onlyB = keysOfB.filter((x) => !stateA.has(x)); + return { + changed, + onlyA, + onlyB, + }; +} + export function withoutValues(arr: Array, elements: Array): Array { // This set-based implementation avoids stackoverflow errors from which // _.without(arr, ...elements) suffers. @@ -874,12 +906,18 @@ export function castForArrayType(uncastNumber: number, data: TypedArray): number return data instanceof BigUint64Array ? 
BigInt(uncastNumber) : uncastNumber; } -export function convertNumberTo64Bit(num: number | null): [Vector4, Vector4] { +export function convertNumberTo64Bit(num: number | bigint | null): [Vector4, Vector4] { + const [bigNumHigh, bigNumLow] = convertNumberTo64BitTuple(num); + + const low = convertDecToBase256(bigNumLow); + const high = convertDecToBase256(bigNumHigh); + + return [high, low]; +} + +export function convertNumberTo64BitTuple(num: number | bigint | null): [number, number] { if (num == null || Number.isNaN(num)) { - return [ - [0, 0, 0, 0], - [0, 0, 0, 0], - ]; + return [0, 0]; } // Cast to BigInt as bit-wise operations only work with 32 bits, // even though Number uses 53 bits. @@ -888,10 +926,7 @@ export function convertNumberTo64Bit(num: number | null): [Vector4, Vector4] { const bigNumLow = Number((2n ** 32n - 1n) & bigNum); const bigNumHigh = Number(bigNum >> 32n); - const low = convertDecToBase256(bigNumLow); - const high = convertDecToBase256(bigNumHigh); - - return [high, low]; + return [bigNumHigh, bigNumLow]; } export async function promiseAllWithErrors(promises: Array>): Promise<{ @@ -1080,6 +1115,44 @@ export function diffObjects( return changes(object, base); } +export function fastDiffSetAndMap(setA: Set, mapB: Map) { + /* + * This function was designed for a special use case within the mapping saga, + * where a Set of (potentially new) segment IDs is passed for setA and a known mapping from + * id->id is passed for mapB. + * The function computes: + * - aWithoutB: segment IDs that are in setA but not in mapB.keys() + * - bWithoutA: segment IDs that are in mapB.keys() but not in setA + * - intersection: a Map only contains keys that are in both setA and mapB.keys() (the values are used from mapB). + */ + const aWithoutB = new Set(); + const bWithoutA = new Set(); + // This function assumes that the returned intersection is relatively large which is common + // for the use case it was designed for. 
Under this assumption, mapB is simply copied to + // initialize the intersection. Afterwards, items that are not within setA are removed from + // the intersection. + const intersection = new Map(mapB); + + for (const item of setA) { + if (!mapB.has(item)) { + aWithoutB.add(item); + } + } + + for (const item of mapB.keys()) { + if (!setA.has(item)) { + bWithoutA.add(item); + intersection.delete(item); + } + } + + return { + aWithoutB: aWithoutB, + bWithoutA: bWithoutA, + intersection: intersection, + }; +} + export function areVec3AlmostEqual(a: Vector3, b: Vector3, epsilon: number = 1e-6): boolean { return _.every(a.map((v, i) => Math.abs(v - b[i]) < epsilon)); } @@ -1168,6 +1241,15 @@ export function notEmpty(value: TValue | null | undefined): value is TVa return value !== null && value !== undefined; } +export function isNumberMap(x: Map): x is Map { + const { value } = x.entries().next(); + return value && typeof value[0] === "number"; +} + +export function isBigInt(x: NumberLike): x is bigint { + return typeof x === "bigint"; +} + export function assertNever(value: never): never { throw new Error(`Unexpected value that is not 'never': ${JSON.stringify(value)}`); } diff --git a/frontend/javascripts/oxalis/api/api_latest.ts b/frontend/javascripts/oxalis/api/api_latest.ts index c5cfe2712ce..290676b8eeb 100644 --- a/frontend/javascripts/oxalis/api/api_latest.ts +++ b/frontend/javascripts/oxalis/api/api_latest.ts @@ -1524,7 +1524,7 @@ class DataApi { const mappingProperties = { mapping: mapping instanceof Map - ? new Map(mapping) + ? 
(new Map(mapping as Map) as Mapping) : new Map(Object.entries(mapping).map(([key, value]) => [parseInt(key, 10), value])), mappingColors, hideUnmappedIds, diff --git a/frontend/javascripts/oxalis/api/api_v2.ts b/frontend/javascripts/oxalis/api/api_v2.ts index fa3640c87e0..2690c18884c 100644 --- a/frontend/javascripts/oxalis/api/api_v2.ts +++ b/frontend/javascripts/oxalis/api/api_v2.ts @@ -569,7 +569,7 @@ class DataApi { const mappingProperties = { mapping: mapping instanceof Map - ? new Map(mapping) + ? (new Map(mapping as Map) as Mapping) : new Map(Object.entries(mapping).map(([key, value]) => [parseInt(key, 10), value])), }; Store.dispatch(setMappingAction(layerName, "", "JSON", mappingProperties)); diff --git a/frontend/javascripts/oxalis/controller/segment_mesh_controller.ts b/frontend/javascripts/oxalis/controller/segment_mesh_controller.ts index 5c841b352c2..01eeab430fe 100644 --- a/frontend/javascripts/oxalis/controller/segment_mesh_controller.ts +++ b/frontend/javascripts/oxalis/controller/segment_mesh_controller.ts @@ -388,8 +388,10 @@ export default class SegmentMeshController { child.material.opacity = targetOpacity; } }); + const isNotProofreadingMode = Store.getState().uiInformation.activeTool !== "PROOFREAD"; + const changeMaterial = (fn: (material: MeshMaterial) => void) => { - if (mesh.isMerged) { + if (mesh.isMerged || isNotProofreadingMode) { // Update the material for all meshes that belong to the current // segment ID. parent.traverse((child) => { @@ -411,7 +413,7 @@ export default class SegmentMeshController { const newColor: readonly [number, number, number] = mesh.isHovered ? 
HOVERED_COLOR : ACTIVATED_COLOR; - material.color.setHSL(...newColor); + material.color = new THREE.Color().setHSL(...newColor); material.opacity = 1.0; material.emissive.setHSL(...HOVERED_COLOR); }); diff --git a/frontend/javascripts/oxalis/default_state.ts b/frontend/javascripts/oxalis/default_state.ts index 8158cfde034..6e16e0bf3c8 100644 --- a/frontend/javascripts/oxalis/default_state.ts +++ b/frontend/javascripts/oxalis/default_state.ts @@ -51,6 +51,7 @@ const defaultState: OxalisState = { userConfiguration: { autoSaveLayouts: true, autoRenderMeshInProofreading: true, + selectiveVisibilityInProofreading: true, brushSize: 50, clippingDistance: 50, clippingDistanceArbitrary: 64, @@ -100,6 +101,7 @@ const defaultState: OxalisState = { controlMode: ControlModeEnum.VIEW, mousePosition: null, hoveredSegmentId: 0, + hoveredUnmappedSegmentId: 0, activeMappingByLayer: {}, isMergerModeEnabled: false, gpuSetup: { diff --git a/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.ts b/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.ts index ec7f3692811..acc90cedd07 100644 --- a/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.ts +++ b/frontend/javascripts/oxalis/geometries/materials/plane_material_factory.ts @@ -14,6 +14,8 @@ import { getActiveCellId, getActiveSegmentationTracing, getActiveSegmentPosition, + getBucketRetrievalSourceFn, + needsLocalHdf5Mapping, } from "oxalis/model/accessors/volumetracing_accessor"; import { getPackingDegree } from "oxalis/model/bucket_data_handling/data_rendering_logic"; import { @@ -47,7 +49,7 @@ import app from "app"; import getMainFragmentShader, { getMainVertexShader } from "oxalis/shaders/main_data_shaders.glsl"; import shaderEditor from "oxalis/model/helpers/shader_editor"; import type { ElementClass } from "types/api_flow_types"; -import { CuckooTable } from "oxalis/model/bucket_data_handling/cuckoo_table"; +import { CuckooTableVec3 } from 
"libs/cuckoo/cuckoo_table_vec3"; import { getGlobalLayerIndexForLayerName } from "oxalis/model/bucket_data_handling/layer_rendering_manager"; import { V3 } from "libs/mjs"; import TPS3D from "libs/thin_plate_spline"; @@ -112,7 +114,8 @@ class PlaneMaterialFactory { leastRecentlyVisibleLayers: Array<{ name: string; isSegmentationLayer: boolean }>; oldFragmentShaderCode: string | null | undefined; oldVertexShaderCode: string | null | undefined; - unsubscribeSeedsFn: (() => void) | null = null; + unsubscribeColorSeedsFn: (() => void) | null = null; + unsubscribeMappingSeedsFn: (() => void) | null = null; scaledTpsInvPerLayer: Record = {}; @@ -140,6 +143,9 @@ class PlaneMaterialFactory { sphericalCapRadius: { value: 140, }, + selectiveVisibilityInProofreading: { + value: true, + }, is3DViewBeingRendered: { value: true, }, @@ -155,11 +161,11 @@ class PlaneMaterialFactory { viewportExtent: { value: [0, 0], }, - isMappingEnabled: { + shouldApplyMappingOnGPU: { value: false, }, - mappingSize: { - value: 0, + mappingIsPartial: { + value: false, }, hideUnmappedIds: { value: false, @@ -213,6 +219,12 @@ class PlaneMaterialFactory { hoveredSegmentIdLow: { value: new THREE.Vector4(0, 0, 0, 0), }, + hoveredUnmappedSegmentIdHigh: { + value: new THREE.Vector4(0, 0, 0, 0), + }, + hoveredUnmappedSegmentIdLow: { + value: new THREE.Vector4(0, 0, 0, 0), + }, // The same is done for the active cell id. 
activeCellIdHigh: { value: new THREE.Vector4(0, 0, 0, 0), @@ -309,7 +321,7 @@ class PlaneMaterialFactory { value: sharedLookUpTexture, }; - this.unsubscribeSeedsFn = sharedLookUpCuckooTable.subscribeToSeeds((seeds: number[]) => { + this.unsubscribeColorSeedsFn = sharedLookUpCuckooTable.subscribeToSeeds((seeds: number[]) => { this.uniforms.lookup_seeds = { value: seeds, }; @@ -331,18 +343,41 @@ class PlaneMaterialFactory { attachSegmentationMappingTextures(): void { const segmentationLayer = Model.getSegmentationLayerWithMappingSupport(); - const [mappingTexture, mappingLookupTexture] = - segmentationLayer?.mappings != null - ? segmentationLayer.mappings.getMappingTextures() // It's important to set up the uniforms (even when they are null), since later - : // additions to `this.uniforms` won't be properly attached otherwise. - [null, null, null]; + const cuckoo = + segmentationLayer?.mappings != null ? segmentationLayer.mappings.getCuckooTable() : null; + // It's important to set up the uniforms, since later additions to + // `this.uniforms` won't be properly attached otherwise. 
this.uniforms.segmentation_mapping_texture = { - value: mappingTexture, + value: cuckoo?.getTexture() || CuckooTableVec3.getNullTexture(), }; - this.uniforms.segmentation_mapping_lookup_texture = { - value: mappingLookupTexture, + this.uniforms.mapping_seeds = { value: [0, 0, 0] }; + this.uniforms.is_mapping_64bit = { + value: segmentationLayer?.mappings?.is64Bit() || false, }; + + this.unsubscribeMappingSeedsFn?.(); + + if (cuckoo) { + this.unsubscribeMappingSeedsFn = cuckoo.subscribeToSeeds((seeds: number[]) => { + this.uniforms.mapping_seeds = { value: seeds }; + }); + const { + CUCKOO_ENTRY_CAPACITY, + CUCKOO_ELEMENTS_PER_ENTRY, + CUCKOO_ELEMENTS_PER_TEXEL, + CUCKOO_TWIDTH, + } = cuckoo.getUniformValues(); + this.uniforms.MAPPING_CUCKOO_ENTRY_CAPACITY = { value: CUCKOO_ENTRY_CAPACITY }; + this.uniforms.MAPPING_CUCKOO_ELEMENTS_PER_ENTRY = { value: CUCKOO_ELEMENTS_PER_ENTRY }; + this.uniforms.MAPPING_CUCKOO_ELEMENTS_PER_TEXEL = { value: CUCKOO_ELEMENTS_PER_TEXEL }; + this.uniforms.MAPPING_CUCKOO_TWIDTH = { value: CUCKOO_TWIDTH }; + } else { + this.uniforms.MAPPING_CUCKOO_ENTRY_CAPACITY = { value: 0 }; + this.uniforms.MAPPING_CUCKOO_ELEMENTS_PER_ENTRY = { value: 0 }; + this.uniforms.MAPPING_CUCKOO_ELEMENTS_PER_TEXEL = { value: 0 }; + this.uniforms.MAPPING_CUCKOO_TWIDTH = { value: 0 }; + } } attachSegmentationColorTexture(): void { @@ -350,20 +385,20 @@ class PlaneMaterialFactory { if (segmentationLayer == null) { this.uniforms.custom_color_seeds = { value: [0, 0, 0] }; - this.uniforms.CUCKOO_ENTRY_CAPACITY = { value: 0 }; - this.uniforms.CUCKOO_ELEMENTS_PER_ENTRY = { value: 0 }; - this.uniforms.CUCKOO_ELEMENTS_PER_TEXEL = { value: 0 }; - this.uniforms.CUCKOO_TWIDTH = { value: 0 }; - this.uniforms.custom_color_texture = { value: CuckooTable.getNullTexture() }; + this.uniforms.COLOR_CUCKOO_ENTRY_CAPACITY = { value: 0 }; + this.uniforms.COLOR_CUCKOO_ELEMENTS_PER_ENTRY = { value: 0 }; + this.uniforms.COLOR_CUCKOO_ELEMENTS_PER_TEXEL = { value: 0 }; + 
this.uniforms.COLOR_CUCKOO_TWIDTH = { value: 0 }; + this.uniforms.custom_color_texture = { value: CuckooTableVec3.getNullTexture() }; return; } const cuckoo = segmentationLayer.layerRenderingManager.getCustomColorCuckooTable(); const customColorTexture = cuckoo.getTexture(); - if (this.unsubscribeSeedsFn != null) { - this.unsubscribeSeedsFn(); + if (this.unsubscribeColorSeedsFn != null) { + this.unsubscribeColorSeedsFn(); } - this.unsubscribeSeedsFn = cuckoo.subscribeToSeeds((seeds: number[]) => { + this.unsubscribeColorSeedsFn = cuckoo.subscribeToSeeds((seeds: number[]) => { this.uniforms.custom_color_seeds = { value: seeds }; }); const { @@ -372,10 +407,10 @@ class PlaneMaterialFactory { CUCKOO_ELEMENTS_PER_TEXEL, CUCKOO_TWIDTH, } = cuckoo.getUniformValues(); - this.uniforms.CUCKOO_ENTRY_CAPACITY = { value: CUCKOO_ENTRY_CAPACITY }; - this.uniforms.CUCKOO_ELEMENTS_PER_ENTRY = { value: CUCKOO_ELEMENTS_PER_ENTRY }; - this.uniforms.CUCKOO_ELEMENTS_PER_TEXEL = { value: CUCKOO_ELEMENTS_PER_TEXEL }; - this.uniforms.CUCKOO_TWIDTH = { value: CUCKOO_TWIDTH }; + this.uniforms.COLOR_CUCKOO_ENTRY_CAPACITY = { value: CUCKOO_ENTRY_CAPACITY }; + this.uniforms.COLOR_CUCKOO_ELEMENTS_PER_ENTRY = { value: CUCKOO_ELEMENTS_PER_ENTRY }; + this.uniforms.COLOR_CUCKOO_ELEMENTS_PER_TEXEL = { value: CUCKOO_ELEMENTS_PER_TEXEL }; + this.uniforms.COLOR_CUCKOO_TWIDTH = { value: CUCKOO_TWIDTH }; this.uniforms.custom_color_texture = { value: customColorTexture, }; @@ -516,7 +551,15 @@ class PlaneMaterialFactory { true, ), ); - + this.storePropertyUnsubscribers.push( + listenToStoreProperty( + (storeState) => storeState.userConfiguration.selectiveVisibilityInProofreading, + (selectiveVisibilityInProofreading) => { + this.uniforms.selectiveVisibilityInProofreading.value = selectiveVisibilityInProofreading; + }, + true, + ), + ); this.storePropertyUnsubscribers.push( listenToStoreProperty( (storeState) => getResolutionInfoByLayer(storeState.dataset), @@ -553,15 +596,6 @@ class PlaneMaterialFactory { 
true, ), ); - this.storePropertyUnsubscribers.push( - listenToStoreProperty( - (storeState) => getMappingInfoForSupportedLayer(storeState).mappingSize, - (mappingSize) => { - this.uniforms.mappingSize.value = mappingSize; - }, - true, - ), - ); this.storePropertyUnsubscribers.push( listenToStoreProperty( (storeState) => getMappingInfoForSupportedLayer(storeState).hideUnmappedIds, @@ -739,8 +773,21 @@ class PlaneMaterialFactory { ); this.storePropertyUnsubscribers.push( listenToStoreProperty( - (storeState) => - Utils.maybe(getActiveCellId)(getActiveSegmentationTracing(storeState)).getOrElse(0), + (storeState) => storeState.temporaryConfiguration.hoveredUnmappedSegmentId, + (hoveredUnmappedSegmentId) => { + const [high, low] = Utils.convertNumberTo64Bit(hoveredUnmappedSegmentId); + + this.uniforms.hoveredUnmappedSegmentIdLow.value.set(...low); + this.uniforms.hoveredUnmappedSegmentIdHigh.value.set(...high); + }, + ), + ); + this.storePropertyUnsubscribers.push( + listenToStoreProperty( + (storeState) => { + const activeSegmentationTracing = getActiveSegmentationTracing(storeState); + return activeSegmentationTracing ? 
getActiveCellId(activeSegmentationTracing) : 0; + }, () => this.updateActiveCellId(), true, ), @@ -768,15 +815,42 @@ class PlaneMaterialFactory { ); this.storePropertyUnsubscribers.push( listenToStoreProperty( - (storeState) => - getMappingInfoForSupportedLayer(storeState).mappingStatus === - MappingStatusEnum.ENABLED && // The shader should only know about the mapping when a JSON mapping exists - getMappingInfoForSupportedLayer(storeState).mappingType === "JSON", - (isEnabled) => { - this.uniforms.isMappingEnabled.value = isEnabled; + (storeState) => { + const layer = getSegmentationLayerWithMappingSupport(storeState); + if (!layer) { + return false; + } + + return ( + getMappingInfoForSupportedLayer(storeState).mappingStatus === + MappingStatusEnum.ENABLED && + _.isEqual(getBucketRetrievalSourceFn(layer.name)(storeState).slice(0, 2), [ + "REQUESTED-WITHOUT-MAPPING", + "LOCAL-MAPPING-APPLIED", + ]) + ); + }, + (shouldApplyMappingOnGPU) => { + this.uniforms.shouldApplyMappingOnGPU.value = shouldApplyMappingOnGPU; }, ), ); + this.storePropertyUnsubscribers.push( + listenToStoreProperty( + (storeState) => { + const layer = getSegmentationLayerWithMappingSupport(storeState); + if (!layer) { + return false; + } + + return needsLocalHdf5Mapping(storeState, layer.name); + }, + (mappingIsPartial) => { + this.uniforms.mappingIsPartial.value = mappingIsPartial; + }, + ), + ); + this.storePropertyUnsubscribers.push( listenToStoreProperty( (storeState) => storeState.uiInformation.activeTool, @@ -841,18 +915,14 @@ class PlaneMaterialFactory { } updateActiveCellId() { - const activeCellId = Utils.maybe(getActiveCellId)( - getActiveSegmentationTracing(Store.getState()), - ).getOrElse(0); - const segmentationLayer = Model.getVisibleSegmentationLayer(); + const activeSegmentationTracing = getActiveSegmentationTracing(Store.getState()); + const activeCellId = activeSegmentationTracing ? 
getActiveCellId(activeSegmentationTracing) : 0; - if (segmentationLayer == null) { + if (activeSegmentationTracing == null) { return; } - const mappedActiveCellId = segmentationLayer.cube.mapId(activeCellId); - - const [high, low] = Utils.convertNumberTo64Bit(mappedActiveCellId); + const [high, low] = Utils.convertNumberTo64Bit(activeCellId); this.uniforms.activeCellIdLow.value.set(...low); this.uniforms.activeCellIdHigh.value.set(...high); diff --git a/frontend/javascripts/oxalis/model.ts b/frontend/javascripts/oxalis/model.ts index 76d69797b96..3b52a41585d 100644 --- a/frontend/javascripts/oxalis/model.ts +++ b/frontend/javascripts/oxalis/model.ts @@ -227,6 +227,9 @@ export class OxalisModel { const additionalCoordinates = Store.getState().flycam.additionalCoordinates; const id = cube.getDataValue(pos, additionalCoordinates, null, usableZoomStep); return { + // Note that this id can be an unmapped id even when + // a mapping is active, if it is an HDF5 mapping that is partially loaded + // and no entry exists yet for the input id. 
id: cube.mapId(id), unmappedId: id, }; diff --git a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts index 83f845e5db7..46f0df76b90 100644 --- a/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/dataset_accessor.ts @@ -668,7 +668,6 @@ const dummyMapping = { mappingColors: null, hideUnmappedIds: false, mappingStatus: MappingStatusEnum.DISABLED, - mappingSize: 0, mappingType: "JSON", } as const; diff --git a/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts b/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts index 0ef13d54387..ad999fbf349 100644 --- a/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/skeletontracing_accessor.ts @@ -16,6 +16,7 @@ import type { TreeGroupTypeFlat, Node, OxalisState, + NumberLike, } from "oxalis/store"; import { findGroup, @@ -270,7 +271,7 @@ export function getTreeGroupsMap( } // This is the pattern for the automatically assigned names for agglomerate skeletons export const getTreeNameForAgglomerateSkeleton = ( - agglomerateId: number, + agglomerateId: NumberLike, mappingName: string, ): string => `agglomerate ${agglomerateId} (${mappingName})`; diff --git a/frontend/javascripts/oxalis/model/accessors/tool_accessor.ts b/frontend/javascripts/oxalis/model/accessors/tool_accessor.ts index 9c700dd91e6..e2a5022aaa3 100644 --- a/frontend/javascripts/oxalis/model/accessors/tool_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/tool_accessor.ts @@ -21,6 +21,7 @@ import { PricingPlanEnum, } from "admin/organization/pricing_plan_utils"; import { isSkeletonLayerTransformed } from "./skeletontracing_accessor"; +import { reuseInstanceOnEquality } from "./accessor_helpers"; const zoomInToUseToolMessage = "Please zoom in further to use this tool. 
If you want to edit volume data on this zoom level, create an annotation with restricted resolutions from the extended annotation menu in the dashboard."; @@ -289,7 +290,7 @@ function getDisabledVolumeInfo(state: OxalisState) { visibleSegmentationLayer != null && visibleSegmentationLayer.name === segmentationTracingLayer.tracingId; const isEditableMappingActive = - segmentationTracingLayer != null && !!segmentationTracingLayer.mappingIsEditable; + segmentationTracingLayer != null && !!segmentationTracingLayer.hasEditableMapping; const isJSONMappingActive = segmentationTracingLayer != null && @@ -307,7 +308,7 @@ function getDisabledVolumeInfo(state: OxalisState) { isSegmentationTracingTransformed; const isUneditableMappingLocked = - (segmentationTracingLayer?.mappingIsLocked && !segmentationTracingLayer?.mappingIsEditable) ?? + (segmentationTracingLayer?.mappingIsLocked && !segmentationTracingLayer?.hasEditableMapping) ?? false; return isVolumeDisabled || isEditableMappingActive @@ -336,19 +337,19 @@ function getDisabledVolumeInfo(state: OxalisState) { } const getVolumeDisabledWhenVolumeIsEnabled = memoizeOne(_getVolumeDisabledWhenVolumeIsEnabled); -export function getDisabledInfoForTools( - state: OxalisState, -): Record { - const hasSkeleton = state.tracing.skeleton != null; - const skeletonToolInfo = getSkeletonToolInfo(hasSkeleton, isSkeletonLayerTransformed(state)); +export const getDisabledInfoForTools = reuseInstanceOnEquality( + (state: OxalisState): Record => { + const hasSkeleton = state.tracing.skeleton != null; + const skeletonToolInfo = getSkeletonToolInfo(hasSkeleton, isSkeletonLayerTransformed(state)); - const disabledVolumeInfo = getDisabledVolumeInfo(state); - return { - ...ALWAYS_ENABLED_TOOL_INFOS, - ...skeletonToolInfo, - ...disabledVolumeInfo, - }; -} + const disabledVolumeInfo = getDisabledVolumeInfo(state); + return { + ...ALWAYS_ENABLED_TOOL_INFOS, + ...skeletonToolInfo, + ...disabledVolumeInfo, + }; + }, +); export function 
adaptActiveToolToShortcuts( activeTool: AnnotationTool, diff --git a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.ts b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.ts index 4f052daad31..fad69f7d94f 100644 --- a/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.ts +++ b/frontend/javascripts/oxalis/model/accessors/volumetracing_accessor.ts @@ -48,6 +48,7 @@ import messages from "messages"; import { MISSING_GROUP_ID } from "oxalis/view/right-border-tabs/tree_hierarchy_view_helpers"; import { Store } from "oxalis/singletons"; import { setSelectedSegmentsOrGroupAction } from "../actions/volumetracing_actions"; +import _ from "lodash"; export function getVolumeTracings(tracing: Tracing): Array { return tracing.volumes; @@ -522,9 +523,9 @@ export const getRenderableResolutionForActiveSegmentationTracing = reuseInstance export function getMappingInfoForVolumeTracing( state: OxalisState, - tracingId: string | null | undefined, + tracingIdOrLayerName: string | null | undefined, ): ActiveMappingInfo { - return getMappingInfo(state.temporaryConfiguration.activeMappingByLayer, tracingId); + return getMappingInfo(state.temporaryConfiguration.activeMappingByLayer, tracingIdOrLayerName); } function getVolumeTracingForLayerName( @@ -554,7 +555,7 @@ export function hasEditableMapping( if (volumeTracing == null) return false; - return !!volumeTracing.mappingIsEditable; + return !!volumeTracing.hasEditableMapping; } export function isMappingLocked( @@ -783,6 +784,52 @@ export function getMeshInfoForSegment( return meshesForAddCoords[segmentId]; } +export function needsLocalHdf5Mapping(state: OxalisState, layerName: string) { + const volumeTracing = getVolumeTracingByLayerName(state.tracing, layerName); + if (volumeTracing == null) { + return false; + } + + return ( + // An annotation that has an editable mapping is likely proofread a lot. 
+ // Switching between tools should not require a reload which is why + // needsLocalHdf5Mapping() will always return true in that case. + volumeTracing.hasEditableMapping || + state.uiInformation.activeTool === AnnotationToolEnum.PROOFREAD + ); +} + +export type BucketRetrievalSource = + | ["REQUESTED-WITHOUT-MAPPING", "NO-LOCAL-MAPPING-APPLIED"] + | ["REQUESTED-WITHOUT-MAPPING", "LOCAL-MAPPING-APPLIED", string] + | ["REQUESTED-WITH-MAPPING", string]; + +export const getBucketRetrievalSourceFn = + // The function that is passed to memoize will only be executed once + // per layerName. This is important since the function uses reuseInstanceOnEquality + // to create a function that ensures that identical BucketRetrievalSource tuples will be re-used between + // consecutive calls. + _.memoize((layerName: string) => + reuseInstanceOnEquality((state: OxalisState): BucketRetrievalSource => { + const usesLocalHdf5Mapping = needsLocalHdf5Mapping(state, layerName); + + const mappingInfo = getMappingInfoForVolumeTracing(state, layerName); + + if ( + mappingInfo.mappingStatus === MappingStatusEnum.DISABLED || + mappingInfo.mappingName == null + ) { + return ["REQUESTED-WITHOUT-MAPPING", "NO-LOCAL-MAPPING-APPLIED"]; + } + + if (usesLocalHdf5Mapping || mappingInfo.mappingType === "JSON") { + return ["REQUESTED-WITHOUT-MAPPING", "LOCAL-MAPPING-APPLIED", mappingInfo.mappingName]; + } + + return ["REQUESTED-WITH-MAPPING", mappingInfo.mappingName]; + }), + ); + export function getReadableNameOfVolumeLayer( layer: APIDataLayer, tracing: HybridTracing, diff --git a/frontend/javascripts/oxalis/model/actions/proofread_actions.ts b/frontend/javascripts/oxalis/model/actions/proofread_actions.ts index cd57afad0aa..31ac923ae7d 100644 --- a/frontend/javascripts/oxalis/model/actions/proofread_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/proofread_actions.ts @@ -50,7 +50,7 @@ export const proofreadMerge = ( export const minCutAgglomerateAction = (sourceNodeId: number, 
targetNodeId: number) => ({ - type: "MIN_CUT_AGGLOMERATE", + type: "MIN_CUT_AGGLOMERATE_WITH_NODE_IDS", sourceNodeId, targetNodeId, }) as const; @@ -61,7 +61,7 @@ export const minCutAgglomerateWithPositionAction = ( agglomerateId?: number | null, ) => ({ - type: "MIN_CUT_AGGLOMERATE_WITH_POSITION", + type: "MIN_CUT_AGGLOMERATE", position, segmentId, agglomerateId, diff --git a/frontend/javascripts/oxalis/model/actions/settings_actions.ts b/frontend/javascripts/oxalis/model/actions/settings_actions.ts index 1255b3930b4..4f1f2b9b78b 100644 --- a/frontend/javascripts/oxalis/model/actions/settings_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/settings_actions.ts @@ -24,6 +24,10 @@ type SetFlightmodeRecordingAction = ReturnType; type InitializeGpuSetupAction = ReturnType; export type SetMappingEnabledAction = ReturnType; +export type FinishMappingInitializationAction = ReturnType< + typeof finishMappingInitializationAction +>; +export type ClearMappingAction = ReturnType; export type SetMappingAction = ReturnType; export type SetMappingNameAction = ReturnType; type SetHideUnmappedIdsAction = ReturnType; @@ -39,6 +43,8 @@ export type SettingAction = | SetFlightmodeRecordingAction | SetControlModeAction | SetMappingEnabledAction + | FinishMappingInitializationAction + | ClearMappingAction | SetMappingAction | SetMappingNameAction | SetHideUnmappedIdsAction @@ -172,12 +178,29 @@ export const setMappingEnabledAction = (layerName: string, isMappingEnabled: boo isMappingEnabled, }) as const; +export const finishMappingInitializationAction = (layerName: string) => + ({ + type: "FINISH_MAPPING_INITIALIZATION", + layerName, + }) as const; + +// This is not the same as disabling a mapping. A disabled mapping can simply be re-enabled. +// Clearing a mapping sets the mapping dictionary to undefined. This is important when a +// locally applied mapping should no longer be applied locally but by the back-end. 
In that case, +// the mapping is still enabled, but we want to clear the local mapping dictionary. +export const clearMappingAction = (layerName: string) => + ({ + type: "CLEAR_MAPPING", + layerName, + }) as const; + export type OptionalMappingProperties = { mapping?: Mapping; mappingColors?: Array; hideUnmappedIds?: boolean; showLoadingIndicator?: boolean; }; + export const setMappingAction = ( layerName: string, mappingName: string | null | undefined, diff --git a/frontend/javascripts/oxalis/model/actions/volumetracing_actions.ts b/frontend/javascripts/oxalis/model/actions/volumetracing_actions.ts index 3d6241e486b..50053c226ca 100644 --- a/frontend/javascripts/oxalis/model/actions/volumetracing_actions.ts +++ b/frontend/javascripts/oxalis/model/actions/volumetracing_actions.ts @@ -1,7 +1,7 @@ import type { ServerEditableMapping, ServerVolumeTracing } from "types/api_flow_types"; import type { Vector2, Vector3, OrthoView, ContourMode, BucketAddress } from "oxalis/constants"; import type { BucketDataArray } from "oxalis/model/bucket_data_handling/bucket"; -import type { Segment, SegmentGroup, SegmentMap } from "oxalis/store"; +import type { NumberLike, Segment, SegmentGroup, SegmentMap } from "oxalis/store"; import Deferred from "libs/async/deferred"; import type { Dispatch } from "redux"; import { AllUserBoundingBoxActions } from "oxalis/model/actions/annotation_actions"; @@ -42,7 +42,7 @@ export type UpdateSegmentAction = ReturnType; export type RemoveSegmentAction = ReturnType; export type DeleteSegmentDataAction = ReturnType; export type SetSegmentGroupsAction = ReturnType; -export type SetMappingIsEditableAction = ReturnType; +export type SetHasEditableMappingAction = ReturnType; export type SetMappingIsLockedAction = ReturnType; export type ComputeQuickSelectForRectAction = ReturnType; @@ -89,7 +89,7 @@ export type VolumeTracingAction = | ImportVolumeTracingAction | SetLargestSegmentIdAction | SetSelectedSegmentsOrGroupAction - | SetMappingIsEditableAction + 
| SetHasEditableMappingAction | SetMappingIsLockedAction | InitializeEditableMappingAction | ComputeQuickSelectForRectAction @@ -108,11 +108,12 @@ export const VolumeTracingSaveRelevantActions = [ "REMOVE_SEGMENT", "SET_SEGMENTS", ...AllUserBoundingBoxActions, - // Note that the following two actions are defined in settings_actions.ts + // Note that the following three actions are defined in settings_actions.ts "SET_MAPPING", "SET_MAPPING_ENABLED", + "FINISH_MAPPING_INITIALIZATION", "BATCH_UPDATE_GROUPS_AND_SEGMENTS", - "SET_MAPPING_IS_EDITABLE", + "SET_HAS_EDITABLE_MAPPING", "SET_MAPPING_IS_LOCKED", ]; @@ -227,7 +228,7 @@ export const setSegmentsAction = (segments: SegmentMap, layerName: string) => }) as const; export const updateSegmentAction = ( - segmentId: number, + segmentId: NumberLike, segment: Partial, layerName: string, timestamp: number = Date.now(), @@ -235,7 +236,8 @@ ) => ({ type: "UPDATE_SEGMENT", - segmentId, + // TODO: Proper 64 bit support (#6921) + segmentId: Number(segmentId), segment, layerName, timestamp, }) as const; export const removeSegmentAction = ( - segmentId: number, + segmentId: NumberLike, layerName: string, timestamp: number = Date.now(), ) => ({ type: "REMOVE_SEGMENT", - segmentId, + // TODO: Proper 64 bit support (#6921) + segmentId: Number(segmentId), layerName, timestamp, }) as const; @@ -357,9 +360,9 @@ export const dispatchFloodfillAsync = async ( await readyDeferred.promise(); }; -export const setMappingIsEditableAction = () => +export const setHasEditableMappingAction = () => ({ - type: "SET_MAPPING_IS_EDITABLE", + type: "SET_HAS_EDITABLE_MAPPING", }) as const; export const setMappingIsLockedAction = () => diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.ts index 263d5323446..5ba51744f62 100644 --- 
a/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/bucket.ts @@ -156,7 +156,7 @@ export class DataBucket { // know whether a certain ID is contained in this bucket. To // speed up such requests a cached set of the contained values // can be stored in cachedValueSet. - cachedValueSet: Set | null = null; + cachedValueSet: Set | Set | null = null; constructor( elementClass: ElementClass, @@ -395,6 +395,7 @@ export class DataBucket { this.pendingOperations = newPendingOperations; this.dirty = true; this.endDataMutation(); + this.cube.triggerBucketDataChanged(); } uint8ToTypedBuffer(arrayBuffer: Uint8Array | null | undefined) { @@ -580,7 +581,7 @@ export class DataBucket { } } - receiveData(arrayBuffer: Uint8Array | null | undefined): void { + receiveData(arrayBuffer: Uint8Array | null | undefined, computeValueSet: boolean = false): void { const data = this.uint8ToTypedBuffer(arrayBuffer); const [TypedArrayClass, channelCount] = getConstructorForElementClass(this.elementClass); @@ -613,9 +614,13 @@ export class DataBucket { this.data = data; } this.invalidateValueSet(); + if (computeValueSet) { + this.ensureValueSet(); + } this.state = BucketStateEnum.LOADED; this.trigger("bucketLoaded", data); + this.cube.triggerBucketDataChanged(); break; } @@ -628,16 +633,22 @@ export class DataBucket { this.cachedValueSet = null; } - private recomputeValueSet() { - // @ts-ignore The Set constructor accepts null and BigUint64Arrays just fine. - this.cachedValueSet = new Set(this.data); + private ensureValueSet(): asserts this is { cachedValueSet: Set | Set } { + if (this.cachedValueSet == null) { + // @ts-ignore The Set constructor accepts null and BigUint64Arrays just fine. 
+ this.cachedValueSet = new Set(this.data); + } } containsValue(value: number | bigint): boolean { - if (this.cachedValueSet == null) { - this.recomputeValueSet(); - } - return this.cachedValueSet!.has(value); + this.ensureValueSet(); + // @ts-ignore The Set has function accepts number | bigint values just fine, regardless of what's in it. + return this.cachedValueSet.has(value); + } + + getValueSet(): Set | Set { + this.ensureValueSet(); + return this.cachedValueSet; } markAsPushed(): void { diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.ts index fa6597c5784..8f7ed067e6f 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/data_cube.ts @@ -1,11 +1,17 @@ import _ from "lodash"; +import { createNanoEvents, Emitter } from "nanoevents"; import type { Bucket, BucketDataArray } from "oxalis/model/bucket_data_handling/bucket"; import { DataBucket, NULL_BUCKET, NullBucket } from "oxalis/model/bucket_data_handling/bucket"; import type { AdditionalAxis, ElementClass } from "types/api_flow_types"; import type { ProgressCallback } from "libs/progress_callback"; import { V3 } from "libs/mjs"; import { VoxelNeighborQueue2D, VoxelNeighborQueue3D } from "oxalis/model/volumetracing/volumelayer"; -import { areBoundingBoxesOverlappingOrTouching, castForArrayType } from "libs/utils"; +import { + areBoundingBoxesOverlappingOrTouching, + castForArrayType, + isNumberMap, + union, +} from "libs/utils"; import { getMappingInfo } from "oxalis/model/accessors/dataset_accessor"; import { getSomeTracing } from "oxalis/model/accessors/tracing_accessor"; import { globalPositionToBucketPosition } from "oxalis/model/helpers/position_converter"; @@ -78,6 +84,8 @@ class DataCube { elementClass: ElementClass; resolutionInfo: ResolutionInfo; layerName: string; + emitter: Emitter; + lastRequestForValueSet: number | 
null = null; // The cube stores the buckets in a separate array for each zoomStep. For each // zoomStep the cube-array contains the boundaries and an array holding the buckets. @@ -108,6 +116,7 @@ class DataCube { this.resolutionInfo = resolutionInfo; this.layerName = layerName; this.additionalAxes = _.keyBy(additionalAxes, "name"); + this.emitter = createNanoEvents(); this.cubes = {}; this.buckets = []; @@ -170,19 +179,25 @@ class DataCube { : false; } - mapId(idToMap: number): number { - let mappedId = null; + mapId(unmappedId: number): number { + // Note that the return value can be an unmapped id even when + // a mapping is active, if it is a HDF5 mapping that is partially loaded + // and no entry exists yet for the input id. + let mappedId: number | null | undefined = null; const mapping = this.getMapping(); if (mapping != null && this.isMappingEnabled()) { - mappedId = mapping.get(idToMap); + mappedId = isNumberMap(mapping) + ? mapping.get(Number(unmappedId)) + : // TODO: Proper 64 bit support (#6921) + Number(mapping.get(BigInt(unmappedId))); } - - if (this.shouldHideUnmappedIds() && mappedId == null) { - mappedId = 0; + if (mappedId == null || isNaN(mappedId)) { + // The id couldn't be mapped. + return this.shouldHideUnmappedIds() ? 0 : unmappedId; } - return mappedId != null ? mappedId : idToMap; + return mappedId; } private getCubeKey(zoomStep: number, allCoords: AdditionalCoordinate[] | undefined | null) { @@ -373,6 +388,30 @@ class DataCube { this.bucketIterator = notCollectedBuckets.length; } + triggerBucketDataChanged(): void { + this.emitter.emit("bucketDataChanged"); + } + + shouldEagerlyMaintainUsedValueSet() { + // The value set for all buckets in this cube should be maintained eagerly + // if the valueSet was used within the last 2 minutes. 
+ return Date.now() - (this.lastRequestForValueSet || 0) < 2 * 60 * 1000; + } + + getValueSetForAllBuckets(): Set | Set { + this.lastRequestForValueSet = Date.now(); + + // Theoretically, we could ignore coarser buckets for which we know that + // finer buckets are already loaded. However, the current performance + // is acceptable which is why this optimization isn't implemented. + const valueSets = this.buckets + .filter((bucket) => bucket.state === "LOADED") + .map((bucket) => bucket.getValueSet()); + // @ts-ignore The buckets of a single layer all have the same element class, so they are all number or all bigint + const valueSet = union(valueSets); + return valueSet; + } + collectBucket(bucket: DataBucket): void { const address = bucket.zoomedAddress; const [bucketIndex, cube] = this.getBucketIndexAndCube(address); @@ -838,17 +877,21 @@ class DataCube { if (bucket.hasData()) { const data = bucket.getData(); - const dataValue = Number(data[voxelIndex]); + const dataValue = data[voxelIndex]; if (mapping) { - const mappedValue = mapping.get(dataValue); + const mappedValue = isNumberMap(mapping) + ? 
mapping.get(Number(dataValue)) + : mapping.get(BigInt(dataValue)); if (mappedValue != null) { - return mappedValue; + // TODO: Proper 64 bit support (#6921) + return Number(mappedValue); } } - return dataValue; + // TODO: Proper 64 bit support (#6921) + return Number(dataValue); } return 0; diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts index b9fcced8354..61b1662603b 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/layer_rendering_manager.ts @@ -24,12 +24,12 @@ import UpdatableTexture from "libs/UpdatableTexture"; import type { ViewMode, Vector3, Vector4, BucketAddress } from "oxalis/constants"; import shaderEditor from "oxalis/model/helpers/shader_editor"; import DiffableMap from "libs/diffable_map"; -import { CuckooTable } from "./cuckoo_table"; +import { CuckooTableVec3 } from "libs/cuckoo/cuckoo_table_vec3"; +import { CuckooTableVec5 } from "libs/cuckoo/cuckoo_table_vec5"; import { listenToStoreProperty } from "../helpers/listener_helpers"; import { cachedDiffSegmentLists } from "../sagas/volumetracing_saga"; import { getSegmentsForLayer } from "../accessors/volumetracing_accessor"; import { getViewportRects } from "../accessors/view_mode_accessor"; -import { CuckooTableVec5 } from "./cuckoo_table_vec5"; import { type AdditionalCoordinate } from "types/api_flow_types"; import app from "app"; @@ -127,7 +127,7 @@ export default class LayerRenderingManager { latestTaskExecutor: LatestTaskExecutor = new LatestTaskExecutor(); additionalCoordinates: AdditionalCoordinate[] | null = null; - cuckooTable: CuckooTable | undefined; + cuckooTable: CuckooTableVec3 | undefined; storePropertyUnsubscribers: Array<() => void> = []; constructor( @@ -295,7 +295,7 @@ export default class LayerRenderingManager { "getCustomColorCuckooTable should 
not be called for non-segmentation layers.", ); } - this.cuckooTable = new CuckooTable(CUSTOM_COLORS_TEXTURE_WIDTH); + this.cuckooTable = new CuckooTableVec3(CUSTOM_COLORS_TEXTURE_WIDTH); return this.cuckooTable; } diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/mappings.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/mappings.ts index 99d8326161c..6ed6eaf8f82 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/mappings.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/mappings.ts @@ -1,24 +1,66 @@ -import * as THREE from "three"; -import { message } from "antd"; -import { createUpdatableTexture } from "oxalis/geometries/materials/plane_material_factory_helpers"; -import { getMappings, getMappingInfo } from "oxalis/model/accessors/dataset_accessor"; -import { getRenderer } from "oxalis/controller/renderer"; +import _ from "lodash"; +import { + getMappings, + getMappingInfo, + getElementClass, +} from "oxalis/model/accessors/dataset_accessor"; import { listenToStoreProperty } from "oxalis/model/helpers/listener_helpers"; -import { setMappingEnabledAction } from "oxalis/model/actions/settings_actions"; -import type { Mapping } from "oxalis/store"; +import { finishMappingInitializationAction } from "oxalis/model/actions/settings_actions"; +import type { Mapping, NumberLike } from "oxalis/store"; import Store from "oxalis/store"; import UpdatableTexture from "libs/UpdatableTexture"; -import messages from "messages"; +import { CuckooTableUint64 } from "libs/cuckoo/cuckoo_table_uint64"; +import { CuckooTableUint32 } from "libs/cuckoo/cuckoo_table_uint32"; +import { message } from "antd"; +import { diffMaps } from "libs/utils"; +import memoizeOne from "memoize-one"; +import Toast from "libs/toast"; +// With the default load factor of 0.9, this suffices for mapping +// ~15M uint32 ids. 
export const MAPPING_TEXTURE_WIDTH = 4096; export const MAPPING_MESSAGE_KEY = "mappings"; +function diffMappings( + mappingA: Mapping, + mappingB: Mapping, + cacheResult?: ReturnType>, +) { + if (cacheResult != null) { + return cacheResult; + } + return diffMaps(mappingA, mappingB); +} + +export const cachedDiffMappings = memoizeOne( + diffMappings, + (newInputs, lastInputs) => + // If cacheResult was passed, the inputs must be considered as not equal + // so that the new result can be set + newInputs[2] == null && newInputs[0] === lastInputs[0] && newInputs[1] === lastInputs[1], +); +export const setCacheResultForDiffMappings = ( + mappingA: Mapping, + mappingB: Mapping, + cacheResult: ReturnType>, +) => { + cachedDiffMappings(mappingA, mappingB, cacheResult); +}; + +const throttledCapacityWarning = _.throttle(() => { + const msg = + "The mapping is becoming too large and will only be partially applied. Please zoom further in to avoid that too many segment ids are present. Also consider refreshing the page."; + console.warn(msg); + Toast.warning(msg); +}, 10000); + class Mappings { layerName: string; - // @ts-expect-error ts-migrate(2564) FIXME: Property 'mappingTexture' has no initializer and i... Remove this comment to see the full error message - mappingTexture: UpdatableTexture; - // @ts-expect-error ts-migrate(2564) FIXME: Property 'mappingLookupTexture' has no initializer... 
Remove this comment to see the full error message - mappingLookupTexture: UpdatableTexture; + mappingTexture!: UpdatableTexture; + mappingLookupTexture!: UpdatableTexture; + cuckooTable: CuckooTableUint64 | CuckooTableUint32 | null = null; + previousMapping: Mapping | null | undefined = null; + currentKeyCount: number = 0; constructor(layerName: string) { this.layerName = layerName; @@ -30,21 +72,9 @@ class Mappings { // MAPPING TEXTURES setupMappingTextures() { - const renderer = getRenderer(); - this.mappingTexture = createUpdatableTexture( - MAPPING_TEXTURE_WIDTH, - MAPPING_TEXTURE_WIDTH, - 4, - THREE.UnsignedByteType, - renderer, - ); - this.mappingLookupTexture = createUpdatableTexture( - MAPPING_TEXTURE_WIDTH, - MAPPING_TEXTURE_WIDTH, - 4, - THREE.UnsignedByteType, - renderer, - ); + this.cuckooTable = this.is64Bit() + ? new CuckooTableUint64(MAPPING_TEXTURE_WIDTH) + : new CuckooTableUint32(MAPPING_TEXTURE_WIDTH); listenToStoreProperty( (state) => @@ -56,58 +86,70 @@ class Mappings { ); } + is64Bit() { + const elementClass = getElementClass(Store.getState().dataset, this.layerName); + return elementClass === "uint64" || elementClass === "int64"; + } + async updateMappingTextures(mapping: Mapping | null | undefined): Promise { if (mapping == null) return; - console.time("Time to create mapping texture"); - const mappingSize = mapping.size; - // The typed arrays need to be padded with 0s so that their length is a multiple of MAPPING_TEXTURE_WIDTH - const paddedLength = - mappingSize + MAPPING_TEXTURE_WIDTH - (mappingSize % MAPPING_TEXTURE_WIDTH); - const keys = new Uint32Array(paddedLength); - const values = new Uint32Array(paddedLength); - const mappingKeys = Array.from(mapping.keys()); - mappingKeys.sort((a, b) => a - b); - keys.set(mappingKeys); - // @ts-ignore mappingKeys are guaranteed to exist in mapping as they are mapping.keys() - values.set(mappingKeys.map((key) => mapping.get(key))); - // Instantiate the Uint8Arrays with the array buffer from the 
Uint32Arrays, so that each 32-bit value is converted - // to four 8-bit values correctly - const uint8Keys = new Uint8Array(keys.buffer); - const uint8Values = new Uint8Array(values.buffer); - console.timeEnd("Time to create mapping texture"); - - if (mappingSize > MAPPING_TEXTURE_WIDTH ** 2) { - throw new Error(messages["mapping.too_big"]); + if (this.cuckooTable == null) { + throw new Error("cuckooTable null when updateMappingTextures was called."); } - this.mappingLookupTexture.update( - uint8Keys, - 0, - 0, - MAPPING_TEXTURE_WIDTH, - uint8Keys.length / MAPPING_TEXTURE_WIDTH / 4, - ); - this.mappingTexture.update( - uint8Values, - 0, - 0, - MAPPING_TEXTURE_WIDTH, - uint8Values.length / MAPPING_TEXTURE_WIDTH / 4, - ); + const { changed, onlyA, onlyB } = + this.previousMapping != null + ? cachedDiffMappings(this.previousMapping, mapping) + : { changed: [], onlyA: [], onlyB: Array.from(mapping.keys() as Iterable) }; + + const totalUpdateCount = _.size(changed) + _.size(onlyA) + _.size(onlyB); + const doFullTextureUpdate = totalUpdateCount > 10000; + if (doFullTextureUpdate) { + this.cuckooTable.disableAutoTextureUpdate(); + } + + for (const keyToDelete of onlyA) { + this.cuckooTable.unsetNumberLike(keyToDelete); + this.currentKeyCount--; + } + + for (const key of changed) { + // We know that the lookup of key in mapping has to succeed because + // the diffing wouldn't have returned the id otherwise. + const value = (mapping as Map).get(key) as NumberLike; + this.cuckooTable.setNumberLike(key, value); + } + + for (const key of onlyB) { + if (this.currentKeyCount > this.cuckooTable.getCriticalCapacity()) { + throttledCapacityWarning(); + break; + } + // We know that the lookup of key in mapping has to succeed because + // the diffing wouldn't have returned the id otherwise. 
+ const value = (mapping as Map).get(key) as NumberLike; + this.currentKeyCount++; + this.cuckooTable.setNumberLike(key, value); + } + if (doFullTextureUpdate) { + this.cuckooTable.enableAutoTextureUpdateAndFlush(); + } + + this.previousMapping = mapping; + message.destroy(MAPPING_MESSAGE_KEY); - Store.dispatch(setMappingEnabledAction(this.layerName, true)); + Store.dispatch(finishMappingInitializationAction(this.layerName)); } - getMappingTextures() { - if (this.mappingTexture == null) { + getCuckooTable() { + if (this.cuckooTable == null) { this.setupMappingTextures(); } - - if (this.mappingTexture == null || this.mappingLookupTexture == null) { - throw new Error("Mapping textures are null after initialization."); + if (this.cuckooTable == null) { + throw new Error("cuckooTable null after setupMappingTextures was called."); } - return [this.mappingTexture, this.mappingLookupTexture]; + return this.cuckooTable; } } diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/pullqueue.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/pullqueue.ts index 8d8b2251d5f..d9c87af9613 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/pullqueue.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/pullqueue.ts @@ -162,7 +162,15 @@ class PullQueue { const bucket = this.cube.getBucket(bucketAddress); if (bucket.type === "data") { - bucket.receiveData(bucketData); + if (this.cube.shouldEagerlyMaintainUsedValueSet()) { + // If we assume that the value set of the bucket is needed often (for proofreading), + // we compute it here eagerly and then send the data to the bucket. + // That way, the computations of the value set are spread out over time instead of being + // clustered when DataCube.getValueSetForAllBuckets is called. This improves the FPS rate. 
+ bucket.receiveData(bucketData, true); + } else { + bucket.receiveData(bucketData); + } } } diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/texture_bucket_manager.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/texture_bucket_manager.ts index 4aecca39080..d2dcda8e2f1 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/texture_bucket_manager.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/texture_bucket_manager.ts @@ -13,7 +13,7 @@ import UpdatableTexture from "libs/UpdatableTexture"; import constants from "oxalis/constants"; import window from "libs/window"; import type { ElementClass } from "types/api_flow_types"; -import { CuckooTableVec5 } from "./cuckoo_table_vec5"; +import { CuckooTableVec5 } from "libs/cuckoo/cuckoo_table_vec5"; import app from "app"; // A TextureBucketManager instance is responsible for making buckets available diff --git a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts index 2f38f2ef2fa..62ad36c8570 100644 --- a/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts +++ b/frontend/javascripts/oxalis/model/bucket_data_handling/wkstore_adapter.ts @@ -5,10 +5,13 @@ import { doWithToken } from "admin/admin_rest_api"; import { isSegmentationLayer, getByteCountFromLayer, - getMappingInfo, getResolutionInfo, + getMappingInfo, } from "oxalis/model/accessors/dataset_accessor"; -import { getVolumeTracingById } from "oxalis/model/accessors/volumetracing_accessor"; +import { + getVolumeTracingById, + needsLocalHdf5Mapping, +} from "oxalis/model/accessors/volumetracing_accessor"; import { parseMaybe } from "libs/utils"; import type { UpdateAction } from "oxalis/model/sagas/update_actions"; import { updateBucket } from "oxalis/model/sagas/update_actions"; @@ -113,9 +116,17 @@ export async function requestWithFallback( "tracingId" in layerInfo && layerInfo.tracingId != 
null ? getVolumeTracingById(state.tracing, layerInfo.tracingId) : null; - // For non-segmentation layers and for viewing datasets, we'll always use the datastore URL - const shouldUseDataStore = maybeVolumeTracing == null; - const requestUrl = shouldUseDataStore ? getDataStoreUrl() : getTracingStoreUrl(); + + // For non-segmentation layers and for viewing datasets, we'll always use the datastore URL. + // We also use the data store, if an hdf5 mapping should be locally applied. This is only the + // case if the proofreading tool is active or the layer was already proofread. In that case, + // no bucket data changes can exist on the tracing store. + const shouldUseDataStore = + maybeVolumeTracing == null || needsLocalHdf5Mapping(state, layerInfo.name); + + const requestUrl = shouldUseDataStore + ? getDataStoreUrl(maybeVolumeTracing?.fallbackLayer) + : getTracingStoreUrl(); const bucketBuffers = await requestFromStore(requestUrl, layerInfo, batch, maybeVolumeTracing); const missingBucketIndices = getNullIndices(bucketBuffers); @@ -128,7 +139,7 @@ export async function requestWithFallback( missingBucketIndices.length > 0 && maybeVolumeTracing != null && maybeVolumeTracing.fallbackLayer != null && - !maybeVolumeTracing.mappingIsEditable; + !maybeVolumeTracing.hasEditableMapping; if (!retry) { return bucketBuffers; @@ -167,24 +178,39 @@ export async function requestFromStore( const state = Store.getState(); const isSegmentation = isSegmentationLayer(state.dataset, layerInfo.name); const fourBit = state.datasetConfiguration.fourBit && !isSegmentation; - const activeMapping = getMappingInfo( - state.temporaryConfiguration.activeMappingByLayer, - layerInfo.name, - ); - const applyAgglomerates = - isSegmentation && - activeMapping != null && // Start to request mapped data during mapping activation phase already - activeMapping.mappingStatus !== MappingStatusEnum.DISABLED && - activeMapping.mappingType === "HDF5" + + // Mappings can be applied in the frontend or on the 
server. + const agglomerateMappingNameToApplyOnServer = (() => { + if (!isSegmentation) { + return null; + } + if (needsLocalHdf5Mapping(state, layerInfo.name)) { + return null; + } + const activeMapping = getMappingInfo( + state.temporaryConfiguration.activeMappingByLayer, + layerInfo.name, + ); + return activeMapping != null && // Start to request mapped data during mapping activation phase already + activeMapping.mappingStatus !== MappingStatusEnum.DISABLED && + activeMapping.mappingType === "HDF5" ? activeMapping.mappingName : null; + })(); + const resolutionInfo = getResolutionInfo(layerInfo.resolutions); const version = !isVolumeFallback && isSegmentation && maybeVolumeTracing != null ? maybeVolumeTracing.version : null; const bucketInfo = batch.map((zoomedAddress) => - createRequestBucketInfo(zoomedAddress, resolutionInfo, fourBit, applyAgglomerates, version), + createRequestBucketInfo( + zoomedAddress, + resolutionInfo, + fourBit, + agglomerateMappingNameToApplyOnServer, + version, + ), ); try { diff --git a/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts b/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts index f991ebad605..7d444e1b9a3 100644 --- a/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts +++ b/frontend/javascripts/oxalis/model/helpers/action_logger_middleware.ts @@ -1,8 +1,16 @@ import _ from "lodash"; import type { Dispatch } from "redux"; import type { Action } from "oxalis/model/actions/actions"; + const MAX_ACTION_LOG_LENGTH = 250; let actionLog: string[] = []; + +// For grouping consecutive action types +let lastActionName: string | null = null; +let lastActionCount: number = 0; + +const DEBUG_OUTPUT_FOR_ACTIONS = false; + const actionBlacklist = [ "ADD_TO_LAYER", "MOVE_FLYCAM", @@ -31,9 +39,26 @@ export default function actionLoggerMiddleware(): ( const isBlackListed = actionBlacklist.includes(action.type); if (!isBlackListed) { - actionLog.push(action.type); + if (lastActionName 
== null || lastActionName !== action.type) { + actionLog.push(action.type); + lastActionCount = 1; + } else { + lastActionCount++; + actionLog[actionLog.length - 1] = lastActionName + " * " + lastActionCount; + } + lastActionName = action.type; + const overflowCount = Math.max(actionLog.length - MAX_ACTION_LOG_LENGTH, 0); actionLog = _.drop(actionLog, overflowCount); + + if (DEBUG_OUTPUT_FOR_ACTIONS) { + console.group(action.type); + console.info("dispatching", action); + let result = next(action); + // console.log('next state', store.getState()) + console.groupEnd(); + return result; + } } return next(action); diff --git a/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts b/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts index 2590f4a8888..cd3430779d9 100644 --- a/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts +++ b/frontend/javascripts/oxalis/model/helpers/proto_helpers.ts @@ -4,6 +4,10 @@ import type { ServerTracing } from "types/api_flow_types"; import SkeletonTracingProto from "SkeletonTracing.proto"; // @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'VolumeTracing.proto' or its co... Remove this comment to see the full error message import VolumeTracingProto from "VolumeTracing.proto"; +// @ts-expect-error ts-migrate(2307) FIXME: Cannot find module 'ListOfLong.proto' or its co... Remove this comment to see the full error message +import ListOfLongProto from "ListOfLong.proto"; +import { isBigInt } from "libs/utils"; + const PROTO_FILES = { skeleton: SkeletonTracingProto, volume: VolumeTracingProto, @@ -13,21 +17,51 @@ const PROTO_TYPES = { skeleton: `${PROTO_PACKAGE}.SkeletonTracing`, volume: `${PROTO_PACKAGE}.VolumeTracing`, }; + export function parseProtoTracing( tracingArrayBuffer: ArrayBuffer, - annotationType: string, + annotationType: "skeleton" | "volume", ): ServerTracing { - // @ts-expect-error ts-migrate(7053) FIXME: Element implicitly has an 'any' type because expre... 
Remove this comment to see the full error message const protoRoot = Root.fromJSON(PROTO_FILES[annotationType]); - // @ts-expect-error ts-migrate(7053) FIXME: Element implicitly has an 'any' type because expre... Remove this comment to see the full error message const messageType = protoRoot.lookupType(PROTO_TYPES[annotationType]); const message = messageType.decode(new Uint8Array(tracingArrayBuffer)); - // @ts-expect-error ts-migrate(2322) FIXME: Type '{ [k: string]: any; }' is not assignable to ... Remove this comment to see the full error message return messageType.toObject(message, { arrays: true, objects: true, enums: String, longs: Number, - }); + }) as ServerTracing; +} + +export function serializeProtoListOfLong( + numbersOrBigInts: Array, +): ArrayBuffer { + // TODO: Proper 64 bit support (#6921) + const numbers = + numbersOrBigInts.length > 0 && isBigInt(numbersOrBigInts[0]) + ? numbersOrBigInts.map((val) => Number(val)) + : numbersOrBigInts; + + const listOfLong = { items: numbers }; + const protoRoot = Root.fromJSON(ListOfLongProto); + const messageType = protoRoot.lookupType(`${PROTO_PACKAGE}.ListOfLong`); + const errMsg = messageType.verify(listOfLong); + if (errMsg) throw Error(errMsg); + const message = messageType.create(listOfLong); + return messageType.encode(message).finish(); +} + +export function parseProtoListOfLong( + listArrayBuffer: ArrayBuffer, +): Array { + const protoRoot = Root.fromJSON(ListOfLongProto); + const messageType = protoRoot.lookupType(`${PROTO_PACKAGE}.ListOfLong`); + const message = messageType.decode(new Uint8Array(listArrayBuffer)); + return messageType.toObject(message, { + arrays: true, + objects: true, + enums: String, + longs: Number, + }).items; } export default {}; diff --git a/frontend/javascripts/oxalis/model/helpers/shader_editor.ts b/frontend/javascripts/oxalis/model/helpers/shader_editor.ts index c1db5b9c0f5..920164aa969 100644 --- a/frontend/javascripts/oxalis/model/helpers/shader_editor.ts +++ 
b/frontend/javascripts/oxalis/model/helpers/shader_editor.ts @@ -77,7 +77,13 @@ window._setupShaderReporting = () => { for (const line of args[0].split("\n")) { const maybeLineNum = line.split(":")[0]; if (!isNaN(parseInt(maybeLineNum))) { - linesByLineNum[maybeLineNum] = line; + if (linesByLineNum[maybeLineNum] != null) { + // Sometimes errors appear in fragment as well as vertex shaders. Simply + // show both. + linesByLineNum[maybeLineNum] += "\nor:\n" + line; + } else { + linesByLineNum[maybeLineNum] = line; + } } } diff --git a/frontend/javascripts/oxalis/model/reducers/dataset_reducer.ts b/frontend/javascripts/oxalis/model/reducers/dataset_reducer.ts index b4690e2cd01..295a6d411ad 100644 --- a/frontend/javascripts/oxalis/model/reducers/dataset_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/dataset_reducer.ts @@ -46,7 +46,6 @@ function DatasetReducer(state: OxalisState, action: Action): OxalisState { mappingColors: null, hideUnmappedIds: false, mappingStatus: MappingStatusEnum.DISABLED, - mappingSize: 0, mappingType: "JSON", })), }, diff --git a/frontend/javascripts/oxalis/model/reducers/settings_reducer.ts b/frontend/javascripts/oxalis/model/reducers/settings_reducer.ts index b32cec588d5..9a702951eb1 100644 --- a/frontend/javascripts/oxalis/model/reducers/settings_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/settings_reducer.ts @@ -222,6 +222,16 @@ function SettingsReducer(state: OxalisState, action: Action): OxalisState { }); } + case "FINISH_MAPPING_INITIALIZATION": { + const { layerName } = action; + return updateActiveMapping( + state, + { + mappingStatus: MappingStatusEnum.ENABLED, + }, + layerName, + ); + } case "SET_MAPPING_ENABLED": { const { isMappingEnabled, layerName } = action; @@ -267,7 +277,6 @@ function SettingsReducer(state: OxalisState, action: Action): OxalisState { mapping, mappingColors, mappingType, - mappingSize: mapping != null ? mapping.size : 0, hideUnmappedIds, mappingStatus: mappingName != null ? 
MappingStatusEnum.ACTIVATING : MappingStatusEnum.DISABLED, @@ -276,6 +285,18 @@ function SettingsReducer(state: OxalisState, action: Action): OxalisState { ); } + case "CLEAR_MAPPING": { + const { layerName } = action; + + return updateActiveMapping( + state, + { + mapping: undefined, + }, + layerName, + ); + } + case "SET_MAPPING_NAME": { const { mappingName, layerName } = action; diff --git a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts index d625b36c5a9..31037d3ca38 100644 --- a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts +++ b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer.ts @@ -44,6 +44,7 @@ import DiffableMap from "libs/diffable_map"; import * as Utils from "libs/utils"; import type { AdditionalCoordinate, ServerVolumeTracing } from "types/api_flow_types"; import { + FinishMappingInitializationAction, SetMappingAction, SetMappingEnabledAction, SetMappingNameAction, @@ -259,7 +260,7 @@ export function serverVolumeToClientVolumeTracing(tracing: ServerVolumeTracing): fallbackLayer: tracing.fallbackLayer, userBoundingBoxes, mappingName: tracing.mappingName, - mappingIsEditable: tracing.mappingIsEditable, + hasEditableMapping: tracing.hasEditableMapping, mappingIsLocked: tracing.mappingIsLocked, hasSegmentIndex: tracing.hasSegmentIndex || false, additionalAxes: convertServerAdditionalAxesToFrontEnd(tracing.additionalAxes), @@ -269,7 +270,12 @@ export function serverVolumeToClientVolumeTracing(tracing: ServerVolumeTracing): function VolumeTracingReducer( state: OxalisState, - action: VolumeTracingAction | SetMappingAction | SetMappingEnabledAction | SetMappingNameAction, + action: + | VolumeTracingAction + | SetMappingAction + | FinishMappingInitializationAction + | SetMappingEnabledAction + | SetMappingNameAction, ): OxalisState { switch (action.type) { case "INITIALIZE_VOLUMETRACING": { @@ -418,8 +424,17 @@ function 
VolumeTracingReducer( } case "SET_MAPPING": { + // We only need to store the name of the mapping here. Also see the settings_reducer where + // SET_MAPPING is also handled. return setMappingNameReducer(state, volumeTracing, action.mappingName, action.mappingType); } + case "FINISH_MAPPING_INITIALIZATION": { + const { mappingName, mappingType } = getMappingInfo( + state.temporaryConfiguration.activeMappingByLayer, + action.layerName, + ); + return setMappingNameReducer(state, volumeTracing, mappingName, mappingType, true); + } case "SET_MAPPING_ENABLED": { const { mappingName, mappingType } = getMappingInfo( @@ -437,19 +452,19 @@ function VolumeTracingReducer( case "SET_MAPPING_NAME": { // Editable mappings cannot be disabled or switched for now - if (volumeTracing.mappingIsEditable || volumeTracing.mappingIsLocked) return state; + if (volumeTracing.hasEditableMapping || volumeTracing.mappingIsLocked) return state; const { mappingName, mappingType } = action; return setMappingNameReducer(state, volumeTracing, mappingName, mappingType); } - case "SET_MAPPING_IS_EDITABLE": { + case "SET_HAS_EDITABLE_MAPPING": { // Editable mappings cannot be disabled or switched for now. - if (volumeTracing.mappingIsEditable || volumeTracing.mappingIsLocked) return state; + if (volumeTracing.hasEditableMapping || volumeTracing.mappingIsLocked) return state; // An editable mapping is always locked. 
return updateVolumeTracing(state, volumeTracing.tracingId, { - mappingIsEditable: true, + hasEditableMapping: true, mappingIsLocked: true, }); } diff --git a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.ts b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.ts index e7a7c26a833..dc7e0260476 100644 --- a/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.ts +++ b/frontend/javascripts/oxalis/model/reducers/volumetracing_reducer_helpers.ts @@ -159,7 +159,7 @@ export function setMappingNameReducer( isMappingEnabled: boolean = true, ) { // Editable mappings or locked mappings cannot be disabled or switched for now - if (volumeTracing.mappingIsEditable || volumeTracing.mappingIsLocked) { + if (volumeTracing.hasEditableMapping || volumeTracing.mappingIsLocked) { return state; } // Only HDF5 mappings are persisted in volume annotations for now diff --git a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx index 89ffabed346..c57a2970d25 100644 --- a/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/annotation_saga.tsx @@ -8,6 +8,7 @@ import { type SetOthersMayEditForAnnotationAction, } from "oxalis/model/actions/annotation_actions"; import type { EditableAnnotation } from "admin/admin_rest_api"; +import { ActionPattern } from "redux-saga/effects"; import { editAnnotation, updateAnnotationLayer, @@ -45,9 +46,11 @@ import { mayEditAnnotationProperties } from "../accessors/annotation_accessor"; import { determineLayout } from "oxalis/view/layouting/default_layout_configs"; import { getLastActiveLayout, getLayoutConfig } from "oxalis/view/layouting/layout_persistence"; import { is3dViewportMaximized } from "oxalis/view/layouting/flex_layout_helper"; +import { needsLocalHdf5Mapping } from "../accessors/volumetracing_accessor"; -/* Note that this must stay in sync with the back-end 
constant - compare https://github.com/scalableminds/webknossos/issues/5223 */ +/* Note that this must stay in sync with the back-end constant MaxMagForAgglomerateMapping + compare https://github.com/scalableminds/webknossos/issues/5223. + */ const MAX_MAG_FOR_AGGLOMERATE_MAPPING = 16; export function* pushAnnotationUpdateAsync(action: Action) { @@ -146,18 +149,18 @@ export function* warnAboutSegmentationZoom(): Saga { return; } - const isAgglomerateMappingEnabled = yield* select((storeState) => { + const isRemoteAgglomerateMappingEnabled = yield* select((storeState) => { if (!segmentationLayer) { return false; } - const mappingInfo = getMappingInfo( storeState.temporaryConfiguration.activeMappingByLayer, segmentationLayer.name, ); return ( mappingInfo.mappingStatus === MappingStatusEnum.ENABLED && - mappingInfo.mappingType === "HDF5" + mappingInfo.mappingType === "HDF5" && + !needsLocalHdf5Mapping(storeState, segmentationLayer.name) ); }); const isZoomThresholdExceeded = yield* select( @@ -166,7 +169,11 @@ export function* warnAboutSegmentationZoom(): Saga { Math.log2(MAX_MAG_FOR_AGGLOMERATE_MAPPING), ); - if (shouldDisplaySegmentationData() && isAgglomerateMappingEnabled && isZoomThresholdExceeded) { + if ( + shouldDisplaySegmentationData() && + isRemoteAgglomerateMappingEnabled && + isZoomThresholdExceeded + ) { Toast.error(messages["tracing.segmentation_zoom_warning_agglomerate"], { sticky: false, timeout: 3000, @@ -192,11 +199,12 @@ export function* warnAboutSegmentationZoom(): Saga { "SET_STORED_LAYOUTS", "SET_MAPPING", "SET_MAPPING_ENABLED", + "FINISH_MAPPING_INITIALIZATION", (action: Action) => action.type === "UPDATE_LAYER_SETTING" && action.layerName === segmentationLayerName && action.propertyName === "alpha", - ]); + ] as ActionPattern); yield* warnMaybe(); } } @@ -210,8 +218,9 @@ export function* watchAnnotationAsync(): Saga { yield* takeLatest("SET_ANNOTATION_VISIBILITY", pushAnnotationUpdateAsync); yield* takeLatest("SET_ANNOTATION_DESCRIPTION", 
pushAnnotationUpdateAsync); yield* takeLatest( - (action: Action) => - action.type === "UPDATE_LAYER_SETTING" && action.propertyName === "isDisabled", + ((action: Action) => + action.type === "UPDATE_LAYER_SETTING" && + action.propertyName === "isDisabled") as ActionPattern, pushAnnotationUpdateAsync, ); yield* takeLatest("EDIT_ANNOTATION_LAYER", pushAnnotationLayerUpdateAsync); diff --git a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts index e8345af38df..167d5ba7316 100644 --- a/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/mapping_saga.ts @@ -1,20 +1,39 @@ import _ from "lodash"; import type { Saga } from "oxalis/model/sagas/effect-generators"; -import { all, call, takeEvery, takeLatest, take, put, fork, actionChannel } from "typed-redux-saga"; +import { + all, + call, + cancel, + fork, + takeEvery, + takeLatest, + take, + put, + race, + actionChannel, + flush, +} from "typed-redux-saga"; +import { api } from "oxalis/singletons"; +import { buffers, eventChannel } from "redux-saga"; import { select } from "oxalis/model/sagas/effect-generators"; import { message } from "antd"; import type { OptionalMappingProperties, SetMappingAction, - SetMappingEnabledAction, } from "oxalis/model/actions/settings_actions"; -import { setMappingAction, setMappingEnabledAction } from "oxalis/model/actions/settings_actions"; +import { + clearMappingAction, + finishMappingInitializationAction, + setMappingAction, +} from "oxalis/model/actions/settings_actions"; import { fetchMapping, getMappingsForDatasetLayer, getAgglomeratesForDatasetLayer, + getAgglomeratesForSegmentsFromDatastore, + getAgglomeratesForSegmentsFromTracingstore, } from "admin/admin_rest_api"; -import type { APIMapping } from "types/api_flow_types"; +import type { APIDataLayer, APIMapping } from "types/api_flow_types"; import { EnsureLayerMappingsAreLoadedAction, setLayerMappingsAction, @@ -22,37 
+41,104 @@ import { import { getLayerByName, getMappingInfo, + getSegmentationLayers, getVisibleSegmentationLayer, } from "oxalis/model/accessors/dataset_accessor"; -import type { ActiveMappingInfo, Mapping } from "oxalis/store"; +import type { + ActiveMappingInfo, + Mapping, + MappingType, + NumberLike, + NumberLikeMap, +} from "oxalis/store"; import ErrorHandling from "libs/error_handling"; -import { MAPPING_MESSAGE_KEY } from "oxalis/model/bucket_data_handling/mappings"; -import { api } from "oxalis/singletons"; -import { MappingStatusEnum } from "oxalis/constants"; -import { isMappingActivationAllowed } from "oxalis/model/accessors/volumetracing_accessor"; +import { + MAPPING_MESSAGE_KEY, + setCacheResultForDiffMappings, +} from "oxalis/model/bucket_data_handling/mappings"; +import { Model } from "oxalis/singletons"; +import { + isMappingActivationAllowed, + getEditableMappingForVolumeTracingId, + needsLocalHdf5Mapping as getNeedsLocalHdf5Mapping, + getBucketRetrievalSourceFn, + BucketRetrievalSource, +} from "oxalis/model/accessors/volumetracing_accessor"; import Toast from "libs/toast"; import { jsHsv2rgb } from "oxalis/shaders/utils.glsl"; import { updateSegmentAction } from "../actions/volumetracing_actions"; +import { MappingStatusEnum } from "oxalis/constants"; +import DataCube from "../bucket_data_handling/data_cube"; +import { fastDiffSetAndMap, sleep } from "libs/utils"; +import { Action } from "../actions/actions"; +import { ActionPattern } from "redux-saga/effects"; +import { listenToStoreProperty } from "../helpers/listener_helpers"; + type APIMappings = Record; +type Container = { value: T }; -const isAgglomerate = (mapping: ActiveMappingInfo) => { - if (!mapping) { - return false; - } +const takeLatestMappingChange = ( + oldActiveMappingByLayer: Container>, + layerName: string, +) => { + return fork(function* () { + let lastWatcherTask; + let lastBucketRetrievalSource; + + const bucketRetrievalSourceChannel = 
createBucketRetrievalSourceChannel(layerName); + const getBucketRetrievalSourceForLayer = getBucketRetrievalSourceFn(layerName); + while (true) { + lastBucketRetrievalSource = yield* select((state) => getBucketRetrievalSourceForLayer(state)); + const bucketRetrievalSource = yield* take(bucketRetrievalSourceChannel); + + const activeMappingByLayer = yield* select( + (state) => state.temporaryConfiguration.activeMappingByLayer, + ); + const mapping = getMappingInfo(activeMappingByLayer, layerName); + + console.log("Changed from", lastBucketRetrievalSource, "to", bucketRetrievalSource); + + if (lastWatcherTask) { + console.log("Cancel old bucket watcher"); + yield cancel(lastWatcherTask); + lastWatcherTask = null; + } - return mapping.mappingType === "HDF5"; + // Changing between REQUESTED-WITH-MAPPING <> REQUESTED-WITHOUT-MAPPING + if (lastBucketRetrievalSource[0] !== bucketRetrievalSource[0]) { + yield* call(reloadData, oldActiveMappingByLayer, { layerName }); + } + + const needsLocalHdf5Mapping = yield* select((state) => + getNeedsLocalHdf5Mapping(state, layerName), + ); + if (needsLocalHdf5Mapping) { + // Start a new watcher + console.log("Start new bucket watcher for layer", layerName); + lastWatcherTask = yield* fork(watchChangedBucketsForLayer, layerName); + } else if ( + lastBucketRetrievalSource[0] === "REQUESTED-WITHOUT-MAPPING" && + lastBucketRetrievalSource[1] === "LOCAL-MAPPING-APPLIED" && + mapping.mappingType !== "JSON" + ) { + // needsLocalHdf5Mapping is false, but in the last iteration, a local mapping + // was applied. In case of a HDF5 mapping, this means that the mapping should + // now be applied by the back-end. We have to clear the mapping so that + // the data from the back-end is not mapped again. 
+ yield* put(clearMappingAction(layerName)); + } + } + }); }; export default function* watchActivatedMappings(): Saga { - const oldActiveMappingByLayer = yield* select( - (state) => state.temporaryConfiguration.activeMappingByLayer, - ); + const oldActiveMappingByLayer = { + value: yield* select((state) => state.temporaryConfiguration.activeMappingByLayer), + }; // Buffer actions since they might be dispatched before WK_READY const setMappingActionChannel = yield* actionChannel("SET_MAPPING"); - const mappingChangeActionChannel = yield* actionChannel(["SET_MAPPING_ENABLED"]); yield* take("WK_READY"); yield* takeLatest(setMappingActionChannel, handleSetMapping, oldActiveMappingByLayer); - yield* takeEvery(mappingChangeActionChannel, maybeReloadData, oldActiveMappingByLayer); yield* takeEvery( "ENSURE_LAYER_MAPPINGS_ARE_LOADED", function* handler(action: EnsureLayerMappingsAreLoadedAction) { @@ -63,35 +149,151 @@ export default function* watchActivatedMappings(): Saga { } }, ); + const segmentationLayers = yield* select((state) => getSegmentationLayers(state.dataset)); + for (const layer of segmentationLayers) { + // The following saga will fork internally. 
+ yield* takeLatestMappingChange(oldActiveMappingByLayer, layer.name); + } } -function* maybeReloadData( - oldActiveMappingByLayer: Record, - action: SetMappingAction | SetMappingEnabledAction, +const isAgglomerate = (mapping: ActiveMappingInfo) => { + return mapping.mappingType === "HDF5"; +}; + +function* reloadData( + oldActiveMappingByLayer: Container>, + action: { layerName: string }, ): Saga { const { layerName } = action; - const oldMapping = getMappingInfo(oldActiveMappingByLayer, layerName); const activeMappingByLayer = yield* select( (state) => state.temporaryConfiguration.activeMappingByLayer, ); const mapping = getMappingInfo(activeMappingByLayer, layerName); - const isAgglomerateMappingInvolved = isAgglomerate(oldMapping) || isAgglomerate(mapping); - const hasChanged = oldMapping !== mapping; - const shouldReload = isAgglomerateMappingInvolved && hasChanged; - if (shouldReload) { - yield* call([api.data, api.data.reloadBuckets], layerName); + // Especially, when switching between move tool and proofreading tool for the first time + // a latency is to be expected. Therefore, we want to notify the user about what's happening. + // After a proofreading action has been made, tool switching won't cause a layer reload. + message.loading({ + content: "Reloading segmentation data...", + duration: 1, + }); + yield* call([api.data, api.data.reloadBuckets], layerName); + + const needsLocalHdf5Mapping = yield* select((state) => + getNeedsLocalHdf5Mapping(state, layerName), + ); + + // If an agglomerate mapping is being activated (that is applied remotely), the data + // reload is the last step of the mapping activation. 
For JSON mappings or locally applied + // HDF5 mappings, the last step of the mapping activation is the texture creation in mappings.ts + if (isAgglomerate(mapping) && !needsLocalHdf5Mapping) { + if (mapping.mappingStatus === MappingStatusEnum.ACTIVATING) { + yield* put(finishMappingInitializationAction(layerName)); + message.destroy(MAPPING_MESSAGE_KEY); + } else if (mapping.mappingStatus === MappingStatusEnum.ENABLED) { + // If the mapping is already enabled (happens when an annotation was loaded initially + // with a remotely applied hdf5 mapping), ensure that the message to the user is hidden, too. + message.destroy(MAPPING_MESSAGE_KEY); + } } - // If an agglomerate mapping is being activated, the data reload is the last step - // of the mapping activation. For JSON mappings, the last step of the mapping activation - // is the texture creation in mappings.js - if (isAgglomerate(mapping) && mapping.mappingStatus === MappingStatusEnum.ACTIVATING) { - yield* put(setMappingEnabledAction(layerName, true)); - message.destroy(MAPPING_MESSAGE_KEY); + oldActiveMappingByLayer.value = activeMappingByLayer; +} + +function createBucketDataChangedChannel(dataCube: DataCube) { + return eventChannel((emit) => { + const bucketDataChangedHandler = () => { + emit("BUCKET_DATA_CHANGED"); + }; + + const unbind = dataCube.emitter.on("bucketDataChanged", bucketDataChangedHandler); + return unbind; + }, buffers.sliding(1)); +} + +function createBucketRetrievalSourceChannel(layerName: string) { + const getBucketRetrievalSourceForLayer = getBucketRetrievalSourceFn(layerName); + return eventChannel((emit) => { + const unbind = listenToStoreProperty( + (state) => getBucketRetrievalSourceForLayer(state), + (retrievalSource) => emit(retrievalSource), + ); + return unbind; + }, buffers.sliding(1)); +} + +function* watchChangedBucketsForLayer(layerName: string): Saga { + const dataCube = yield* call([Model, Model.getCubeByLayerName], layerName); + const bucketChannel = yield* 
call(createBucketDataChangedChannel, dataCube); + + while (true) { + yield take(bucketChannel); + // We received a BUCKET_DATA_CHANGED event. `handler` needs to be invoked. + // However, let's throttle¹ this by waiting and then discarding all other events + // that might have accumulated in between. + yield* call(sleep, 500); + yield flush(bucketChannel); + // After flushing and while the handler below is running, + // the bucketChannel might fill up again. This means, the + // next loop will immediately take from the channel which + // is what we need. + yield* call(handler); + + // Addendum: + // ¹ We don't use redux-saga's throttle, because that would + // call `handler` in parallel if enough events are + // consumed over the throttling duration. + // However, running `handler` in parallel would be a waste + // of computation. Therefore, we invoke `handler` strictly + // sequentially. } - oldActiveMappingByLayer = activeMappingByLayer; + function* handler() { + const dataset = yield* select((state) => state.dataset); + const layerInfo = getLayerByName(dataset, layerName); + const mappingInfo = yield* select((state) => + getMappingInfo(state.temporaryConfiguration.activeMappingByLayer, layerName), + ); + const { mappingName, mappingType, mappingStatus } = mappingInfo; + + if (mappingName == null || mappingStatus !== MappingStatusEnum.ENABLED) { + return; + } + + // Updating the HDF5 mapping is an async task which requires communication with + // the back-end. If the front-end does a proofreading operation in parallel, + // there is a risk of a race condition. Therefore, we cancel the updateHdf5 + // saga as soon as WK enters a busy state and retry afterwards. 
+ while (true) { + let isBusy = yield* select((state) => state.uiInformation.busyBlockingInfo.isBusy); + if (!isBusy) { + const { cancel } = yield* race({ + updateHdf5: call(updateLocalHdf5Mapping, layerName, layerInfo, mappingName, mappingType), + cancel: take( + ((action: Action) => + action.type === "SET_BUSY_BLOCKING_INFO_ACTION" && + action.value.isBusy) as ActionPattern, + ), + }); + if (!cancel) { + return; + } + console.log("Cancelled updateHdf5"); + } + + isBusy = yield* select((state) => state.uiInformation.busyBlockingInfo.isBusy); + if (isBusy) { + // Wait until WK is not busy anymore. + yield* take( + ((action: Action) => + action.type === "SET_BUSY_BLOCKING_INFO_ACTION" && + !action.value.isBusy) as ActionPattern, + ); + } + + console.log("Retrying updateHdf5..."); + } + } } function* loadLayerMappings(layerName: string, updateInStore: boolean): Saga<[string[], string[]]> { @@ -126,7 +328,7 @@ function* loadLayerMappings(layerName: string, updateInStore: boolean): Saga<[st } function* handleSetMapping( - oldActiveMappingByLayer: Record, + oldActiveMappingByLayer: Container>, action: SetMappingAction, ): Saga { const { @@ -152,7 +354,7 @@ function* handleSetMapping( // Only the custom colors have to be configured, if they // were passed. 
if (action.mappingColors) { - const classes = convertMappingObjectToClasses(existingMapping); + const classes = convertMappingObjectToEquivalenceClasses(existingMapping); yield* call(setCustomColors, action, classes, layerName); } return; @@ -162,56 +364,130 @@ function* handleSetMapping( message.loading({ content: "Activating Mapping", key: MAPPING_MESSAGE_KEY, + duration: 0, }); } + const layerInfo = yield* select((state) => getLayerByName(state.dataset, layerName)); + + const success = yield* call( + ensureMappingsAreLoadedAndRequestedMappingExists, + layerInfo, + mappingName, + mappingType, + ); + if (!success) { + return; + } + + if (mappingType === "JSON") { + yield* call(handleSetJsonMapping, layerName, mappingName, mappingType); + } else if (mappingType === "HDF5") { + yield* call( + handleSetHdf5Mapping, + layerName, + layerInfo, + mappingName, + mappingType, + action, + oldActiveMappingByLayer, + ); + } +} + +function* handleSetHdf5Mapping( + layerName: string, + layerInfo: APIDataLayer, + mappingName: string, + mappingType: MappingType, + action: SetMappingAction, + oldActiveMappingByLayer: Container>, +): Saga { + if (yield* select((state) => getNeedsLocalHdf5Mapping(state, layerName))) { + yield* call(updateLocalHdf5Mapping, layerName, layerInfo, mappingName, mappingType); + } else { + // An HDF5 mapping was set that is applied remotely. A reload is necessary. + yield* call(reloadData, oldActiveMappingByLayer, action); + } +} + +function* updateLocalHdf5Mapping( + layerName: string, + layerInfo: APIDataLayer, + mappingName: string, + mappingType: MappingType, +): Saga { const dataset = yield* select((state) => state.dataset); - const layerInfo = getLayerByName(dataset, layerName); + const annotation = yield* select((state) => state.tracing); + // If there is a fallbackLayer, request mappings for that instead of the tracing segmentation layer + const mappingLayerName = + "fallbackLayer" in layerInfo && layerInfo.fallbackLayer != null + ? 
layerInfo.fallbackLayer + : layerName; - // Make sure the available mappings are persisted in the store if they are not already - const areServerHdf5MappingsInStore = - "agglomerates" in layerInfo && layerInfo.agglomerates != null; - const [jsonMappings, serverHdf5Mappings] = yield* call( - loadLayerMappings, - layerName, - !areServerHdf5MappingsInStore, + const editableMapping = yield* select((state) => + getEditableMappingForVolumeTracingId(state, layerName), ); - const editableMappings = yield* select((state) => - state.tracing.volumes - .filter((volumeTracing) => volumeTracing.mappingIsEditable) - .map((volumeTracing) => volumeTracing.mappingName), + const cube = Model.getCubeByLayerName(layerName); + const segmentIds = cube.getValueSetForAllBuckets(); + + const previousMapping = yield* select( + (store) => + store.temporaryConfiguration.activeMappingByLayer[layerName].mapping || + (new Map() as Mapping), ); - const hdf5Mappings = [...serverHdf5Mappings, ...editableMappings]; - const mappingsWithCorrectType = mappingType === "JSON" ? jsonMappings : hdf5Mappings; - if (!mappingsWithCorrectType.includes(mappingName)) { - // Mapping does not exist, set mappingName back to null - const availableMappings = mappingsWithCorrectType.join(","); - const availableMappingsString = - availableMappings.length > 0 - ? `Available ${mappingType} mappings are ${availableMappings}` - : `There are no available ${mappingType} mappings`; - const errorMessage = `Mapping with name ${mappingName} and type ${mappingType} does not exist. ${availableMappingsString}.`; - message.error({ - content: errorMessage, - key: MAPPING_MESSAGE_KEY, - duration: 10, - }); - console.error(errorMessage); - yield* put(setMappingAction(layerName, null, mappingType)); - return; + const { + aWithoutB: newSegmentIds, + bWithoutA: deletedValues, + // The `intersection` value returned by diffSetAndMap + // is a fresh Map instance which is why we may mutate + // that without any problems. 
This is done later to + // avoid duplicating data for performance reasons. + intersection: mutableRemainingEntries, + } = fastDiffSetAndMap(segmentIds as Set, previousMapping); + + const newEntries = + editableMapping != null + ? yield* call( + getAgglomeratesForSegmentsFromTracingstore, + annotation.tracingStore.url, + editableMapping.tracingId, + Array.from(newSegmentIds), + ) + : yield* call( + getAgglomeratesForSegmentsFromDatastore, + dataset.dataStore.url, + dataset, + mappingLayerName, + mappingName, + Array.from(newSegmentIds), + ); + + // It is safe to mutate mutableRemainingEntries to compute the merged, + // new mapping. See the definition of mutableRemainingEntries. + const mapping = mutableRemainingEntries as Mapping; + for (const [key, val] of newEntries.entries()) { + // @ts-ignore + mapping.set(key, val); } - // Call maybeReloadData only after it was checked whether the activated mapping is valid, otherwise there would - // be a race between the maybeReloadData and handleSetMapping sagas - yield* fork(maybeReloadData, oldActiveMappingByLayer, action); + setCacheResultForDiffMappings(previousMapping, mapping, { + changed: [], + onlyA: deletedValues, + onlyB: newSegmentIds, + }); - if (mappingType !== "JSON") { - // Only JSON mappings need to be fetched, HDF5 mappings are applied by the server - return; - } + yield* put(setMappingAction(layerName, mappingName, mappingType, { mapping })); +} +function* handleSetJsonMapping( + layerName: string, + mappingName: string, + mappingType: MappingType, +): Saga { + console.time("MappingSaga JSON"); const fetchedMappings: APIMappings = {}; try { yield* call(fetchMappings, layerName, mappingName, fetchedMappings); @@ -241,24 +517,18 @@ function* handleSetMapping( yield* call(setCustomColors, mappingProperties, fetchedMapping.classes || [], layerName); } - if (layerInfo.elementClass === "uint64") { - yield* call( - [Toast, Toast.warning], - "The activated mapping will only be valid for the lower 32-bits of the 
active 64-bit segmentation.", - { sticky: true }, - ); - } - + console.timeEnd("MappingSaga JSON"); yield* put(setMappingAction(layerName, mappingName, mappingType, mappingProperties)); } -function convertMappingObjectToClasses(existingMapping: Mapping) { +function convertMappingObjectToEquivalenceClasses(existingMapping: Mapping) { const classesByRepresentative: Record = {}; - for (const unmapped of existingMapping.keys()) { - // @ts-ignore unmapped is guaranteed to exist in existingMapping as it was obtained using existingMapping.keys() - const mapped: number = existingMapping.get(unmapped); + for (let [unmapped, mapped] of existingMapping.entries()) { + // TODO: Proper 64 bit support (#6921) + unmapped = Number(unmapped); + mapped = Number(mapped); classesByRepresentative[mapped] = classesByRepresentative[mapped] || []; - classesByRepresentative[mapped].push(unmapped); + classesByRepresentative[mapped].push(Number(unmapped)); } const classes = Object.values(classesByRepresentative); return classes; @@ -277,12 +547,13 @@ function* setCustomColors( const firstIdEntry = aClass[0]; if (firstIdEntry == null) continue; - const representativeId = mappingProperties.mapping.get(firstIdEntry); + const representativeId = (mappingProperties.mapping as NumberLikeMap).get(firstIdEntry); if (representativeId == null) continue; const hueValue = mappingProperties.mappingColors[classIdx]; const color = jsHsv2rgb(360 * hueValue, 1, 1); - yield* put(updateSegmentAction(representativeId, { color }, layerName)); + // TODO: Proper 64 bit support (#6921) + yield* put(updateSegmentAction(Number(representativeId), { color }, layerName)); classIdx++; } @@ -315,7 +586,7 @@ function* fetchMappings( } function buildMappingObject(mappingName: string, fetchedMappings: APIMappings): Mapping { - const mappingObject: Mapping = new Map(); + const mappingObject = new Map(); for (const currentMappingName of getMappingChain(mappingName, fetchedMappings)) { const mapping = 
fetchedMappings[currentMappingName]; @@ -352,3 +623,47 @@ function getMappingChain(mappingName: string, fetchedMappings: APIMappings): Arr return chain; } + +function* ensureMappingsAreLoadedAndRequestedMappingExists( + layerInfo: APIDataLayer, + mappingName: string, + mappingType: MappingType, +) { + const { name: layerName } = layerInfo; + // Make sure the available mappings are persisted in the store if they are not already + const areServerHdf5MappingsInStore = + "agglomerates" in layerInfo && layerInfo.agglomerates != null; + const [jsonMappings, serverHdf5Mappings] = yield* call( + loadLayerMappings, + layerName, + !areServerHdf5MappingsInStore, + ); + + const editableMappings = yield* select((state) => + state.tracing.volumes + .filter((volumeTracing) => volumeTracing.hasEditableMapping) + .map((volumeTracing) => volumeTracing.mappingName), + ); + const hdf5Mappings = [...serverHdf5Mappings, ...editableMappings]; + const mappingsWithCorrectType = mappingType === "JSON" ? jsonMappings : hdf5Mappings; + + if (!mappingsWithCorrectType.includes(mappingName)) { + // Mapping does not exist, set mappingName back to null + const availableMappings = mappingsWithCorrectType.join(","); + const availableMappingsString = + availableMappings.length > 0 + ? `Available ${mappingType} mappings are ${availableMappings}` + : `There are no available ${mappingType} mappings`; + const errorMessage = `Mapping with name ${mappingName} and type ${mappingType} does not exist. 
${availableMappingsString}.`; + message.error({ + content: errorMessage, + key: MAPPING_MESSAGE_KEY, + duration: 10, + }); + console.error(errorMessage); + yield* put(setMappingAction(layerName, null, mappingType)); + return false; + } + + return true; +} diff --git a/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts b/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts index 954a4be2d32..9db5661b548 100644 --- a/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/mesh_saga.ts @@ -6,6 +6,7 @@ import ErrorHandling from "libs/error_handling"; import type { APIDataset, APIMeshFile, APISegmentationLayer } from "types/api_flow_types"; import { mergeBufferGeometries } from "libs/BufferGeometryUtils"; import Deferred from "libs/async/deferred"; +import { ActionPattern } from "redux-saga/effects"; import Store from "oxalis/store"; import { @@ -293,10 +294,10 @@ function* loadAdHocMesh( removeExistingMesh, ), cancel: take( - (action: Action) => + ((action: Action) => action.type === "REMOVE_MESH" && action.segmentId === segmentId && - action.layerName === layer.name, + action.layerName === layer.name) as ActionPattern, ), }); removeMeshWithoutVoxels(segmentId, layer.name, seedAdditionalCoordinates); @@ -351,7 +352,7 @@ function* loadFullAdHocMesh( if (meshExtraInfo.useDataStore != null) { // ... except if the caller specified whether to use the data store ... useDataStore = meshExtraInfo.useDataStore; - } else if (volumeTracing?.mappingIsEditable) { + } else if (volumeTracing?.hasEditableMapping) { // ... or if an editable mapping is active. useDataStore = false; } @@ -360,7 +361,7 @@ function* loadFullAdHocMesh( // and that don't have editable mappings. const usePositionsFromSegmentIndex = volumeTracing?.hasSegmentIndex && - !volumeTracing.mappingIsEditable && + !volumeTracing.hasEditableMapping && visibleSegmentationLayer?.tracingId != null; let positionsToRequest = usePositionsFromSegmentIndex ? 
yield* getChunkPositionsFromSegmentIndex( @@ -754,10 +755,10 @@ function* loadPrecomputedMesh(action: LoadPrecomputedMeshAction) { mergeChunks, ), cancel: take( - (otherAction: Action) => + ((otherAction: Action) => otherAction.type === "REMOVE_MESH" && otherAction.segmentId === segmentId && - otherAction.layerName === layer.name, + otherAction.layerName === layer.name) as ActionPattern, ), }); } diff --git a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts index f880bcb4998..1d3ecb155ee 100644 --- a/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts +++ b/frontend/javascripts/oxalis/model/sagas/proofread_saga.ts @@ -13,10 +13,14 @@ import { import { initializeEditableMappingAction, removeSegmentAction, - setMappingIsEditableAction, + setHasEditableMappingAction, updateSegmentAction, } from "oxalis/model/actions/volumetracing_actions"; -import type { ProofreadAtPositionAction } from "oxalis/model/actions/proofread_actions"; +import type { + MinCutAgglomerateWithPositionAction, + ProofreadAtPositionAction, + ProofreadMergeAction, +} from "oxalis/model/actions/proofread_actions"; import { enforceSkeletonTracing, findTreeByNodeId, @@ -49,13 +53,13 @@ import { } from "oxalis/model/accessors/dataset_accessor"; import { NeighborInfo, - getAgglomerateIdForSegmentId, + getAgglomeratesForSegmentsFromTracingstore, getEdgesForAgglomerateMinCut, getNeighborsForAgglomerateNode, getPositionForSegmentInAgglomerate, makeMappingEditable, } from "admin/admin_rest_api"; -import { setMappingNameAction } from "oxalis/model/actions/settings_actions"; +import { setMappingAction, setMappingNameAction } from "oxalis/model/actions/settings_actions"; import { getSegmentIdForPositionAsync } from "oxalis/controller/combinations/volume_handlers"; import { loadAdHocMeshAction, @@ -67,37 +71,55 @@ import { refreshMeshAction, removeMeshAction, } from "oxalis/model/actions/annotation_actions"; -import { 
getConstructorForElementClass } from "oxalis/model/bucket_data_handling/bucket"; -import { Tree, VolumeTracing } from "oxalis/store"; +import { ActiveMappingInfo, Mapping, NumberLikeMap, Tree, VolumeTracing } from "oxalis/store"; import _ from "lodash"; import { type AdditionalCoordinate } from "types/api_flow_types"; import { takeEveryUnlessBusy } from "./saga_helpers"; import { Action } from "../actions/actions"; +import { isBigInt, isNumberMap, SoftError } from "libs/utils"; +import { getCurrentResolution } from "../accessors/flycam_accessor"; + +function runSagaAndCatchSoftError(saga: (...args: any[]) => Saga) { + return function* (...args: any[]) { + try { + yield* call(saga, ...args); + } catch (exception) { + if (exception instanceof SoftError) { + yield* call([Toast, Toast.warning], exception.message); + return; + } + throw exception; + } + }; +} export default function* proofreadRootSaga(): Saga { yield* take("INITIALIZE_SKELETONTRACING"); yield* take("WK_READY"); yield* takeEveryUnlessBusy( - ["DELETE_EDGE", "MERGE_TREES", "MIN_CUT_AGGLOMERATE"], - handleSkeletonProofreadingAction, + ["DELETE_EDGE", "MERGE_TREES", "MIN_CUT_AGGLOMERATE_WITH_NODE_IDS"], + runSagaAndCatchSoftError(handleSkeletonProofreadingAction), "Proofreading in progress", ); - yield* takeEvery(["PROOFREAD_AT_POSITION"], proofreadAtPosition); - yield* takeEvery(["CLEAR_PROOFREADING_BY_PRODUCTS"], clearProofreadingByproducts); + yield* takeEvery(["PROOFREAD_AT_POSITION"], runSagaAndCatchSoftError(proofreadAtPosition)); + yield* takeEvery( + ["CLEAR_PROOFREADING_BY_PRODUCTS"], + runSagaAndCatchSoftError(clearProofreadingByproducts), + ); yield* takeEveryUnlessBusy( - ["PROOFREAD_MERGE", "MIN_CUT_AGGLOMERATE_WITH_POSITION"], - handleProofreadMergeOrMinCut, + ["PROOFREAD_MERGE", "MIN_CUT_AGGLOMERATE"], + runSagaAndCatchSoftError(handleProofreadMergeOrMinCut), "Proofreading in progress", ); yield* takeEveryUnlessBusy( ["CUT_AGGLOMERATE_FROM_NEIGHBORS"], - handleProofreadCutNeighbors, + 
runSagaAndCatchSoftError(handleProofreadCutFromNeighbors), "Proofreading in progress", ); yield* takeEvery( ["CREATE_NODE", "DELETE_NODE", "SET_NODE_POSITION"], - checkForAgglomerateSkeletonModification, + runSagaAndCatchSoftError(checkForAgglomerateSkeletonModification), ); } @@ -121,7 +143,10 @@ function* loadCoarseMesh( position: Vector3, additionalCoordinates: AdditionalCoordinate[] | undefined, ): Saga { - if ((yield* select((state) => state.userConfiguration.autoRenderMeshInProofreading)) === false) { + const autoRenderMeshInProofreading = yield* select( + (state) => state.userConfiguration.autoRenderMeshInProofreading, + ); + if (!autoRenderMeshInProofreading) { return; } const dataset = yield* select((state) => state.dataset); @@ -249,7 +274,7 @@ function* createEditableMapping(): Saga { // The server increments the volume tracing's version by 1 when switching the mapping to an editable one yield* put(setVersionNumberAction(upToDateVolumeTracing.version + 1, "volume", volumeTracingId)); yield* put(setMappingNameAction(layerName, serverEditableMapping.mappingName, "HDF5")); - yield* put(setMappingIsEditableAction()); + yield* put(setHasEditableMappingAction()); yield* put(initializeEditableMappingAction(serverEditableMapping)); return serverEditableMapping.mappingName; } @@ -280,7 +305,7 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { if ( action.type !== "MERGE_TREES" && action.type !== "DELETE_EDGE" && - action.type !== "MIN_CUT_AGGLOMERATE" + action.type !== "MIN_CUT_AGGLOMERATE_WITH_NODE_IDS" ) { return; } @@ -329,11 +354,12 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { return; } - const preparation = yield* call(prepareSplitOrMerge); + const preparation = yield* call(prepareSplitOrMerge, true); if (!preparation) { return; } - const { agglomerateFileMag, getDataValue, volumeTracing } = preparation; + + const { agglomerateFileMag, getDataValue, activeMapping, volumeTracing } = preparation; const { tracingId: 
volumeTracingId } = volumeTracing; // Use untransformedPosition because agglomerate trees should not have @@ -376,6 +402,17 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { agglomerateFileMag, ), ); + const mergedMapping = yield* call( + mergeAgglomeratesInMapping, + activeMapping, + targetAgglomerateId, + sourceAgglomerateId, + ); + yield* put( + setMappingAction(volumeTracingId, activeMapping.mappingName, activeMapping.mappingType, { + mapping: mergedMapping, + }), + ); } else if (action.type === "DELETE_EDGE") { if (sourceAgglomerateId !== targetAgglomerateId) { Toast.error("Segments that should be split need to be in the same agglomerate."); @@ -389,7 +426,7 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { agglomerateFileMag, ), ); - } else if (action.type === "MIN_CUT_AGGLOMERATE") { + } else if (action.type === "MIN_CUT_AGGLOMERATE_WITH_NODE_IDS") { const hasErrored = yield* call( performMinCut, sourceAgglomerateId, @@ -414,18 +451,40 @@ function* handleSkeletonProofreadingAction(action: Action): Saga { yield* put(pushSaveQueueTransaction(items, "mapping", volumeTracingId)); yield* call([Model, Model.ensureSavedState]); - /* Reload the segmentation */ - yield* call([api.data, api.data.reloadBuckets], volumeTracingId, (bucket) => - bucket.containsValue(targetAgglomerateId), + if (action.type === "MIN_CUT_AGGLOMERATE_WITH_NODE_IDS" || action.type === "DELETE_EDGE") { + if (sourceAgglomerateId !== targetAgglomerateId) { + Toast.error( + "The selected positions are not part of the same agglomerate and cannot be split.", + ); + return; + } + + // Because we ensured a saved state a few lines above, we can now split the mapping locally + // as this still requires some communication with the back-end. 
+ const splitMapping = yield* splitAgglomerateInMapping( + activeMapping, + sourceAgglomerateId, + volumeTracingId, + ); + + console.log("dispatch setMappingAction in proofreading saga"); + yield* put( + setMappingAction(volumeTracingId, activeMapping.mappingName, activeMapping.mappingType, { + mapping: splitMapping, + }), + ); + } + + const newMapping = yield* select( + (store) => store.temporaryConfiguration.activeMappingByLayer[volumeTracingId].mapping, ); const [newSourceAgglomerateId, newTargetAgglomerateId] = yield* all([ - call(getDataValue, sourceNodePosition), - call(getDataValue, targetNodePosition), + call(getDataValue, sourceNodePosition, newMapping), + call(getDataValue, targetNodePosition, newMapping), ]); /* Rename agglomerate skeleton(s) according to their new id and mapping name */ - yield* put( setTreeNameAction( getTreeNameForAgglomerateSkeleton(newSourceAgglomerateId, volumeTracing.mappingName), @@ -508,6 +567,14 @@ function* performMinCut( yield* put(deleteEdgeAction(firstNodeId, secondNodeId, Date.now(), "PROOFREADING")); } + console.log( + "Splitting agglomerate", + sourceAgglomerateId, + "with segment ids", + edge.segmentId1, + "and", + edge.segmentId2, + ); items.push( splitAgglomerate(sourceAgglomerateId, edge.segmentId1, edge.segmentId2, agglomerateFileMag), ); @@ -615,97 +682,20 @@ const MISSING_INFORMATION_WARNING = function* handleProofreadMergeOrMinCut(action: Action) { // Actually, action is ProofreadMergeAction | MinCutAgglomerateWithPositionAction // but the takeEveryUnlessBusy wrapper does not understand this. 
- if (action.type !== "PROOFREAD_MERGE" && action.type !== "MIN_CUT_AGGLOMERATE_WITH_POSITION") { + if (action.type !== "PROOFREAD_MERGE" && action.type !== "MIN_CUT_AGGLOMERATE") { return; } const allowUpdate = yield* select((state) => state.tracing.restrictions.allowUpdate); if (!allowUpdate) return; - const preparation = yield* call(prepareSplitOrMerge); + const preparation = yield* call(prepareSplitOrMerge, false); if (!preparation) { return; } - const { agglomerateFileMag, volumeTracing } = preparation; - const { tracingId: volumeTracingId, activeCellId, activeUnmappedSegmentId } = volumeTracing; - if (activeCellId === 0) return; - - const segments = yield* select((store) => getSegmentsForLayer(store, volumeTracingId)); - const activeSegment = segments.getNullable(activeCellId); - if (activeSegment == null) return; - const activeSegmentPositionFloat = activeSegment.somePosition; - if (activeSegmentPositionFloat == null) return; - - const activeSegmentPosition = V3.floor(activeSegmentPositionFloat); - let sourcePosition; - - let targetPosition: Vector3 | undefined; - let idInfos; - if (action.position) { - if (activeUnmappedSegmentId != null) { - // The user has selected a super-voxel in the 3D viewport and then clicked - // in a data viewport to select the second merge partner. However, this mix - // is currently not supported. - Toast.warning(MISSING_INFORMATION_WARNING); - return; - } - // The action was triggered via a data viewport (not 3D). In this case, - // the active segment's position can be used as a source. - sourcePosition = activeSegmentPosition; - targetPosition = V3.floor(action.position); - idInfos = yield* call(getAgglomerateInfos, preparation.getMappedAndUnmapped, [ - sourcePosition, - targetPosition, - ]); - } else { - // The action was triggered in the 3D viewport. In this case, we don't have - // a mouse position and also the active segment position isn't necessarily - // a position of the clicked super-voxel. 
- if ( - action.agglomerateId == null || - activeCellId == null || - activeUnmappedSegmentId == null || - action.segmentId == null - ) { - Toast.warning(MISSING_INFORMATION_WARNING); - console.log("Some fields were null:", { - agglomerateId: action.agglomerateId, - activeCellId, - activeUnmappedSegmentId, - segmentId: action.segmentId, - }); - return; - } - const targetSegmentId = action.segmentId; - if (targetSegmentId == null) { - Toast.warning(MISSING_INFORMATION_WARNING); - console.log(`No position is known for agglomerate ${action.agglomerateId}`); - return; - } - if (action.type === "PROOFREAD_MERGE") { - // When merging two segments, they can share the same seed position afterwards. - // Also, using the active segment position is fine because it's definitely - // matching the active agglomerate. - // Therefore, we do so to avoid another roundtrip to the server. - sourcePosition = activeSegmentPosition; - targetPosition = activeSegmentPosition; - } else { - // When splitting two segments, we don't really have reliable positions at hand. - // For the source position, we cannot rely on the active segment position, because - // the active super-voxel doesn't necessarily match the last click position within - // the data viewports. - // For the target position, we also don't have reliable information available. 
- [sourcePosition, targetPosition] = yield* all([ - call(getPositionForSegmentId, volumeTracing, activeUnmappedSegmentId), - call(getPositionForSegmentId, volumeTracing, targetSegmentId), - ]); - } - - idInfos = [ - { agglomerateId: activeCellId, unmappedId: activeUnmappedSegmentId }, - { agglomerateId: action.agglomerateId, unmappedId: action.segmentId }, - ]; - } + const { agglomerateFileMag, volumeTracing, activeMapping } = preparation; + const { tracingId: volumeTracingId } = volumeTracing; + const idInfos = yield* call(gatherInfoForOperation, action, preparation); if (idInfos == null) { return; @@ -713,8 +703,8 @@ function* handleProofreadMergeOrMinCut(action: Action) { const [sourceInfo, targetInfo] = idInfos; const sourceAgglomerateId = sourceInfo.agglomerateId; const targetAgglomerateId = targetInfo.agglomerateId; - const sourceAgglomerate = volumeTracing.segments.getNullable(sourceAgglomerateId); - const targetAgglomerate = volumeTracing.segments.getNullable(targetAgglomerateId); + const sourceAgglomerate = volumeTracing.segments.getNullable(Number(sourceAgglomerateId)); + const targetAgglomerate = volumeTracing.segments.getNullable(Number(targetAgglomerateId)); /* Send the respective split/merge update action to the backend (by pushing to the save queue and saving immediately) */ @@ -726,6 +716,7 @@ function* handleProofreadMergeOrMinCut(action: Action) { Toast.error("Segments that should be merged need to be in different agglomerates."); return; } + items.push( mergeAgglomerate( sourceAgglomerateId, @@ -735,7 +726,29 @@ function* handleProofreadMergeOrMinCut(action: Action) { agglomerateFileMag, ), ); - } else if (action.type === "MIN_CUT_AGGLOMERATE_WITH_POSITION") { + + console.log( + "Merging agglomerate", + sourceAgglomerateId, + "with", + targetAgglomerateId, + "and segment ids", + sourceInfo.unmappedId, + targetInfo.unmappedId, + ); + const mergedMapping = yield* call( + mergeAgglomeratesInMapping, + activeMapping, + targetAgglomerateId, + 
sourceAgglomerateId, + ); + + yield* put( + setMappingAction(volumeTracingId, activeMapping.mappingName, activeMapping.mappingType, { + mapping: mergedMapping, + }), + ); + } else if (action.type === "MIN_CUT_AGGLOMERATE") { if (sourceInfo.unmappedId === targetInfo.unmappedId) { Toast.error( "The selected positions are both part of the same base segment and cannot be split. Please select another position or use the nodes of the agglomerate skeleton to perform the split.", @@ -766,29 +779,50 @@ function* handleProofreadMergeOrMinCut(action: Action) { yield* put(pushSaveQueueTransaction(items, "mapping", volumeTracingId)); yield* call([Model, Model.ensureSavedState]); - /* Reload the segmentation */ - yield* call([api.data, api.data.reloadBuckets], volumeTracingId, (bucket) => - bucket.containsValue(targetAgglomerateId), - ); + if (action.type === "MIN_CUT_AGGLOMERATE") { + console.log("start updating the mapping after a min-cut"); + if (sourceAgglomerateId !== targetAgglomerateId) { + Toast.error( + "The selected positions are not part of the same agglomerate and cannot be split.", + ); + return; + } + + // Now that the changes are saved, we can split the mapping locally (because it requires + // communication with the back-end). + const splitMapping = yield* splitAgglomerateInMapping( + activeMapping, + sourceAgglomerateId, + volumeTracingId, + ); + + console.log("dispatch setMappingAction in proofreading saga"); + yield* put( + setMappingAction(volumeTracingId, activeMapping.mappingName, activeMapping.mappingType, { + mapping: splitMapping, + }), + ); + console.log("finished updating the mapping after a min-cut"); + } if (action.type === "PROOFREAD_MERGE") { // Remove the segment that doesn't exist anymore. 
- yield* put(removeSegmentAction(targetAgglomerateId, volumeTracing.tracingId)); + yield* put(removeSegmentAction(targetAgglomerateId, volumeTracingId)); } /* Reload meshes */ - const tracingStoreUrl = yield* select((state) => state.tracing.tracingStore.url); + const newMapping = yield* select( + (store) => store.temporaryConfiguration.activeMappingByLayer[volumeTracingId].mapping, + ); const newSourceAgglomerateId = yield* call( - getAgglomerateIdForSegmentId, - tracingStoreUrl, - volumeTracing.tracingId, + preparation.mapSegmentId, sourceInfo.unmappedId, + newMapping, ); const newTargetAgglomerateId = yield* call( - getAgglomerateIdForSegmentId, - tracingStoreUrl, - volumeTracing.tracingId, + preparation.mapSegmentId, targetInfo.unmappedId, + newMapping, ); // Preserving custom names across merges & splits. if ( @@ -805,14 +839,14 @@ function* handleProofreadMergeOrMinCut(action: Action) { Toast.info(`Renamed segment "${getSegmentName(sourceAgglomerate)}" to "${mergedName}."`); } } else if ( - action.type === "MIN_CUT_AGGLOMERATE_WITH_POSITION" && + action.type === "MIN_CUT_AGGLOMERATE" && sourceAgglomerate && sourceAgglomerate.name != null ) { // Assign custom name to split-off target. yield* put( updateSegmentAction( - newTargetAgglomerateId, + Number(newTargetAgglomerateId), { name: sourceAgglomerate.name }, volumeTracingId, ), @@ -825,17 +859,17 @@ function* handleProofreadMergeOrMinCut(action: Action) { { agglomerateId: sourceAgglomerateId, newAgglomerateId: newSourceAgglomerateId, - nodePosition: sourcePosition, + nodePosition: sourceInfo.position, }, { agglomerateId: targetAgglomerateId, newAgglomerateId: newTargetAgglomerateId, - nodePosition: targetPosition, + nodePosition: targetInfo.position, }, ]); } -function* handleProofreadCutNeighbors(action: Action) { +function* handleProofreadCutFromNeighbors(action: Action) { // Actually, action is CutAgglomerateFromNeighborsAction but the // takeEveryUnlessBusy wrapper does not understand this. 
if (action.type !== "CUT_AGGLOMERATE_FROM_NEIGHBORS") { @@ -848,11 +882,11 @@ function* handleProofreadCutNeighbors(action: Action) { const allowUpdate = yield* select((state) => state.tracing.restrictions.allowUpdate); if (!allowUpdate) return; - const preparation = yield* call(prepareSplitOrMerge); + const preparation = yield* call(prepareSplitOrMerge, false); if (!preparation) { return; } - const { agglomerateFileMag, getDataValue, volumeTracing } = preparation; + const { agglomerateFileMag, getDataValue, volumeTracing, activeMapping } = preparation; const { tracingId: volumeTracingId } = volumeTracing; let idInfos; @@ -881,7 +915,7 @@ function* handleProofreadCutNeighbors(action: Action) { const editableMappingId = volumeTracing.mappingName; - const targetAgglomerate = volumeTracing.segments.getNullable(targetAgglomerateId); + const targetAgglomerate = volumeTracing.segments.getNullable(Number(targetAgglomerateId)); /* Send the respective split/merge update action to the backend (by pushing to the save queue and saving immediately) */ @@ -899,25 +933,33 @@ function* handleProofreadCutNeighbors(action: Action) { action.tree, items, ); - if (didCancel) { - return; - } - - if (items.length === 0) { + if (didCancel || items.length === 0) { return; } yield* put(pushSaveQueueTransaction(items, "mapping", volumeTracingId)); yield* call([Model, Model.ensureSavedState]); - /* Reload the segmentation */ - yield* call([api.data, api.data.reloadBuckets], volumeTracingId, (bucket) => - bucket.containsValue(targetAgglomerateId), + // Now that the changes are saved, we can split the mapping locally (because it requires + // communication with the back-end). 
+ const mappingAfterSplit = yield* splitAgglomerateInMapping( + activeMapping, + targetAgglomerateId, + volumeTracingId, + ); + + console.log("dispatch setMappingAction in proofreading saga"); + yield* put( + setMappingAction(volumeTracingId, activeMapping.mappingName, activeMapping.mappingType, { + mapping: mappingAfterSplit, + }), ); const [newTargetAgglomerateId, ...newNeighborAgglomerateIds] = yield* all([ - call(getDataValue, targetPosition), - ...neighborInfo.neighbors.map((neighbor) => call(getDataValue, neighbor.position)), + call(getDataValue, targetPosition, mappingAfterSplit), + ...neighborInfo.neighbors.map((neighbor) => + call(getDataValue, neighbor.position, mappingAfterSplit), + ), ]); if (targetAgglomerate != null && targetAgglomerate.name != null) { @@ -925,7 +967,7 @@ function* handleProofreadCutNeighbors(action: Action) { const updateNeighborNamesActions = newNeighborAgglomerateIds.map((newNeighborAgglomerateId) => put( updateSegmentAction( - newNeighborAgglomerateId, + Number(newNeighborAgglomerateId), { name: targetAgglomerate.name }, volumeTracingId, ), @@ -953,14 +995,18 @@ function* handleProofreadCutNeighbors(action: Action) { // Helper functions -function* prepareSplitOrMerge(): Saga<{ +type Preparation = { agglomerateFileMag: Vector3; - getDataValue: (position: Vector3) => Promise; + getDataValue: (position: Vector3, overrideMapping?: Mapping | null) => Promise; + mapSegmentId: (segmentId: number, overrideMapping?: Mapping | null) => number; getMappedAndUnmapped: ( position: Vector3, ) => Promise<{ agglomerateId: number; unmappedId: number }>; + activeMapping: ActiveMappingInfo; volumeTracing: VolumeTracing & { mappingName: string }; -} | null> { +}; + +function* prepareSplitOrMerge(isSkeletonProofreading: boolean): Saga { const volumeTracingLayer = yield* select((state) => getActiveSegmentationTracingLayer(state)); const volumeTracing = yield* select((state) => getActiveSegmentationTracing(state)); if (volumeTracingLayer == null || 
volumeTracing == null) { @@ -976,7 +1022,7 @@ function* prepareSplitOrMerge(): Saga<{ return null; } - if (!volumeTracing.mappingIsEditable) { + if (!volumeTracing.hasEditableMapping) { try { mappingName = yield* call(createEditableMapping); } catch (e) { @@ -986,11 +1032,19 @@ function* prepareSplitOrMerge(): Saga<{ } const resolutionInfo = getResolutionInfo(volumeTracingLayer.resolutions); - // The mag the agglomerate skeleton corresponds to should be the finest available mag of the volume tracing layer - const agglomerateFileMag = resolutionInfo.getFinestResolution(); - const agglomerateFileZoomstep = resolutionInfo.getFinestResolutionIndex(); + const currentMag = yield* select((state) => getCurrentResolution(state, volumeTracingLayer.name)); + + const agglomerateFileMag = isSkeletonProofreading + ? // In case of skeleton proofreading, the finest resolution should be used. + resolutionInfo.getFinestResolution() + : // For non-skeleton proofreading, the active resolution suffices + currentMag; + if (agglomerateFileMag == null) { + return null; + } + const agglomerateFileZoomstep = resolutionInfo.getIndexByResolution(agglomerateFileMag); - const getDataValue = (position: Vector3) => { + const getUnmappedDataValue = (position: Vector3): Promise => { const { additionalCoordinates } = Store.getState().flycam; return api.data.getDataValue( volumeTracing.tracingId, @@ -1000,20 +1054,80 @@ function* prepareSplitOrMerge(): Saga<{ ); }; - const getUnmappedDataValue = yield* call(createGetUnmappedDataValueFn, volumeTracing); + console.log("Accessing mapping for proofreading"); + const mapping = yield* select( + (state) => + getMappingInfo(state.temporaryConfiguration.activeMappingByLayer, volumeTracing.tracingId) + .mapping, + ); + + if (mapping == null) { + Toast.warning("Mapping is not available, cannot proofread."); + return null; + } + + const getDataValue = async ( + position: Vector3, + overrideMapping: Mapping | null = null, + ): Promise => { + const unmappedId = 
await getUnmappedDataValue(position); + return mapSegmentId(unmappedId, overrideMapping); + }; + + const mapSegmentId = (segmentId: number, overrideMapping: Mapping | null = null): number => { + const mappingToAccess = overrideMapping ?? mapping; + const mappedId = isNumberMap(mappingToAccess) + ? mappingToAccess.get(Number(segmentId)) + : // TODO: Proper 64 bit support (#6921) + Number(mappingToAccess.get(BigInt(segmentId))); + if (mappedId == null) { + // It could happen that the user tries to perform a proofreading operation + // that involves an id for which the mapped id wasn't fetched yet. + // In that case, we currently just throw an error. A toast will appear + // that asks the user to retry. If we notice that this happens in production, + // we can think about a better way to handle this. + throw new SoftError( + `Could not map id ${segmentId}. The mapped partner might not be known yet. Please retry.`, + ); + } + return mappedId; + }; const getMappedAndUnmapped = async (position: Vector3) => { - const [agglomerateId, unmappedId] = await Promise.all([ - getDataValue(position), - getUnmappedDataValue(position), - ]); + const unmappedId = await getUnmappedDataValue(position); + const agglomerateId = isNumberMap(mapping) + ? mapping.get(unmappedId) + : // TODO: Proper 64 bit support (#6921) + Number(mapping.get(BigInt(unmappedId))); + + if (agglomerateId == null) { + // It could happen that the user tries to perform a proofreading operation + // that involves an id for which the mapped id wasn't fetched yet. + // In that case, we currently just throw an error. A toast will appear + // that asks the user to retry. If we notice that this happens in production, + // we can think about a better way to handle this. + throw new SoftError( + `Could not map id ${unmappedId} at position ${position}. The mapped partner might not be known yet. 
Please retry.`, + ); + } return { agglomerateId, unmappedId }; }; + const activeMapping = yield* select( + (store) => store.temporaryConfiguration.activeMappingByLayer[volumeTracing.tracingId], + ); + + if (activeMapping.mapping == null) { + Toast.error("Mapping is not available, cannot proofread."); + return null; + } + return { agglomerateFileMag, getDataValue, getMappedAndUnmapped, + mapSegmentId, + activeMapping, volumeTracing: { ...volumeTracing, mappingName }, }; } @@ -1027,15 +1141,21 @@ function* getAgglomerateInfos( agglomerateId: number; unmappedId: number; }> | null> { - const idInfos = yield* all(positions.map((pos) => call(getMappedAndUnmapped, pos))); - if (idInfos.find((idInfo) => idInfo.agglomerateId === 0 || idInfo.unmappedId === 0) != null) { - Toast.warning( - "One of the selected segments has the id 0 which is the background. Cannot merge/split.", - ); - console.warn("At least one id was zero:", idInfos); + try { + const idInfos = yield* all(positions.map((pos) => call(getMappedAndUnmapped, pos))); + if (idInfos.find((idInfo) => idInfo.agglomerateId === 0 || idInfo.unmappedId === 0) != null) { + Toast.warning( + "One of the selected segments has the id 0 which is the background. Cannot merge/split.", + ); + console.warn("At least one id was zero:", idInfos); + return null; + } + return idInfos; + } catch (exception) { + Toast.error("Cannot perform proofreading operation. Please retry. 
See console for details."); + console.error(exception); return null; } - return idInfos; } function* refreshAffectedMeshes( @@ -1063,14 +1183,14 @@ function* refreshAffectedMeshes( for (const item of items) { // Remove old agglomerate mesh(es) and load updated agglomerate mesh(es) if (!removedIds.has(item.agglomerateId)) { - yield* put(removeMeshAction(layerName, item.agglomerateId)); + yield* put(removeMeshAction(layerName, Number(item.agglomerateId))); removedIds.add(item.agglomerateId); } if (!newlyLoadedIds.has(item.newAgglomerateId)) { yield* call( loadCoarseMesh, layerName, - item.newAgglomerateId, + Number(item.newAgglomerateId), item.nodePosition, additionalCoordinates, ); @@ -1079,40 +1199,9 @@ function* refreshAffectedMeshes( } } -function* createGetUnmappedDataValueFn( - volumeTracing: VolumeTracing, -): Saga<(nodePosition: Vector3) => Promise> { - const layerName = volumeTracing.tracingId; - const layer = yield* select((state) => getLayerByName(state.dataset, layerName)); - - const resolutionInfo = getResolutionInfo(layer.resolutions); - const mag = resolutionInfo.getFinestResolution(); - - const fallbackLayerName = volumeTracing.fallbackLayer; - if (fallbackLayerName == null) { - // Proofreading is done on editable mappings which only exist when there is - // an agglomerate file (which is only possible when there is a segmentation layer - // in the dataset). 
- throw new Error("No fallback layer exists for volume tracing during proofreading."); - } - - const TypedArrayClass = getConstructorForElementClass(layer.elementClass)[0]; - - return async (nodePosition: Vector3) => { - const buffer = await api.data.getRawDataCuboid( - fallbackLayerName, - nodePosition, - V3.add(nodePosition, mag), - mag, - ); - - return Number(new TypedArrayClass(buffer)[0]); - }; -} - function getDeleteEdgeActionForEdgePositions( sourceTree: Tree, - edge: { position1: Vector3; position2: Vector3; segmentId1: number; segmentId2: number }, + edge: { position1: Vector3; position2: Vector3 }, ) { let firstNodeId; let secondNodeId; @@ -1158,3 +1247,166 @@ function* getPositionForSegmentId(volumeTracing: VolumeTracing, segmentId: numbe ); return position; } + +function* splitAgglomerateInMapping( + activeMapping: ActiveMappingInfo, + sourceAgglomerateId: number, + volumeTracingId: string, +) { + // Obtain all segment ids that map to sourceAgglomerateId + const mappingEntries = Array.from(activeMapping.mapping as NumberLikeMap); + + const adaptToType = + mappingEntries.length > 0 && isBigInt(mappingEntries[0][0]) + ? (el: number) => BigInt(el) + : (el: number) => el; + + // If the mapping contains BigInts, we need a BigInt for the filtering + const comparableSourceAgglomerateId = adaptToType(sourceAgglomerateId); + const splitSegmentIds = mappingEntries + .filter(([_segmentId, agglomerateId]) => agglomerateId === comparableSourceAgglomerateId) + .map(([segmentId, _agglomerateId]) => segmentId); + + const tracingStoreHost = yield* select((state) => state.tracing.tracingStore.url); + // Ask the server to map the (split) segment ids. This creates a partial mapping + // that only contains these ids. 
+ const mappingAfterSplit = yield* call( + getAgglomeratesForSegmentsFromTracingstore, + tracingStoreHost, + volumeTracingId, + splitSegmentIds, + ); + + // Create a new mapping which is equal to the old one with the difference that + // ids from splitSegmentIds are mapped to their new target agglomerate ids. + const splitMapping = new Map( + Array.from(activeMapping.mapping as NumberLikeMap, ([segmentId, agglomerateId]) => { + // @ts-ignore get() is expected to accept the type that segmentId has + const mappedId = mappingAfterSplit.get(segmentId); + if (mappedId != null) { + return [segmentId, mappedId]; + } + return [segmentId, agglomerateId]; + }), + ) as Mapping; + return splitMapping; +} + +function mergeAgglomeratesInMapping( + activeMapping: ActiveMappingInfo, + targetAgglomerateId: number, + sourceAgglomerateId: number, +): Mapping { + const adaptToType = + activeMapping.mapping && isNumberMap(activeMapping.mapping) + ? (el: number) => el + : (el: number) => BigInt(el); + + const typedTargetAgglomerateId = adaptToType(targetAgglomerateId); + const typedSourceAgglomerateId = adaptToType(sourceAgglomerateId); + return new Map( + Array.from(activeMapping.mapping as NumberLikeMap, ([key, value]) => + value === typedTargetAgglomerateId ? 
[key, typedSourceAgglomerateId] : [key, value], + ), + ) as Mapping; +} + +function* gatherInfoForOperation( + action: ProofreadMergeAction | MinCutAgglomerateWithPositionAction, + preparation: Preparation, +): Saga | null> { + const { volumeTracing } = preparation; + const { tracingId: volumeTracingId, activeCellId, activeUnmappedSegmentId } = volumeTracing; + if (activeCellId === 0) return null; + + const segments = yield* select((store) => getSegmentsForLayer(store, volumeTracingId)); + const activeSegment = segments.getNullable(activeCellId); + if (activeSegment == null) return null; + const activeSegmentPositionFloat = activeSegment.somePosition; + if (activeSegmentPositionFloat == null) return null; + + const activeSegmentPosition = V3.floor(activeSegmentPositionFloat); + + let sourcePosition: Vector3 | undefined; + let targetPosition: Vector3 | undefined; + + if (action.position) { + // The action was triggered via a data viewport (not 3D). In this case, + // the active segment's position can be used as a source. + if (activeUnmappedSegmentId != null) { + // The user has selected a super-voxel in the 3D viewport and then clicked + // in a data viewport to select the second merge partner. However, this mix + // is currently not supported. + Toast.warning(MISSING_INFORMATION_WARNING); + return null; + } + sourcePosition = activeSegmentPosition; + targetPosition = V3.floor(action.position); + const idInfos = yield* call(getAgglomerateInfos, preparation.getMappedAndUnmapped, [ + sourcePosition, + targetPosition, + ]); + if (idInfos == null) { + return null; + } + const [idInfo1, idInfo2] = idInfos; + return [ + { ...idInfo1, position: sourcePosition }, + { ...idInfo2, position: targetPosition }, + ]; + } + + // The action was triggered in the 3D viewport. In this case, we don't have + // a mouse position and also the active segment position isn't necessarily + // a position of the clicked super-voxel. 
+ if ( + action.agglomerateId == null || + activeCellId == null || + activeUnmappedSegmentId == null || + action.segmentId == null + ) { + Toast.warning(MISSING_INFORMATION_WARNING); + console.log("Some fields were null:", { + agglomerateId: action.agglomerateId, + activeCellId, + activeUnmappedSegmentId, + segmentId: action.segmentId, + }); + return null; + } + const targetSegmentId = action.segmentId; + if (targetSegmentId == null) { + Toast.warning(MISSING_INFORMATION_WARNING); + console.log(`No position is known for agglomerate ${action.agglomerateId}`); + return null; + } + if (action.type === "PROOFREAD_MERGE") { + // When merging two segments, they can share the same seed position afterwards. + // Also, using the active segment position is fine because it's definitely + // matching the active agglomerate. + // Therefore, we do so to avoid another roundtrip to the server. + sourcePosition = activeSegmentPosition; + targetPosition = activeSegmentPosition; + } else { + // When splitting two segments, we don't really have reliable positions at hand. + // For the source position, we cannot rely on the active segment position, because + // the active super-voxel doesn't necessarily match the last click position within + // the data viewports. + // For the target position, we also don't have reliable information available. 
+ [sourcePosition, targetPosition] = yield* all([ + call(getPositionForSegmentId, volumeTracing, activeUnmappedSegmentId), + call(getPositionForSegmentId, volumeTracing, targetSegmentId), + ]); + } + + const idInfos = [ + { agglomerateId: activeCellId, unmappedId: activeUnmappedSegmentId, position: sourcePosition }, + { agglomerateId: action.agglomerateId, unmappedId: action.segmentId, position: targetPosition }, + ]; + + return idInfos; +} diff --git a/frontend/javascripts/oxalis/model/sagas/update_actions.ts b/frontend/javascripts/oxalis/model/sagas/update_actions.ts index 350225969d4..7df4a0179e2 100644 --- a/frontend/javascripts/oxalis/model/sagas/update_actions.ts +++ b/frontend/javascripts/oxalis/model/sagas/update_actions.ts @@ -7,6 +7,7 @@ import type { TreeGroup, UserBoundingBox, SegmentGroup, + NumberLike, } from "oxalis/store"; import { convertUserBoundingBoxesFromFrontendToServer } from "oxalis/model/reducers/reducer_helpers"; import { AdditionalCoordinate } from "types/api_flow_types"; @@ -433,62 +434,64 @@ export function updateMappingName( } as const; } export function splitAgglomerate( - agglomerateId: number, - segmentId1: number, - segmentId2: number, + agglomerateId: NumberLike, + segmentId1: NumberLike, + segmentId2: NumberLike, mag: Vector3, ): { name: "splitAgglomerate"; value: { agglomerateId: number; - mag: Vector3; - // For backwards compatibility reasons, - // older segments are defined using their positions - // instead of their unmapped ids. segmentId1: number | undefined; segmentId2: number | undefined; + // For backwards compatibility reasons, + // older segments are defined using their positions (and mag) + // instead of their unmapped ids. 
segmentPosition1?: Vector3 | undefined; segmentPosition2?: Vector3 | undefined; + mag: Vector3; }; } { return { name: "splitAgglomerate", value: { - agglomerateId, - segmentId1, - segmentId2, + // TODO: Proper 64 bit support (#6921) + agglomerateId: Number(agglomerateId), + segmentId1: Number(segmentId1), + segmentId2: Number(segmentId2), mag, }, } as const; } export function mergeAgglomerate( - agglomerateId1: number, - agglomerateId2: number, - segmentId1: number, - segmentId2: number, + agglomerateId1: NumberLike, + agglomerateId2: NumberLike, + segmentId1: NumberLike, + segmentId2: NumberLike, mag: Vector3, ): { name: "mergeAgglomerate"; value: { agglomerateId1: number; agglomerateId2: number; - mag: Vector3; - // For backwards compatibility reasons, - // older segments are defined using their positions - // instead of their unmapped ids. segmentId1: number | undefined; segmentId2: number | undefined; + // For backwards compatibility reasons, + // older segments are defined using their positions (and mag) + // instead of their unmapped ids. 
segmentPosition1?: Vector3 | undefined; segmentPosition2?: Vector3 | undefined; + mag: Vector3; }; } { return { name: "mergeAgglomerate", value: { - agglomerateId1, - agglomerateId2, - segmentId1, - segmentId2, + // TODO: Proper 64 bit support (#6921) + agglomerateId1: Number(agglomerateId1), + agglomerateId2: Number(agglomerateId2), + segmentId1: Number(segmentId1), + segmentId2: Number(segmentId2), mag, }, } as const; diff --git a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx index 905a639a945..39f056615f4 100644 --- a/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx +++ b/frontend/javascripts/oxalis/model/sagas/volumetracing_saga.tsx @@ -113,6 +113,7 @@ import { import maybeInterpolateSegmentationLayer from "./volume/volume_interpolation_saga"; import messages from "messages"; import { pushSaveQueueTransaction } from "../actions/save_actions"; +import { ActionPattern } from "redux-saga/effects"; const OVERWRITE_EMPTY_WARNING_KEY = "OVERWRITE-EMPTY-WARNING"; @@ -720,7 +721,7 @@ export function* diffVolumeTracing( // In case no mapping is active, this is denoted by setting the mapping name to null. const action = updateMappingName( volumeTracing.mappingName || null, - volumeTracing.mappingIsEditable || null, + volumeTracing.hasEditableMapping || null, volumeTracing.mappingIsLocked, ); yield action; @@ -823,18 +824,28 @@ function* updateHoveredSegmentId(): Saga { } const globalMousePosition = yield* call(getGlobalMousePosition); - const hoveredCellInfo = yield* call( + const hoveredSegmentInfo = yield* call( { context: Model, fn: Model.getHoveredCellId }, globalMousePosition, ); - const id = hoveredCellInfo != null ? hoveredCellInfo.id : 0; + // Note that hoveredSegmentInfo.id can be an unmapped id even when + // a mapping is active, if it is a HDF5 mapping that is partially loaded + // and no entry exists yet for the input id. + const id = hoveredSegmentInfo != null ? 
hoveredSegmentInfo.id : 0; + const unmappedId = hoveredSegmentInfo != null ? hoveredSegmentInfo.unmappedId : 0; const oldHoveredSegmentId = yield* select( (store) => store.temporaryConfiguration.hoveredSegmentId, ); + const oldHoveredUnmappedSegmentId = yield* select( + (store) => store.temporaryConfiguration.hoveredUnmappedSegmentId, + ); if (oldHoveredSegmentId !== id) { yield* put(updateTemporarySettingAction("hoveredSegmentId", id)); } + if (oldHoveredUnmappedSegmentId !== unmappedId) { + yield* put(updateTemporarySettingAction("hoveredUnmappedSegmentId", unmappedId)); + } } export function* updateClickedSegments( @@ -917,7 +928,7 @@ function* ensureValidBrushSize(): Saga { "WK_READY", (action: Action) => action.type === "UPDATE_LAYER_SETTING" && action.propertyName === "isDisabled", - ], + ] as ActionPattern, maybeClampBrushSize, ); } diff --git a/frontend/javascripts/oxalis/model_initialization.ts b/frontend/javascripts/oxalis/model_initialization.ts index cf737760e43..4ab0fa63b9b 100644 --- a/frontend/javascripts/oxalis/model_initialization.ts +++ b/frontend/javascripts/oxalis/model_initialization.ts @@ -251,7 +251,7 @@ async function fetchEditableMappings( serverVolumeTracings: ServerVolumeTracing[], ): Promise { const promises = serverVolumeTracings - .filter((tracing) => tracing.mappingIsEditable) + .filter((tracing) => tracing.hasEditableMapping) .map((tracing) => getEditableMappingInfo(tracingStoreUrl, tracing.id)); return Promise.all(promises); } diff --git a/frontend/javascripts/oxalis/shaders/main_data_shaders.glsl.ts b/frontend/javascripts/oxalis/shaders/main_data_shaders.glsl.ts index 6e641bb9ce0..759d333868a 100644 --- a/frontend/javascripts/oxalis/shaders/main_data_shaders.glsl.ts +++ b/frontend/javascripts/oxalis/shaders/main_data_shaders.glsl.ts @@ -83,10 +83,10 @@ uniform highp uint LOOKUP_CUCKOO_TWIDTH; // Custom color cuckoo table uniform highp usampler2D custom_color_texture; uniform highp uint custom_color_seeds[3]; - uniform highp uint 
CUCKOO_ENTRY_CAPACITY; - uniform highp uint CUCKOO_ELEMENTS_PER_ENTRY; - uniform highp uint CUCKOO_ELEMENTS_PER_TEXEL; - uniform highp uint CUCKOO_TWIDTH; + uniform highp uint COLOR_CUCKOO_ENTRY_CAPACITY; + uniform highp uint COLOR_CUCKOO_ELEMENTS_PER_ENTRY; + uniform highp uint COLOR_CUCKOO_ELEMENTS_PER_TEXEL; + uniform highp uint COLOR_CUCKOO_TWIDTH; uniform vec4 activeCellIdHigh; uniform vec4 activeCellIdLow; @@ -96,14 +96,20 @@ uniform highp uint LOOKUP_CUCKOO_TWIDTH; uniform bool isUnmappedSegmentHighlighted; uniform float segmentationPatternOpacity; - uniform bool isMappingEnabled; - uniform float mappingSize; + uniform bool shouldApplyMappingOnGPU; + uniform bool mappingIsPartial; uniform bool hideUnmappedIds; - uniform sampler2D segmentation_mapping_texture; - uniform sampler2D segmentation_mapping_lookup_texture; + uniform bool is_mapping_64bit; + uniform highp uint mapping_seeds[3]; + uniform highp uint MAPPING_CUCKOO_ENTRY_CAPACITY; + uniform highp uint MAPPING_CUCKOO_ELEMENTS_PER_ENTRY; + uniform highp uint MAPPING_CUCKOO_ELEMENTS_PER_TEXEL; + uniform highp uint MAPPING_CUCKOO_TWIDTH; + uniform highp usampler2D segmentation_mapping_texture; <% } %> uniform float sphericalCapRadius; +uniform bool selectiveVisibilityInProofreading; uniform float viewMode; uniform float alpha; uniform bool renderBucketIndices; @@ -121,6 +127,8 @@ uniform float planeID; uniform vec3 addressSpaceDimensions; uniform vec4 hoveredSegmentIdLow; uniform vec4 hoveredSegmentIdHigh; +uniform vec4 hoveredUnmappedSegmentIdLow; +uniform vec4 hoveredUnmappedSegmentIdHigh; // For some reason, taking the dataset scale from the uniform results in imprecise // rendering of the brush circle (and issues in the arbitrary modes). 
That's why it @@ -190,14 +198,20 @@ void main() { vec4 data_color = vec4(0.0); <% _.each(segmentationLayerNames, function(segmentationName, layerIndex) { %> - vec4 <%= segmentationName%>_id_low = vec4(0.); - vec4 <%= segmentationName%>_id_high = vec4(0.); - float <%= segmentationName%>_effective_alpha = <%= segmentationName %>_alpha * (1. - <%= segmentationName %>_unrenderable); - - if (<%= segmentationName%>_effective_alpha > 0.) { - vec4[2] segmentationId = getSegmentId_<%= segmentationName%>(worldCoordUVW); - <%= segmentationName%>_id_low = segmentationId[1]; - <%= segmentationName%>_id_high = segmentationId[0]; + vec4 <%= segmentationName %>_id_low = vec4(0.); + vec4 <%= segmentationName %>_id_high = vec4(0.); + vec4 <%= segmentationName %>_unmapped_id_low = vec4(0.); + vec4 <%= segmentationName %>_unmapped_id_high = vec4(0.); + float <%= segmentationName %>_effective_alpha = <%= segmentationName %>_alpha * (1. - <%= segmentationName %>_unrenderable); + + if (<%= segmentationName %>_effective_alpha > 0.) 
{ + vec4[2] unmapped_segment_id; + vec4[2] segment_id; + getSegmentId_<%= segmentationName %>(worldCoordUVW, unmapped_segment_id, segment_id); + <%= segmentationName %>_unmapped_id_low = unmapped_segment_id[1]; + <%= segmentationName %>_unmapped_id_high = unmapped_segment_id[0]; + <%= segmentationName %>_id_low = segment_id[1]; + <%= segmentationName %>_id_high = segment_id[0]; } <% }) %> @@ -269,25 +283,42 @@ void main() { <% _.each(segmentationLayerNames, function(segmentationName, layerIndex) { %> // Color map (<= to fight rounding mistakes) - if ( length(<%= segmentationName%>_id_low) > 0.1 || length(<%= segmentationName%>_id_high) > 0.1 ) { + if ( length(<%= segmentationName %>_id_low) > 0.1 || length(<%= segmentationName %>_id_high) > 0.1 ) { // Increase cell opacity when cell is hovered or if it is the active activeCell - bool isHoveredCell = hoveredSegmentIdLow == <%= segmentationName%>_id_low - && hoveredSegmentIdHigh == <%= segmentationName%>_id_high; - bool isActiveCell = activeCellIdLow == <%= segmentationName%>_id_low - && activeCellIdHigh == <%= segmentationName%>_id_high; + bool isHoveredSegment = hoveredSegmentIdLow == <%= segmentationName %>_id_low + && hoveredSegmentIdHigh == <%= segmentationName %>_id_high; + bool isHoveredUnmappedSegment = hoveredUnmappedSegmentIdLow == <%= segmentationName %>_unmapped_id_low + && hoveredUnmappedSegmentIdHigh == <%= segmentationName %>_unmapped_id_high; + bool isActiveCell = activeCellIdLow == <%= segmentationName %>_id_low + && activeCellIdHigh == <%= segmentationName %>_id_high; // Highlight cell only if it's hovered or active during proofreading // and if segmentation opacity is not zero - float hoverAlphaIncrement = isHoveredCell && <%= segmentationName%>_alpha > 0.0 ? 0.2 : 0.0; - float proofreadingAlphaIncrement = isActiveCell && isProofreading && <%= segmentationName%>_alpha > 0.0 ? 0.4 : 0.0; + float alphaIncrement = isProofreading + ? (isActiveCell + ? (isHoveredUnmappedSegment + ? 
0.4 // Highlight the hovered super-voxel of the active segment + : (isHoveredSegment + ? 0.15 // Highlight the not-hovered super-voxels of the hovered segment + : 0.0 + ) + ) + : (isHoveredSegment + ? 0.2 + // We are in proofreading mode, but the current voxel neither belongs + // to the active segment nor is it hovered. When selective visibility + // is enabled, lower the opacity. + : (selectiveVisibilityInProofreading ? -<%= segmentationName %>_alpha : 0.0) + ) + ) : (isHoveredSegment ? 0.2 : 0.0); gl_FragColor = vec4(mix( data_color.rgb, - convertCellIdToRGB(<%= segmentationName%>_id_high, <%= segmentationName%>_id_low), - <%= segmentationName%>_alpha + max(hoverAlphaIncrement, proofreadingAlphaIncrement) + convertCellIdToRGB(<%= segmentationName %>_id_high, <%= segmentationName %>_id_low), + <%= segmentationName %>_alpha + alphaIncrement ), 1.0); } - vec4 <%= segmentationName%>_brushOverlayColor = getBrushOverlay(worldCoordUVW); - <%= segmentationName%>_brushOverlayColor.xyz = convertCellIdToRGB(activeCellIdHigh, activeCellIdLow); - gl_FragColor = mix(gl_FragColor, <%= segmentationName%>_brushOverlayColor, <%= segmentationName%>_brushOverlayColor.a); + vec4 <%= segmentationName %>_brushOverlayColor = getBrushOverlay(worldCoordUVW); + <%= segmentationName %>_brushOverlayColor.xyz = convertCellIdToRGB(activeCellIdHigh, activeCellIdLow); + gl_FragColor = mix(gl_FragColor, <%= segmentationName %>_brushOverlayColor, <%= segmentationName %>_brushOverlayColor.a); gl_FragColor.a = 1.0; <% }) %> diff --git a/frontend/javascripts/oxalis/shaders/mappings.glsl.ts b/frontend/javascripts/oxalis/shaders/mappings.glsl.ts index 5865326286f..6d7879b63bf 100644 --- a/frontend/javascripts/oxalis/shaders/mappings.glsl.ts +++ b/frontend/javascripts/oxalis/shaders/mappings.glsl.ts @@ -1,25 +1,41 @@ +import { hashCombine } from "./hashing.glsl"; import type { ShaderModule } from "./shader_module_system"; -import { getRgbaAtIndex } from "./texture_access.glsl"; -import { 
greaterThanVec4 } from "./utils.glsl"; -export const binarySearchIndex: ShaderModule = { - requirements: [greaterThanVec4, getRgbaAtIndex], + +export const attemptMappingLookUp: ShaderModule = { + requirements: [hashCombine], code: ` - float binarySearchIndex(sampler2D dtexture, float maxIndex, vec4 value) { - float low = 0.0; - float high = maxIndex - 1.0; - // maxIndex is at most MAPPING_TEXTURE_WIDTH**2, requiring a maximum of log2(MAPPING_TEXTURE_WIDTH**2)+1 loop passes - for (float i = 0.0; i < <%= formatNumberAsGLSLFloat(Math.log2(mappingTextureWidth**2) + 1.0) %>; i++) { - float mid = floor((low + high) / 2.0); - vec4 cur = getRgbaAtIndex(dtexture, <%= mappingTextureWidth %>, mid); - if (cur == value) { - return mid; - } else if (greaterThanVec4(cur, value)) { - high = mid - 1.0; - } else { - low = mid + 1.0; - } + ivec2 attemptMappingLookUp32(uint value, uint seed) { + highp uint h0 = hashCombine(seed, value); + // See getDiminishedEntryCapacity() for an explanation about the -1 + h0 = h0 % (MAPPING_CUCKOO_ENTRY_CAPACITY - 1u); + h0 = uint(h0 * MAPPING_CUCKOO_ELEMENTS_PER_ENTRY / MAPPING_CUCKOO_ELEMENTS_PER_TEXEL); + + highp uint x = h0 % MAPPING_CUCKOO_TWIDTH; + highp uint y = h0 / MAPPING_CUCKOO_TWIDTH; + + uvec4 customEntry = texelFetch(segmentation_mapping_texture, ivec2(x, y), 0); + + if (customEntry.r != value) { + return ivec2(-1.); } - return -1.0; + + return ivec2(0u, customEntry.g); + } + ivec2 attemptMappingLookUp64(uint high, uint low, uint seed) { + highp uint h0 = hashCombine(seed, high); + h0 = hashCombine(h0, low); + h0 = h0 % MAPPING_CUCKOO_ENTRY_CAPACITY; + h0 = uint(h0 * MAPPING_CUCKOO_ELEMENTS_PER_ENTRY / MAPPING_CUCKOO_ELEMENTS_PER_TEXEL); + highp uint x = h0 % MAPPING_CUCKOO_TWIDTH; + highp uint y = h0 / MAPPING_CUCKOO_TWIDTH; + + uvec4 customEntry = texelFetch(segmentation_mapping_texture, ivec2(x, y), 0); + + if (customEntry.r != uint(high) || customEntry.g != uint(low)) { + return ivec2(-1.); + } + + return ivec2(customEntry.ba); } 
`, }; diff --git a/frontend/javascripts/oxalis/shaders/segmentation.glsl.ts b/frontend/javascripts/oxalis/shaders/segmentation.glsl.ts index 39b7c796ca3..c3fb311f28e 100644 --- a/frontend/javascripts/oxalis/shaders/segmentation.glsl.ts +++ b/frontend/javascripts/oxalis/shaders/segmentation.glsl.ts @@ -9,9 +9,9 @@ import { } from "oxalis/shaders/utils.glsl"; import { Vector3, Vector4 } from "oxalis/constants"; import type { ShaderModule } from "./shader_module_system"; -import { binarySearchIndex } from "./mappings.glsl"; import { getRgbaAtIndex } from "./texture_access.glsl"; import { hashCombine } from "./hashing.glsl"; +import { attemptMappingLookUp } from "./mappings.glsl"; export const convertCellIdToRGB: ShaderModule = { requirements: [ @@ -23,16 +23,28 @@ export const convertCellIdToRGB: ShaderModule = { hashCombine, ], code: ` - uint vec4ToUint(vec4 idLow) { + highp uint vec4ToUint(vec4 idLow) { uint integerValue = (uint(idLow.a) << 24) | (uint(idLow.b) << 16) | (uint(idLow.g) << 8) | uint(idLow.r); return integerValue; } + vec4 uintToVec4(uint integerValue) { + float r = float(integerValue & uint(0xFF)); + float g = float((integerValue >> 8) & uint(0xFF)); + float b = float((integerValue >> 16) & uint(0xFF)); + float a = float((integerValue >> 24) & uint(0xFF)); + + vec4 id = vec4(r, g, b, a); + return id; + } + vec3 attemptCustomColorLookUp(uint integerValue, uint seed) { - highp uint h0 = hashCombine(seed, integerValue) % CUCKOO_ENTRY_CAPACITY; - h0 = uint(h0 * CUCKOO_ELEMENTS_PER_ENTRY / CUCKOO_ELEMENTS_PER_TEXEL); - highp uint x = h0 % CUCKOO_TWIDTH; - highp uint y = h0 / CUCKOO_TWIDTH; + highp uint h0 = hashCombine(seed, integerValue); + // See getDiminishedEntryCapacity() for an explanation about the -1 + h0 = h0 % (COLOR_CUCKOO_ENTRY_CAPACITY - 1u); + h0 = uint(h0 * COLOR_CUCKOO_ELEMENTS_PER_ENTRY / COLOR_CUCKOO_ELEMENTS_PER_TEXEL); + highp uint x = h0 % COLOR_CUCKOO_TWIDTH; + highp uint y = h0 / COLOR_CUCKOO_TWIDTH; uvec4 customEntry = 
texelFetch(custom_color_texture, ivec2(x, y), 0); uvec3 customColor = customEntry.gba; @@ -41,8 +53,9 @@ export const convertCellIdToRGB: ShaderModule = { return vec3(-1); } - return vec3(customEntry.gba) / 255.; + return vec3(customColor) / 255.; } + vec3 convertCellIdToRGB(vec4 idHigh, vec4 idLow) { /* This function maps from a segment id to a color with a pattern. @@ -71,9 +84,6 @@ export const convertCellIdToRGB: ShaderModule = { float colorHue = rgb2hsv(colormapJet(colorValueDecimal)).x; float colorSaturation = 1.; float colorValue = 1.; - // For historical reference: the old color generation was: - // float lastEightBits = id.r; - // float colorHue = mod(lastEightBits * (golden_ratio - 1.0), 1.0); uint integerValue = vec4ToUint(idLow); vec3 customColor = attemptCustomColorLookUp(integerValue, custom_color_seeds[0]); @@ -269,18 +279,22 @@ export const getCrossHairOverlay: ShaderModule = { }; export const getSegmentId: ShaderModule = { - requirements: [binarySearchIndex, getRgbaAtIndex], + requirements: [getRgbaAtIndex, convertCellIdToRGB, attemptMappingLookUp], code: ` <% _.each(segmentationLayerNames, function(segmentationName, layerIndex) { %> - vec4[2] getSegmentId_<%= segmentationName %>(vec3 worldPositionUVW) { - vec4[2] volume_color; + void getSegmentId_<%= segmentationName %>(vec3 worldPositionUVW, out vec4[2] segment_id, out vec4[2] mapped_id) { vec3 transformedCoordUVW = transDim((<%= segmentationName %>_transform * vec4(transDim(worldPositionUVW), 1.0)).xyz); if (isOutsideOfBoundingBox(transformedCoordUVW)) { - return volume_color; + // Some GPUs don't null-initialize the variables. 
+ segment_id[0] = vec4(0.); + segment_id[1] = vec4(0.); + mapped_id[0] = vec4(0.); + mapped_id[1] = vec4(0.); + return; } - volume_color = + segment_id = getSegmentIdOrFallback( <%= formatNumberAsGLSLFloat(colorLayerNames.length + layerIndex) %>, <%= segmentationName %>_data_texture_width, @@ -291,38 +305,48 @@ export const getSegmentId: ShaderModule = { ); // Depending on the packing degree, the returned volume color contains extra values - // which should be ignored (in the binary search as well as when comparing - // a cell id with the hovered cell passed via uniforms, for example). + // which should be ignored (e.g., when comparing a cell id with the hovered cell + // passed via uniforms). <% if (textureLayerInfos[segmentationName].packingDegree === 4) { %> - volume_color[1] = vec4(volume_color[1].r, 0.0, 0.0, 0.0); + segment_id[1] = vec4(segment_id[1].r, 0.0, 0.0, 0.0); <% } else if (textureLayerInfos[segmentationName].packingDegree === 2) { %> - volume_color[1] = vec4(volume_color[1].r, volume_color[1].g, 0.0, 0.0); + segment_id[1] = vec4(segment_id[1].r, segment_id[1].g, 0.0, 0.0); <% } %> - if (isMappingEnabled) { - // Note that currently only the lower 32 bits of the segmentation - // are used for applying the JSON mapping. + mapped_id[0] = 255. * segment_id[0]; // High + mapped_id[1] = 255. * segment_id[1]; // Low - float index = binarySearchIndex( - segmentation_mapping_lookup_texture, - mappingSize, - volume_color[1] - ); - if (index != -1.0) { - volume_color[1] = getRgbaAtIndex( - segmentation_mapping_texture, - <%= mappingTextureWidth %>, - index - ); - } else if (hideUnmappedIds) { - volume_color[1] = vec4(0.0); + uint high_integer = vec4ToUint(mapped_id[0]); + uint low_integer = vec4ToUint(mapped_id[1]); + + if (shouldApplyMappingOnGPU) { + ivec2 mapped_entry = is_mapping_64bit + ? 
attemptMappingLookUp64(high_integer, low_integer, mapping_seeds[0]) + : attemptMappingLookUp32(low_integer, mapping_seeds[0]); + if (mapped_entry.r == -1) { + mapped_entry = is_mapping_64bit + ? attemptMappingLookUp64(high_integer, low_integer, mapping_seeds[1]) + : attemptMappingLookUp32(low_integer, mapping_seeds[1]); + } + if (mapped_entry.r == -1) { + mapped_entry = is_mapping_64bit + ? attemptMappingLookUp64(high_integer, low_integer, mapping_seeds[2]) + : attemptMappingLookUp32(low_integer, mapping_seeds[2]); + } + if (mapped_entry.r != -1) { + mapped_id[0] = uintToVec4(uint(mapped_entry[0])); + mapped_id[1] = uintToVec4(uint(mapped_entry[1])); + } else if (hideUnmappedIds || mappingIsPartial) { + // If the mapping is partially known to the front-end (this is the case for HDF5 mappings), + // we hide unmapped ids. As soon as they are loaded, the segments will appear. + mapped_id[0] = vec4(0.0); + mapped_id[1] = vec4(0.0); } } - volume_color[0] *= 255.0; - volume_color[1] *= 255.0; - return volume_color; + segment_id[0] *= 255.0; + segment_id[1] *= 255.0; } <% }) %> `, diff --git a/frontend/javascripts/oxalis/shaders/utils.glsl.ts b/frontend/javascripts/oxalis/shaders/utils.glsl.ts index c46d5a6191c..b61728698eb 100644 --- a/frontend/javascripts/oxalis/shaders/utils.glsl.ts +++ b/frontend/javascripts/oxalis/shaders/utils.glsl.ts @@ -293,20 +293,6 @@ export const vec4ToFloat: ShaderModule = { } `, }; -export const greaterThanVec4: ShaderModule = { - code: ` - bool greaterThanVec4(vec4 x, vec4 y) { - if (x.a > y.a) return true; - if (x.a < y.a) return false; - if (x.b > y.b) return true; - if (x.b < y.b) return false; - if (x.g > y.g) return true; - if (x.g < y.g) return false; - if (x.r > y.r) return true; - else return false; - } - `, -}; export const transDim: ShaderModule = { code: ` // Similar to the transDim function in dimensions.js, this function transposes dimensions for the current plane. 
diff --git a/frontend/javascripts/oxalis/store.ts b/frontend/javascripts/oxalis/store.ts index 3cf1c78cb7c..5a20f53bc40 100644 --- a/frontend/javascripts/oxalis/store.ts +++ b/frontend/javascripts/oxalis/store.ts @@ -1,4 +1,4 @@ -import { createStore, applyMiddleware } from "redux"; +import { createStore, applyMiddleware, Middleware } from "redux"; import { enableBatching } from "redux-batched-actions"; import createSagaMiddleware, { type Saga } from "redux-saga"; import type { @@ -255,7 +255,7 @@ export type VolumeTracing = TracingBase & { readonly contourList: Array; readonly fallbackLayer?: string; readonly mappingName?: string | null | undefined; - readonly mappingIsEditable?: boolean; + readonly hasEditableMapping?: boolean; readonly mappingIsLocked?: boolean; readonly hasSegmentIndex: boolean; }; @@ -347,6 +347,7 @@ export type QuickSelectConfig = { export type UserConfiguration = { readonly autoSaveLayouts: boolean; readonly autoRenderMeshInProofreading: boolean; + readonly selectiveVisibilityInProofreading: boolean; readonly brushSize: number; readonly clippingDistance: number; readonly clippingDistanceArbitrary: number; @@ -393,7 +394,10 @@ export type RecommendedConfiguration = Partial< // A histogram value of undefined indicates that the histogram hasn't been fetched yet // whereas a value of null indicates that the histogram couldn't be fetched export type HistogramDataForAllLayers = Record; -export type Mapping = Map; +export type Mapping = Map | Map; +export type NumberLike = number | bigint; +export type NumberLikeMap = Map; + export type MappingType = "JSON" | "HDF5"; export type ActiveMappingInfo = { readonly mappingName: string | null | undefined; @@ -401,7 +405,6 @@ export type ActiveMappingInfo = { readonly mappingColors: number[] | null | undefined; readonly hideUnmappedIds: boolean; readonly mappingStatus: MappingStatus; - readonly mappingSize: number; readonly mappingType: MappingType; }; export type TemporaryConfiguration = { @@ -411,6 
+414,7 @@ export type TemporaryConfiguration = { readonly controlMode: ControlMode; readonly mousePosition: Vector2 | null | undefined; readonly hoveredSegmentId: number | null; + readonly hoveredUnmappedSegmentId: number | null; readonly activeMappingByLayer: Record; readonly isMergerModeEnabled: boolean; readonly gpuSetup: { @@ -631,7 +635,7 @@ const combinedReducers = reduceReducers( const store = createStore( enableBatching(combinedReducers), defaultState, - applyMiddleware(actionLoggerMiddleware, overwriteActionMiddleware, sagaMiddleware), + applyMiddleware(actionLoggerMiddleware, overwriteActionMiddleware, sagaMiddleware as Middleware), ); export function startSagas(rootSaga: Saga) { diff --git a/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx b/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx index 70b48a40281..c78d34c5c36 100644 --- a/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx +++ b/frontend/javascripts/oxalis/view/action-bar/toolbar_view.tsx @@ -149,6 +149,10 @@ const handleToggleAutomaticMeshRendering = (value: boolean) => { Store.dispatch(updateUserSettingAction("autoRenderMeshInProofreading", value)); }; +const handleToggleSelectiveVisibilityInProofreading = (value: boolean) => { + Store.dispatch(updateUserSettingAction("selectiveVisibilityInProofreading", value)); +}; + const handleSetTool = (event: RadioChangeEvent) => { const value = event.target.value as AnnotationTool; Store.dispatch(setToolAction(value)); @@ -413,7 +417,7 @@ function AdditionalSkeletonModesButtons() { getActiveSegmentationTracing(state), ); const isEditableMappingActive = - segmentationTracingLayer != null && !!segmentationTracingLayer.mappingIsEditable; + segmentationTracingLayer != null && !!segmentationTracingLayer.hasEditableMapping; const isMappingLocked = segmentationTracingLayer != null && !!segmentationTracingLayer.mappingIsLocked; const isMergerModeDisabled = isEditableMappingActive || isMappingLocked; @@ -485,10 +489,14 @@ 
function AdditionalSkeletonModesButtons() { } const mapId = (volumeTracingId: string | null | undefined, id: number) => { + // Note that the return value can be an unmapped id even when + // a mapping is active, if it is a HDF5 mapping that is partially loaded + // and no entry exists yet for the input id. if (!volumeTracingId) { return null; } const { cube } = Model.getSegmentationTracingLayer(volumeTracingId); + return cube.mapId(id); }; @@ -863,9 +871,8 @@ export default function ToolbarView() { const hasSkeleton = useSelector((state: OxalisState) => state.tracing.skeleton != null); const isAgglomerateMappingEnabled = useSelector(hasAgglomerateMapping); - const [lastForcefulDisabledTool, setLastForcefulDisabledTool] = useState( - null, - ); + const [lastForcefullyDisabledTool, setLastForcefullyDisabledTool] = + useState(null); const isVolumeModificationAllowed = useSelector( (state: OxalisState) => !hasEditableMapping(state), ); @@ -897,25 +904,32 @@ export default function ToolbarView() { // the tools via the w shortcut. In that case, the effect-hook is re-executed // and the tool is switched to MOVE. const disabledInfoForCurrentTool = disabledInfosForTools[activeTool]; + const isLastForcefullyDisabledToolAvailable = + lastForcefullyDisabledTool != null && + !disabledInfosForTools[lastForcefullyDisabledTool].isDisabled; - // biome-ignore lint/correctness/useExhaustiveDependencies: Adding disabledInfosForTools[lastForcefulDisabledTool].isDisabled as dependency requires another null-check which makes the dependency itself quite tedious. 
useEffect(() => { if (disabledInfoForCurrentTool.isDisabled) { - setLastForcefulDisabledTool(activeTool); + setLastForcefullyDisabledTool(activeTool); Store.dispatch(setToolAction(AnnotationToolEnum.MOVE)); } else if ( - lastForcefulDisabledTool != null && - !disabledInfosForTools[lastForcefulDisabledTool].isDisabled && + lastForcefullyDisabledTool != null && + isLastForcefullyDisabledToolAvailable && activeTool === AnnotationToolEnum.MOVE ) { // Re-enable the tool that was disabled before. - setLastForcefulDisabledTool(null); - Store.dispatch(setToolAction(lastForcefulDisabledTool)); + setLastForcefullyDisabledTool(null); + Store.dispatch(setToolAction(lastForcefullyDisabledTool)); } else if (activeTool !== AnnotationToolEnum.MOVE) { // Forget the last disabled tool as another tool besides the move tool was selected. - setLastForcefulDisabledTool(null); + setLastForcefullyDisabledTool(null); } - }, [activeTool, disabledInfoForCurrentTool, lastForcefulDisabledTool]); + }, [ + activeTool, + disabledInfoForCurrentTool, + isLastForcefullyDisabledToolAvailable, + lastForcefullyDisabledTool, + ]); const isShiftPressed = useKeyPress("Shift"); const isControlOrMetaPressed = useKeyPress("ControlOrMeta"); @@ -1383,9 +1397,16 @@ function ProofReadingComponents() { const autoRenderMeshes = useSelector( (state: OxalisState) => state.userConfiguration.autoRenderMeshInProofreading, ); - const buttonStyle = autoRenderMeshes ? 
ACTIVE_BUTTON_STYLE : NARROW_BUTTON_STYLE; + const selectiveVisibilityInProofreading = useSelector( + (state: OxalisState) => state.userConfiguration.selectiveVisibilityInProofreading, + ); + return ( - <> + handleToggleAutomaticMeshRendering(!autoRenderMeshes)} > - + + handleToggleSelectiveVisibilityInProofreading(!selectiveVisibilityInProofreading) + } + > + + + ); } diff --git a/frontend/javascripts/oxalis/view/context_menu.tsx b/frontend/javascripts/oxalis/view/context_menu.tsx index 1b6eb5c1f8a..770c296e954 100644 --- a/frontend/javascripts/oxalis/view/context_menu.tsx +++ b/frontend/javascripts/oxalis/view/context_menu.tsx @@ -1382,7 +1382,7 @@ function WkContextMenu() { hideContextMenu={hideContextMenu} contextMenuPosition={props.contextMenuPosition} > - + {props.contextMenuPosition != null ? :
} ); } @@ -1496,10 +1496,6 @@ function ContextMenuInner(propsWithInputRef: Props) { [contextMenuPosition, clickedSegmentOrMeshId, lastTimeSegmentInfoShouldBeFetched], ); - if (contextMenuPosition == null || maybeViewport == null) { - return <>; - } - const activeTreeId = skeletonTracing != null ? skeletonTracing.activeTreeId : null; const activeNodeId = skeletonTracing?.activeNodeId; @@ -1700,19 +1696,21 @@ function ContextMenuInner(propsWithInputRef: Props) { }, mode: "vertical", items: - maybeClickedNodeId != null - ? getNodeContextMenuOptions({ - clickedNodeId: maybeClickedNodeId, - infoRows, - viewport: maybeViewport, - ...props, - }) - : getNoNodeContextMenuOptions({ - segmentIdAtPosition, - infoRows, - viewport: maybeViewport, - ...props, - }), + maybeViewport == null + ? [] + : maybeClickedNodeId != null + ? getNodeContextMenuOptions({ + clickedNodeId: maybeClickedNodeId, + infoRows, + viewport: maybeViewport, + ...props, + }) + : getNoNodeContextMenuOptions({ + segmentIdAtPosition, + infoRows, + viewport: maybeViewport, + ...props, + }), }; if (inputRef == null || inputRef.current == null) return null; @@ -1721,12 +1719,12 @@ function ContextMenuInner(propsWithInputRef: Props) { return ( + refContent} - // @ts-ignore destroyPopupOnHide >
diff --git a/frontend/javascripts/oxalis/view/left-border-tabs/mapping_settings_view.tsx b/frontend/javascripts/oxalis/view/left-border-tabs/mapping_settings_view.tsx index 89ba4495261..3ce6d24ca1c 100644 --- a/frontend/javascripts/oxalis/view/left-border-tabs/mapping_settings_view.tsx +++ b/frontend/javascripts/oxalis/view/left-border-tabs/mapping_settings_view.tsx @@ -187,13 +187,16 @@ class MappingSettingsView extends React.Component { // or a mapping was activated, e.g. from the API or by selecting one from the dropdown (this.props.isMappingEnabled). const shouldMappingBeEnabled = this.state.shouldMappingBeEnabled || isMappingEnabled; const renderHideUnmappedSegmentsSwitch = - (shouldMappingBeEnabled || isMergerModeEnabled) && mapping && hideUnmappedIds != null; + (shouldMappingBeEnabled || isMergerModeEnabled) && + mapping && + this.props.mappingType === "JSON" && + hideUnmappedIds != null; const isDisabled = isEditableMappingActive || isMappingLocked || isAnnotationLockedByOwner; const disabledMessage = !allowUpdate ? messages["tracing.read_only_mode_notification"](isAnnotationLockedByOwner, isOwner) : isEditableMappingActive ? "The mapping has been edited through proofreading actions and can no longer be disabled or changed." - : mapping + : isMappingEnabled ? "This mapping has been locked to this annotation, because the segmentation was modified while it was active. It can no longer be disabled or changed." : "The segmentation was modified while no mapping was active. 
To ensure a consistent state, mappings can no longer be enabled."; return ( diff --git a/frontend/javascripts/oxalis/view/merger_mode_modal_view.tsx b/frontend/javascripts/oxalis/view/merger_mode_modal_view.tsx index 284b0cc8108..a92cc666f4f 100644 --- a/frontend/javascripts/oxalis/view/merger_mode_modal_view.tsx +++ b/frontend/javascripts/oxalis/view/merger_mode_modal_view.tsx @@ -1,15 +1,13 @@ import * as React from "react"; -import { Modal, Button, Spin, Tooltip, Alert } from "antd"; -import { useSelector } from "react-redux"; -import { hasVisibleUint64Segmentation } from "oxalis/model/accessors/dataset_accessor"; +import { Modal, Button, Spin, Tooltip } from "antd"; + type Props = { isCloseable: boolean; onClose: () => void; progress: number; }; -export default function MergerModeModalView({ isCloseable, onClose, progress }: Props) { - const isUint64SegmentationVisible = useSelector(hasVisibleUint64Segmentation); +export default function MergerModeModalView({ isCloseable, onClose, progress }: Props) { const closeButton = (
} > - {isUint64SegmentationVisible && ( - - )} You just enabled the merger mode. This mode allows to merge segmentation cells by creating trees and nodes. Each tree maps the marked segments (the ones where nodes were created in) to one new segment. Create separate trees for different segments. diff --git a/frontend/javascripts/oxalis/view/plane_view.ts b/frontend/javascripts/oxalis/view/plane_view.ts index c80e6b0a864..9614eaa86b9 100644 --- a/frontend/javascripts/oxalis/view/plane_view.ts +++ b/frontend/javascripts/oxalis/view/plane_view.ts @@ -267,9 +267,16 @@ class PlaneView { if (segmentationTracing == null) { return null; } - return segmentationTracing.activeUnmappedSegmentId; + // If the proofreading tool is not active, pretend that + // activeUnmappedSegmentId is null so that no super-voxel + // is highlighted. + return storeState.uiInformation.activeTool === "PROOFREAD" + ? segmentationTracing.activeUnmappedSegmentId + : null; }, (activeUnmappedSegmentId) => + // Note that this code is responsible for highlighting the *active* + // (not necessarily hovered) segment. 
segmentMeshController.highlightUnmappedSegmentId(activeUnmappedSegmentId), true, ), diff --git a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_list_item.tsx b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_list_item.tsx index 8b2170da54c..e6bdcaac5c7 100644 --- a/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_list_item.tsx +++ b/frontend/javascripts/oxalis/view/right-border-tabs/segments_tab/segment_list_item.tsx @@ -186,10 +186,8 @@ const getMakeSegmentActiveMenuItem = ( type Props = { segment: Segment; - mapId: (arg0: number) => number; - isJSONMappingEnabled: boolean; mappingInfo: ActiveMappingInfo; - centeredSegmentId: number | null | undefined; + isCentered: boolean; selectedSegmentIds: number[] | null | undefined; activeCellId: number | null | undefined; setHoveredSegmentId: (arg0: number | null | undefined) => void; @@ -382,10 +380,8 @@ const MeshInfoItem = React.memo(_MeshInfoItem); function _SegmentListItem({ segment, - mapId, - isJSONMappingEnabled, mappingInfo, - centeredSegmentId, + isCentered, selectedSegmentIds, activeCellId, setHoveredSegmentId, @@ -412,10 +408,8 @@ function _SegmentListItem({ const { modal } = App.useApp(); const isEditingDisabled = !allowUpdate; - const mappedId = mapId(segment.id); - const segmentColorHSLA = useSelector( - (state: OxalisState) => getSegmentColorAsHSLA(state, mappedId), + (state: OxalisState) => getSegmentColorAsHSLA(state, segment.id), (a: Vector4, b: Vector4) => V4.isEqual(a, b), ); const isHoveredSegmentId = useSelector( @@ -424,10 +418,6 @@ function _SegmentListItem({ const segmentColorRGBA = Utils.hslaToRgba(segmentColorHSLA); - if (mappingInfo.hideUnmappedIds && mappedId === 0) { - return null; - } - const andCloseContextMenu = (_ignore?: any) => handleSegmentDropdownMenuVisibility(false, 0); const createSegmentContextMenu = (): MenuProps => ({ @@ -455,13 +445,6 @@ function _SegmentListItem({ ), { key: "changeSegmentColor", - /* - * 
Disable the change-color menu if the segment was mapped to another segment, because - * changing the color wouldn't do anything as long as the mapping is still active. - * This is because the id (A) is mapped to another one (B). So, the user would need - * to change the color of B to see the effect for A. - */ - disabled: segment.id !== mappedId, label: ( - - {segment.id} → {mappedId} - - - ); // Only if segment.name is truthy, render additional info. return segment.name ? ( @@ -646,7 +621,7 @@ function _SegmentListItem({ {/* Show Default Segment Name if another one is already defined*/} {getSegmentIdDetails()} - {segment.id === centeredSegmentId ? ( + {isCentered ? ( ; dataset: APIDataset; - isJSONMappingEnabled: boolean; mappingInfo: ActiveMappingInfo; centeredSegmentId: number; hasVolumeTracing: boolean | undefined; @@ -192,8 +190,6 @@ const mapStateToProps = (state: OxalisState): StateProps => { activeCellId: activeVolumeTracing?.activeCellId, meshes: meshesForCurrentAdditionalCoordinates || EMPTY_OBJECT, // satisfy ts dataset: state.dataset, - isJSONMappingEnabled: - mappingInfo.mappingStatus === MappingStatusEnum.ENABLED && mappingInfo.mappingType === "JSON", mappingInfo, centeredSegmentId: getSegmentIdForPosition(getPosition(state.flycam)), hasVolumeTracing: state.tracing.volumes.length > 0, @@ -346,16 +342,6 @@ const formatMeshFile = (meshFile: APIMeshFile | null | undefined): string | null return `${meshFile.meshFileName} (${meshFile.mappingName})`; }; -function _getMapIdFn(visibleSegmentationLayer: APISegmentationLayer | null | undefined) { - const dataLayer = - visibleSegmentationLayer != null ? Model.getLayerByName(visibleSegmentationLayer.name) : null; - - const mapId = dataLayer != null ? 
(id: number) => dataLayer.cube.mapId(id) : (id: number) => id; - return mapId; -} - -const getMapIdFn = memoizeOne(_getMapIdFn); - function renderEmptyMeshFileSelect() { return ( { const isSegmentHierarchyEmpty = !( allSegments?.size() || this.props.segmentGroups.length ); - const mapId = getMapIdFn(this.props.visibleSegmentationLayer); if (!this.props.visibleSegmentationLayer) { return ( @@ -1715,15 +1700,13 @@ class SegmentsView extends React.Component { return ( { currentMeshFile={this.props.currentMeshFile} onRenameStart={this.onRenameStart} onRenameEnd={this.onRenameEnd} + // TODO #7895: The line below causes SegmentItems to always rerender + // if SegmentsView rerenders. multiSelectMenu={multiSelectMenu()} activeVolumeTracing={this.props.activeVolumeTracing} /> diff --git a/frontend/javascripts/oxalis/view/statusbar.tsx b/frontend/javascripts/oxalis/view/statusbar.tsx index 861c3eae2c5..c492bfae9cc 100644 --- a/frontend/javascripts/oxalis/view/statusbar.tsx +++ b/frontend/javascripts/oxalis/view/statusbar.tsx @@ -371,6 +371,7 @@ function SegmentInfo() { const hoveredSegmentId = useSelector( (state: OxalisState) => state.temporaryConfiguration.hoveredSegmentId, ); + if (hasVisibleSegmentation == null) { return null; } diff --git a/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts b/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts index 55fd87b239f..caafb57d2bc 100644 --- a/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts +++ b/frontend/javascripts/test/model/binary/layers/wkstore_adapter.spec.ts @@ -27,6 +27,7 @@ const mockedCube = { [1, 1, 1], [2, 2, 2], ]), + triggerBucketDataChanged: () => {}, }; const StoreMock = { getState: () => ({ @@ -44,6 +45,7 @@ const StoreMock = { name: "localhost", url: "http://localhost:9000", }, + volumes: [], }, datasetConfiguration: { fourBit: _fourBit, diff --git a/frontend/javascripts/test/model/binary/pullqueue.spec.ts 
b/frontend/javascripts/test/model/binary/pullqueue.spec.ts index 94e0055446c..1d6d6ab209a 100644 --- a/frontend/javascripts/test/model/binary/pullqueue.spec.ts +++ b/frontend/javascripts/test/model/binary/pullqueue.spec.ts @@ -22,6 +22,8 @@ const WkstoreAdapterMock = { mockRequire("oxalis/model/bucket_data_handling/wkstore_adapter", WkstoreAdapterMock); const mockedCube = { isSegmentation: true, + shouldEagerlyMaintainUsedValueSet: () => false, + triggerBucketDataChanged: () => false, }; const layer = { url: "url", @@ -57,6 +59,7 @@ test.beforeEach((t) => { containsBucket: sinon.stub().returns(true), removeOutsideArea: sinon.stub(), }, + shouldEagerlyMaintainUsedValueSet: () => false, }; const connectionInfo = { log: sinon.stub(), diff --git a/frontend/javascripts/test/model/binary/temporal_bucket_manager.spec.ts b/frontend/javascripts/test/model/binary/temporal_bucket_manager.spec.ts index 7963ea99ff5..d1cf2dd5d67 100644 --- a/frontend/javascripts/test/model/binary/temporal_bucket_manager.spec.ts +++ b/frontend/javascripts/test/model/binary/temporal_bucket_manager.spec.ts @@ -21,6 +21,7 @@ const test: TestInterface<{ isSegmentation: boolean; pushQueue: any; pullQueue: any; + triggerBucketDataChanged: () => void; }; manager: typeof TemporalBucketManager; }> = anyTest as any; @@ -37,6 +38,7 @@ test.beforeEach((t) => { isSegmentation: true, pushQueue, pullQueue, + triggerBucketDataChanged: () => {}, }; const manager = new TemporalBucketManager(pullQueue, pushQueue); t.context.cube = mockedCube; diff --git a/frontend/javascripts/test/model/cuckoo_table.spec.ts b/frontend/javascripts/test/model/cuckoo_table.spec.ts index 175bf7ca242..ec8977bc125 100644 --- a/frontend/javascripts/test/model/cuckoo_table.spec.ts +++ b/frontend/javascripts/test/model/cuckoo_table.spec.ts @@ -8,7 +8,7 @@ import "test/mocks/updatable_texture.mock"; type Entry = [number, Vector3]; -const { CuckooTable } = mock.reRequire("oxalis/model/bucket_data_handling/cuckoo_table"); +const { 
CuckooTableVec3 } = mock.reRequire("libs/cuckoo/cuckoo_table_vec3"); function generateRandomEntry(): [number, Vector3] { return [ @@ -21,12 +21,14 @@ function generateRandomEntry(): [number, Vector3] { ]; } -function generateRandomEntrySet() { - const count = 1600; +export function generateRandomCuckooEntrySet( + generateEntry: () => [K, V], + count: number = 1600, +) { const set = new Set(); const entries = []; for (let i = 0; i < count; i++) { - const entry = generateRandomEntry(); + const entry = generateEntry(); const entryKey = entry[0]; if (set.has(entryKey)) { i--; @@ -49,9 +51,9 @@ function isValueEqual(t: ExecutionContext, val1: Vector3, val2: Vector3) { t.true(val1[2] === val2[2]); } -test.serial("CuckooTable: Basic", (t) => { - const entries = generateRandomEntrySet(); - const ct = CuckooTable.fromCapacity(entries.length); +test("CuckooTableVec3: Basic", (t) => { + const entries = generateRandomCuckooEntrySet(generateRandomEntry); + const ct = CuckooTableVec3.fromCapacity(entries.length); for (const entry of entries) { ct.set(entry[0], entry[1]); @@ -67,10 +69,10 @@ test.serial("CuckooTable: Basic", (t) => { } }); -test.serial("CuckooTable: Speed should be alright", (t) => { +test("CuckooTableVec3: Speed should be alright", (t) => { const RUNS = 100; - const hashSets = _.range(RUNS).map(() => generateRandomEntrySet()); - const tables = _.range(RUNS).map(() => CuckooTable.fromCapacity(hashSets[0].length)); + const hashSets = _.range(RUNS).map(() => generateRandomCuckooEntrySet(generateRandomEntry)); + const tables = _.range(RUNS).map(() => CuckooTableVec3.fromCapacity(hashSets[0].length)); const durations = []; for (let idx = 0; idx < RUNS; idx++) { @@ -87,8 +89,8 @@ test.serial("CuckooTable: Speed should be alright", (t) => { t.true(_.mean(durations) < 0.1); }); -test.serial("CuckooTable: Repeated sets should work", (t) => { - const ct = CuckooTable.fromCapacity(1); +test("CuckooTableVec3: Repeated sets should work", (t) => { + const ct = 
CuckooTableVec3.fromCapacity(1); // This is a regression test for a bug which resulted in the // same key being multiple times in the table. Due to the random @@ -104,8 +106,8 @@ test.serial("CuckooTable: Repeated sets should work", (t) => { } }); -test.serial("CuckooTable: Should throw error when exceeding capacity", (t) => { - const ct = CuckooTable.fromCapacity(1); +test("CuckooTableVec3: Should throw error when exceeding capacity", (t) => { + const ct = CuckooTableVec3.fromCapacity(1); t.throws(() => { for (let _idx = 0; _idx < ct.entryCapacity + 1; _idx++) { @@ -116,3 +118,24 @@ test.serial("CuckooTable: Should throw error when exceeding capacity", (t) => { } }); }); + +test("CuckooTableVec3: Maxing out capacity", (t) => { + const base = 128; + const attemptCount = 10; + for (let attempt = 0; attempt < attemptCount; attempt++) { + let entries; + let ct; + + ct = new CuckooTableVec3(base); + entries = generateRandomCuckooEntrySet(generateRandomEntry, ct.getCriticalCapacity()); + for (const entry of entries) { + ct.set(entry[0], entry[1]); + } + + // Check that all previously set items are still + // intact. 
+ for (const innerEntry of entries) { + isValueEqual(t, innerEntry[1], ct.get(innerEntry[0])); + } + } +}); diff --git a/frontend/javascripts/test/model/cuckoo_table_uint32.spec.ts b/frontend/javascripts/test/model/cuckoo_table_uint32.spec.ts new file mode 100644 index 00000000000..f7e6e97a84a --- /dev/null +++ b/frontend/javascripts/test/model/cuckoo_table_uint32.spec.ts @@ -0,0 +1,48 @@ +import mock from "mock-require"; +import test, { ExecutionContext } from "ava"; +import _ from "lodash"; + +import "test/mocks/globals.mock"; +import "test/mocks/updatable_texture.mock"; +import { generateRandomCuckooEntrySet } from "./cuckoo_table.spec"; + +type Key = number; +type Value = number; +type Entry = [Key, Value]; + +const { CuckooTableUint32 } = mock.reRequire("libs/cuckoo/cuckoo_table_uint32"); + +function generateRandomEntry(): Entry { + return [Math.floor(Math.random() * 2 ** 32), Math.floor(Math.random() * 2 ** 32)]; +} + +function isValueEqual(t: ExecutionContext, val1: Value, val2: Value) { + if (!(val1 === val2)) { + // Throw an error to avoid that ava executes the rest of the test. + throw new Error(`${val1} !== ${val2}`); + } + + t.true(val1 === val2); +} + +test("CuckooTableUint32: Maxing out capacity", (t) => { + const base = 128; + + const attemptCount = 10; + for (let attempt = 0; attempt < attemptCount; attempt++) { + let entries; + let ct; + + ct = new CuckooTableUint32(base); + entries = generateRandomCuckooEntrySet(generateRandomEntry, ct.getCriticalCapacity()); + for (const entry of entries) { + ct.set(entry[0], entry[1]); + } + + // Check that all previously set items are still + // intact. 
+ for (const innerEntry of entries) { + isValueEqual(t, innerEntry[1], ct.get(innerEntry[0])); + } + } +}); diff --git a/frontend/javascripts/test/model/cuckoo_table_uint64.spec.ts b/frontend/javascripts/test/model/cuckoo_table_uint64.spec.ts new file mode 100644 index 00000000000..9ef50edd560 --- /dev/null +++ b/frontend/javascripts/test/model/cuckoo_table_uint64.spec.ts @@ -0,0 +1,51 @@ +import mock from "mock-require"; +import test, { ExecutionContext } from "ava"; +import _ from "lodash"; + +import "test/mocks/globals.mock"; +import "test/mocks/updatable_texture.mock"; +import { generateRandomCuckooEntrySet } from "./cuckoo_table.spec"; + +type Key = [number, number]; +type Value = [number, number]; +type Entry = [Key, Value]; + +const { CuckooTableUint64 } = mock.reRequire("libs/cuckoo/cuckoo_table_uint64"); + +function generateRandomEntry(): Entry { + return [ + [Math.floor(Math.random() * 2 ** 32), Math.floor(Math.random() * 2 ** 32)], + [Math.floor(Math.random() * 2 ** 32), Math.floor(Math.random() * 2 ** 32)], + ]; +} + +function isValueEqual(t: ExecutionContext, val1: Value, val2: Value) { + if (!(val1[0] === val2[0] && val1[1] === val2[1])) { + // Throw an error to avoid that ava executes the rest of the test. + throw new Error(`${val1} !== ${val2}`); + } + + t.deepEqual(val1, val2); +} + +test("CuckooTableUint64: Maxing out capacity", (t) => { + const base = 128; + const attemptCount = 10; + for (let attempt = 0; attempt < attemptCount; attempt++) { + let entries; + let ct; + + ct = new CuckooTableUint64(base); + entries = generateRandomCuckooEntrySet(generateRandomEntry, ct.getCriticalCapacity()); + + for (const entry of entries) { + ct.set(entry[0], entry[1]); + } + + // Check that all previously set items are still + // intact. 
+ for (const innerEntry of entries) { + isValueEqual(t, innerEntry[1], ct.get(innerEntry[0])); + } + } +}); diff --git a/frontend/javascripts/test/model/cuckoo_table_vec5.spec.ts b/frontend/javascripts/test/model/cuckoo_table_vec5.spec.ts index 9d1b0dc6097..3e5e882d2fb 100644 --- a/frontend/javascripts/test/model/cuckoo_table_vec5.spec.ts +++ b/frontend/javascripts/test/model/cuckoo_table_vec5.spec.ts @@ -4,13 +4,14 @@ import _ from "lodash"; import "test/mocks/globals.mock"; import "test/mocks/updatable_texture.mock"; +import { generateRandomCuckooEntrySet } from "./cuckoo_table.spec"; type Vector5 = [number, number, number, number, number]; type Key = Vector5; // [x, y, z, layerIdx, requestedMagIdx] type Value = number; // [address, actualMagIdx] type Entry = [Key, Value]; -const { CuckooTableVec5 } = mock.reRequire("oxalis/model/bucket_data_handling/cuckoo_table_vec5"); +const { CuckooTableVec5 } = mock.reRequire("libs/cuckoo/cuckoo_table_vec5"); function generateRandomEntry(): Entry { return [ @@ -25,23 +26,6 @@ function generateRandomEntry(): Entry { ]; } -function generateRandomEntrySet() { - const count = 1600; - const set = new Set(); - const entries = []; - for (let i = 0; i < count; i++) { - const entry = generateRandomEntry(); - const entryKey = entry[0]; - if (set.has(entryKey)) { - i--; - continue; - } - set.add(entryKey); - entries.push(entry); - } - return entries; -} - function isValueEqual(t: ExecutionContext, val1: Value, val2: Value) { if (!(val1 === val2)) { // Throw an error to avoid that ava executes the rest of the test. 
@@ -51,7 +35,7 @@ function isValueEqual(t: ExecutionContext, val1: Value, val2: Value) { t.true(val1 === val2); } -test.serial("CuckooTableVec5: Compression/Decompression roundtrip", (t) => { +test("CuckooTableVec5: Compression/Decompression roundtrip", (t) => { const ct = CuckooTableVec5.fromCapacity(0); const expectedEntry = [[363, 213, 995, 28, 58], 1547497]; @@ -60,8 +44,8 @@ test.serial("CuckooTableVec5: Compression/Decompression roundtrip", (t) => { t.deepEqual(expectedEntry, actualEntry); }); -test.serial("CuckooTableVec5: Basic", (t) => { - const entries = generateRandomEntrySet(); +test("CuckooTableVec5: Basic", (t) => { + const entries = generateRandomCuckooEntrySet(generateRandomEntry); const ct = CuckooTableVec5.fromCapacity(entries.length); for (const entry of entries) { @@ -78,9 +62,9 @@ test.serial("CuckooTableVec5: Basic", (t) => { } }); -test.serial("CuckooTableVec5: Speed should be alright", (t) => { +test("CuckooTableVec5: Speed should be alright", (t) => { const RUNS = 100; - const hashSets = _.range(RUNS).map(() => generateRandomEntrySet()); + const hashSets = _.range(RUNS).map(() => generateRandomCuckooEntrySet(generateRandomEntry)); const tables = _.range(RUNS).map(() => CuckooTableVec5.fromCapacity(hashSets[0].length)); const durations = []; @@ -98,7 +82,7 @@ test.serial("CuckooTableVec5: Speed should be alright", (t) => { t.true(_.mean(durations) < 0.1); }); -test.serial("CuckooTableVec5: Repeated sets should work", (t) => { +test("CuckooTableVec5: Repeated sets should work", (t) => { const ct = CuckooTableVec5.fromCapacity(1); // This is a regression test for a bug which resulted in the @@ -115,7 +99,7 @@ test.serial("CuckooTableVec5: Repeated sets should work", (t) => { } }); -test.serial("CuckooTableVec5: Should throw error when exceeding capacity", (t) => { +test("CuckooTableVec5: Should throw error when exceeding capacity", (t) => { const ct = CuckooTableVec5.fromCapacity(1); t.throws(() => { @@ -127,3 +111,23 @@ 
test.serial("CuckooTableVec5: Should throw error when exceeding capacity", (t) = } }); }); + +test("CuckooTableVec5: Maxing out capacity", (t) => { + const base = 128; + const attemptCount = 10; + for (let attempt = 0; attempt < attemptCount; attempt++) { + let entries; + let ct; + ct = new CuckooTableVec5(base); + entries = generateRandomCuckooEntrySet(generateRandomEntry, ct.getCriticalCapacity()); + for (const entry of entries) { + ct.set(entry[0], entry[1]); + } + + // Check that all previously set items are still + // intact. + for (const innerEntry of entries) { + isValueEqual(t, innerEntry[1], ct.get(innerEntry[0])); + } + } +}); diff --git a/frontend/javascripts/test/model/texture_bucket_manager.spec.ts b/frontend/javascripts/test/model/texture_bucket_manager.spec.ts index 9840110963d..c58160c2988 100644 --- a/frontend/javascripts/test/model/texture_bucket_manager.spec.ts +++ b/frontend/javascripts/test/model/texture_bucket_manager.spec.ts @@ -5,7 +5,7 @@ import { Vector4 } from "oxalis/constants"; import "test/mocks/globals.mock"; import "test/mocks/updatable_texture.mock"; -import { CuckooTableVec5 } from "oxalis/model/bucket_data_handling/cuckoo_table_vec5"; +import { CuckooTableVec5 } from "libs/cuckoo/cuckoo_table_vec5"; const LAYER_INDEX = 0; const CUCKOO_TEXTURE_WIDTH = 64; @@ -15,6 +15,7 @@ const temporalBucketManagerMock = { }; const mockedCube = { isSegmentation: false, + triggerBucketDataChanged: () => {}, }; const { default: TextureBucketManager } = mock.reRequire( "oxalis/model/bucket_data_handling/texture_bucket_manager", diff --git a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts index 9b9aed8b78b..d3be538af72 100644 --- a/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts +++ b/frontend/javascripts/test/sagas/volumetracing/volumetracing_saga.spec.ts @@ -124,7 +124,6 @@ const dummyActiveMapping: ActiveMappingInfo = { 
mappingColors: [], hideUnmappedIds: false, mappingStatus: "ENABLED", - mappingSize: 0, mappingType: "HDF5", }; diff --git a/frontend/javascripts/types/api_flow_types.ts b/frontend/javascripts/types/api_flow_types.ts index 7f2f2535e0d..f36f901890d 100644 --- a/frontend/javascripts/types/api_flow_types.ts +++ b/frontend/javascripts/types/api_flow_types.ts @@ -818,7 +818,7 @@ export type ServerVolumeTracing = ServerTracingBase & { // https://github.com/scalableminds/webknossos/pull/4755 resolutions?: Array; mappingName?: string | null | undefined; - mappingIsEditable?: boolean; + hasEditableMapping?: boolean; mappingIsLocked?: boolean; hasSegmentIndex?: boolean; }; diff --git a/package.json b/package.json index c92e26547a9..d2947ce43b6 100644 --- a/package.json +++ b/package.json @@ -210,7 +210,7 @@ "react-virtualized": "^9.22.2", "redux": "^3.6.0", "redux-batched-actions": "^0.5.0", - "redux-saga": "^1.0.0", + "redux-saga": "^1.3.0", "resumablejs": "^1.1.0", "saxophone": "^0.8.0", "three": "^0.137.0", diff --git a/tools/proxy/proxy.js b/tools/proxy/proxy.js index 6af1b11c4d5..6b8077e9160 100644 --- a/tools/proxy/proxy.js +++ b/tools/proxy/proxy.js @@ -1,4 +1,3 @@ -// @noflow const express = require("express"); const httpProxy = require("http-proxy"); const { spawn, exec } = require("child_process"); @@ -91,10 +90,34 @@ process.on("SIGINT", shutdown); proxy.on("error", (err, req, res) => { console.error(loggingPrefix, "Sending Bad gateway due to the following error: ", err); - res.writeHead(503); - res.end( - "Bad gateway. The server might still be starting up, please try again in a few seconds or check console output.", - ); + res.writeHead(503, { 'Content-Type': 'text/html' }); + res.end(` + + + 503 Service Unavailable + + + +

Bad gateway

+

The server might still be starting up, please try again in a few seconds or check console output.

+

Reloading in 5 seconds...

+ + + `); }); function toBackend(req, res) { diff --git a/tools/test.sh b/tools/test.sh index 4a06f176691..405df82dd39 100755 --- a/tools/test.sh +++ b/tools/test.sh @@ -2,6 +2,7 @@ testBundlePath="public-test/test-bundle" jsPath="frontend/javascripts" +proto_dir="webknossos-datastore/proto" FIND=find if [ -x "$(command -v gfind)" ]; then @@ -17,8 +18,10 @@ mkdir -p "$testBundlePath" function prepare { rm -rf "$testBundlePath" && mkdir "$testBundlePath" # Webpack with the proto loader isn't used when running the tests, so the proto files need to be prepared manually - pbjs -t json "webknossos-datastore/proto/SkeletonTracing.proto" > "$testBundlePath/SkeletonTracing.proto.json" - pbjs -t json "webknossos-datastore/proto/VolumeTracing.proto" > "$testBundlePath/VolumeTracing.proto.json" + for proto_file in "$proto_dir"/*.proto; do + output_file="$testBundlePath/$(basename "$proto_file").json" + pbjs -t json "$proto_file" > "$output_file" + done # Beginning from target==node13, dynamic imports are not converted anymore by esbuild. Tests which use code # that relies on dynamic imports fails then because the module cannot be found for some reason. 
diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala index 6459dec6ff4..5220d151368 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/controllers/DataSourceController.scala @@ -355,6 +355,31 @@ class DataSourceController @Inject()( } } + def agglomerateIdsForAllSegmentIds( + token: Option[String], + organizationName: String, + datasetName: String, + dataLayerName: String, + mappingName: String + ): Action[ListOfLong] = Action.async(validateProto[ListOfLong]) { implicit request => + accessTokenService.validateAccess(UserAccessRequest.readDataSources(DataSourceId(datasetName, organizationName)), + urlOrHeaderToken(token, request)) { + for { + agglomerateService <- binaryDataServiceHolder.binaryDataService.agglomerateServiceOpt.toFox + agglomerateIds: Array[Long] <- agglomerateService + .agglomerateIdsForAllSegmentIds( + AgglomerateFileKey( + organizationName, + datasetName, + dataLayerName, + mappingName + ) + ) + .toFox + } yield Ok(Json.toJson(agglomerateIds)) + } + } + def update(token: Option[String], organizationName: String, datasetName: String): Action[DataSource] = Action.async(validateJson[DataSource]) { implicit request => accessTokenService.validateAccess(UserAccessRequest.writeDataSource(DataSourceId(datasetName, organizationName)), diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala index fc272329b48..bb7af9becad 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala +++ 
b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/AgglomerateService.scala @@ -271,6 +271,15 @@ class AgglomerateService @Inject()(config: DataStoreConfig) extends DataConverte } + def agglomerateIdsForAllSegmentIds(agglomerateFileKey: AgglomerateFileKey): Box[Array[Long]] = { + val file = agglomerateFileKey.path(dataBaseDir, agglomerateDir, agglomerateFileExtension).toFile + tryo { + val reader = HDF5FactoryProvider.get.openForReading(file) + val agglomerateIds: Array[Long] = reader.uint64().readArray("/segment_to_agglomerate") + agglomerateIds + } + } + def positionForSegmentId(agglomerateFileKey: AgglomerateFileKey, segmentId: Long): Box[Vec3Int] = { val hdfFile = agglomerateFileKey.path(dataBaseDir, agglomerateDir, agglomerateFileExtension).toFile val reader: IHDF5Reader = HDF5FactoryProvider.get.openForReading(hdfFile) diff --git a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala index 3833c82207b..a82ffa6557f 100644 --- a/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala +++ b/webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/BinaryDataService.scala @@ -27,8 +27,8 @@ class BinaryDataService(val dataBaseDir: Path, with DatasetDeleter with LazyLogging { - /* Note that this must stay in sync with the front-end constant - compare https://github.com/scalableminds/webknossos/issues/5223 */ + /* Note that this must stay in sync with the front-end constant MAX_MAG_FOR_AGGLOMERATE_MAPPING + compare https://github.com/scalableminds/webknossos/issues/5223 */ private val MaxMagForAgglomerateMapping = 16 private lazy val bucketProviderCache = new BucketProviderCache(maxEntries = 5000) diff --git a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes 
b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes index e796fc871d3..333787f6b64 100644 --- a/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes +++ b/webknossos-datastore/conf/com.scalableminds.webknossos.datastore.routes @@ -52,6 +52,7 @@ GET /datasets/:organizationName/:datasetName/layers/:dataLayerName/agg GET /datasets/:organizationName/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomerateGraph/:agglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateGraph(token: Option[String], organizationName: String, datasetName: String, dataLayerName: String, mappingName: String, agglomerateId: Long) GET /datasets/:organizationName/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/largestAgglomerateId @com.scalableminds.webknossos.datastore.controllers.DataSourceController.largestAgglomerateId(token: Option[String], organizationName: String, datasetName: String, dataLayerName: String, mappingName: String) POST /datasets/:organizationName/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForSegmentIds(token: Option[String], organizationName: String, datasetName: String, dataLayerName: String, mappingName: String) +GET /datasets/:organizationName/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/agglomeratesForAllSegments @com.scalableminds.webknossos.datastore.controllers.DataSourceController.agglomerateIdsForAllSegmentIds(token: Option[String], organizationName: String, datasetName: String, dataLayerName: String, mappingName: String) GET /datasets/:organizationName/:datasetName/layers/:dataLayerName/agglomerates/:mappingName/positionForSegment @com.scalableminds.webknossos.datastore.controllers.DataSourceController.positionForSegmentViaAgglomerateFile(token: Option[String], organizationName: String, datasetName: String, 
dataLayerName: String, mappingName: String, segmentId: Long) # Mesh files diff --git a/webknossos-datastore/proto/VolumeTracing.proto b/webknossos-datastore/proto/VolumeTracing.proto index 2d5af3f8130..88980ae7e7b 100644 --- a/webknossos-datastore/proto/VolumeTracing.proto +++ b/webknossos-datastore/proto/VolumeTracing.proto @@ -40,7 +40,7 @@ message VolumeTracing { repeated Vec3IntProto resolutions = 15; repeated Segment segments = 16; optional string mappingName = 17; // either a mapping present in the fallback layer, or an editable mapping on the tracingstore - optional bool mappingIsEditable = 18; // the selected mapping is an editable mapping + optional bool hasEditableMapping = 18; // the selected mapping is an editable mapping repeated SegmentGroup segmentGroups = 19; optional bool hasSegmentIndex = 20; repeated AdditionalCoordinateProto editPositionAdditionalCoordinates = 21; diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala index 8280f681dd8..67f9c7c3275 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingController.scala @@ -6,6 +6,7 @@ import com.scalableminds.util.time.Instant import com.scalableminds.util.tools.ExtendedTypes.ExtendedString import com.scalableminds.util.tools.Fox import com.scalableminds.webknossos.datastore.AgglomerateGraph.AgglomerateGraph +import com.scalableminds.webknossos.datastore.ListOfLong.ListOfLong import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings} import com.scalableminds.webknossos.datastore.geometry.ListOfVec3IntProto import com.scalableminds.webknossos.datastore.helpers.{ @@ -178,7 +179,7 @@ class 
VolumeTracingController @Inject()( accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - (data, indices) <- if (tracing.mappingIsEditable.getOrElse(false)) + (data, indices) <- if (tracing.getHasEditableMapping) editableMappingService.volumeData(tracing, tracingId, request.body, urlOrHeaderToken(token, request)) else tracingService.data(tracingId, tracing, request.body) } yield Ok(data).withHeaders(getMissingBucketsHeaders(indices): _*) @@ -213,9 +214,9 @@ class VolumeTracingController @Inject()( editPositionParsed <- Fox.runOptional(editPosition)(Vec3Int.fromUriLiteral) editRotationParsed <- Fox.runOptional(editRotation)(Vec3Double.fromUriLiteral) boundingBoxParsed <- Fox.runOptional(boundingBox)(BoundingBox.fromLiteral) - remoteFallbackLayerOpt <- Fox.runIf(tracing.mappingIsEditable.contains(true))( + remoteFallbackLayerOpt <- Fox.runIf(tracing.getHasEditableMapping)( tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId)) - newEditableMappingId <- Fox.runIf(tracing.mappingIsEditable.contains(true))( + newEditableMappingId <- Fox.runIf(tracing.getHasEditableMapping)( editableMappingService.duplicate(tracing.mappingName, version = None, remoteFallbackLayerOpt, userToken)) (newId, newTracing) <- tracingService.duplicate( tracingId, @@ -303,7 +304,7 @@ class VolumeTracingController @Inject()( // consecutive 3D points (i.e., nine floats) form a triangle. // There are no shared vertices between triangles. 
tracing <- tracingService.find(tracingId) ?~> Messages("tracing.notFound") - (vertices, neighbors) <- if (tracing.mappingIsEditable.getOrElse(false)) + (vertices, neighbors) <- if (tracing.getHasEditableMapping) editableMappingService.createAdHocMesh(tracing, tracingId, request.body, urlOrHeaderToken(token, request)) else tracingService.createAdHocMesh(tracingId, request.body, urlOrHeaderToken(token, request)) } yield { @@ -345,7 +346,7 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { tracing <- tracingService.find(tracingId) - _ <- bool2Fox(tracing.getMappingIsEditable) ?~> "Cannot query agglomerate skeleton for volume annotation" + _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Cannot query agglomerate skeleton for volume annotation" mappingName <- tracing.mappingName ?~> "annotation.agglomerateSkeleton.noMappingSet" remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) agglomerateSkeletonBytes <- editableMappingService.getAgglomerateSkeletonWithFallback( @@ -404,7 +405,7 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { tracing <- tracingService.find(tracingId) - _ <- bool2Fox(tracing.getMappingIsEditable) ?~> "Mapping is not editable" + _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) edges <- editableMappingService.agglomerateGraphMinCut(request.body, remoteFallbackLayer, token) } yield Ok(Json.toJson(edges)) @@ -418,7 +419,7 @@ class VolumeTracingController @Inject()( accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { tracing <- tracingService.find(tracingId) - _ <- bool2Fox(tracing.getMappingIsEditable) ?~> 
"Mapping is not editable" + _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) (segmentId, edges) <- editableMappingService.agglomerateGraphNeighbors(request.body, remoteFallbackLayer, @@ -434,7 +435,7 @@ class VolumeTracingController @Inject()( for { tracing <- tracingService.find(tracingId) mappingName <- tracing.mappingName.toFox - _ <- bool2Fox(tracing.getMappingIsEditable) ?~> "Mapping is not editable" + _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" currentVersion <- editableMappingService.getClosestMaterializableVersionOrZero(mappingName, None) _ <- bool2Fox(request.body.length == 1) ?~> "Editable mapping update request must contain exactly one update group" updateGroup <- request.body.headOption.toFox @@ -448,7 +449,12 @@ class VolumeTracingController @Inject()( urlOrHeaderToken(token, request) ) _ <- remoteWebknossosClient.reportTracingUpdates(report) - _ <- editableMappingService.update(mappingName, updateGroup, updateGroup.version) + remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) + _ <- editableMappingService.update(mappingName, + updateGroup, + updateGroup.version, + remoteFallbackLayer, + urlOrHeaderToken(token, request)) } yield Ok } } @@ -460,7 +466,7 @@ class VolumeTracingController @Inject()( for { tracing <- tracingService.find(tracingId) mappingName <- tracing.mappingName.toFox - _ <- bool2Fox(tracing.getMappingIsEditable) ?~> "Mapping is not editable" + _ <- bool2Fox(tracing.getHasEditableMapping) ?~> "Mapping is not editable" updateLog <- editableMappingService.updateActionLog(mappingName) } yield Ok(updateLog) } @@ -488,21 +494,28 @@ class VolumeTracingController @Inject()( } } - def editableMappingAgglomerateIdForSegmentId(token: Option[String], - tracingId: String, - segmentId: Long): Action[AnyContent] = - Action.async { implicit request => + def 
editableMappingAgglomerateIdsForSegments(token: Option[String], tracingId: String): Action[ListOfLong] = + Action.async(validateProto[ListOfLong]) { implicit request => log() { accessTokenService.validateAccess(UserAccessRequest.readTracing(tracingId), urlOrHeaderToken(token, request)) { for { tracing <- tracingService.find(tracingId) - mappingName <- tracing.mappingName.toFox + editableMappingId <- tracing.mappingName.toFox remoteFallbackLayer <- tracingService.remoteFallbackLayerFromVolumeTracing(tracing, tracingId) - agglomerateId <- editableMappingService.agglomerateIdForSegmentId(mappingName, - segmentId, - remoteFallbackLayer, - urlOrHeaderToken(token, request)) - } yield Ok(Json.obj("agglomerateId" -> agglomerateId)) + (editableMappingInfo, editableMappingVersion) <- editableMappingService.getInfoAndActualVersion( + editableMappingId, + requestedVersion = None, + remoteFallbackLayer = remoteFallbackLayer, + userToken = urlOrHeaderToken(token, request)) + relevantMapping: Map[Long, Long] <- editableMappingService.generateCombinedMappingForSegmentIds( + request.body.items.toSet, + editableMappingInfo, + editableMappingVersion, + editableMappingId, + remoteFallbackLayer, + urlOrHeaderToken(token, request)) + agglomerateIdsSorted = relevantMapping.toSeq.sortBy(_._1).map(_._2) + } yield Ok(ListOfLong(agglomerateIdsSorted).toByteArray) } } } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala index 38a4c05f7ee..02781cfc9fe 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/controllers/VolumeTracingZarrStreamingController.scala @@ -203,7 +203,7 @@ class VolumeTracingZarrStreamingController 
@Inject()( version = None, additionalCoordinates = None ) - (data, missingBucketIndices) <- if (tracing.getMappingIsEditable) + (data, missingBucketIndices) <- if (tracing.getHasEditableMapping) editableMappingService.volumeData(tracing, tracingId, List(wkRequest), urlOrHeaderToken(token, request)) else tracingService.data(tracingId, tracing, List(wkRequest)) dataWithFallback <- getFallbackLayerDataIfEmpty(tracing, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala index 22b4eb51c58..cd83ccee97e 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingService.scala @@ -143,23 +143,6 @@ class EditableMappingService @Inject()( } yield (newId, newEditableMappingInfo) } - def agglomerateIdForSegmentId(editableMappingId: String, - segmentId: Long, - remoteFallbackLayer: RemoteFallbackLayer, - userToken: Option[String])(implicit ec: ExecutionContext): Fox[Long] = { - val chunkId = segmentId / defaultSegmentToAgglomerateChunkSize - for { - (info, version) <- getInfoAndActualVersion(editableMappingId, None, remoteFallbackLayer, userToken) - chunk <- getSegmentToAgglomerateChunkWithEmptyFallback(editableMappingId, chunkId, version).map(_.toMap) - agglomerateId <- chunk.get(segmentId) match { - case Some(agglomerateId) => Fox.successful(agglomerateId) - case None => - getBaseSegmentToAgglomerate(info.baseMappingName, Set(segmentId), remoteFallbackLayer, userToken) - .flatMap(baseSegmentToAgglomerate => baseSegmentToAgglomerate.get(segmentId)) - } - } yield agglomerateId - } - def duplicate(editableMappingIdOpt: Option[String], version: Option[Long], remoteFallbackLayerBox: 
Box[RemoteFallbackLayer], @@ -263,12 +246,40 @@ class EditableMappingService @Inject()( def update(editableMappingId: String, updateActionGroup: EditableMappingUpdateActionGroup, - newVersion: Long): Fox[Unit] = + newVersion: Long, + remoteFallbackLayer: RemoteFallbackLayer, + userToken: Option[String]): Fox[Unit] = for { actionsWithTimestamp <- Fox.successful(updateActionGroup.actions.map(_.addTimestamp(updateActionGroup.timestamp))) + _ <- dryApplyUpdates(editableMappingId, newVersion, actionsWithTimestamp, remoteFallbackLayer, userToken) ?~> "editableMapping.dryUpdate.failed" _ <- tracingDataStore.editableMappingUpdates.put(editableMappingId, newVersion, actionsWithTimestamp) } yield () + private def dryApplyUpdates(editableMappingId: String, + newVersion: Long, + updates: List[EditableMappingUpdateAction], + remoteFallbackLayer: RemoteFallbackLayer, + userToken: Option[String]): Fox[Unit] = + for { + (previousInfo, previousVersion) <- getInfoAndActualVersion(editableMappingId, + None, + remoteFallbackLayer, + userToken) + updater = new EditableMappingUpdater( + editableMappingId, + previousInfo.baseMappingName, + previousVersion, + newVersion, + remoteFallbackLayer, + userToken, + remoteDatastoreClient, + this, + tracingDataStore, + relyOnAgglomerateIds = updates.length <= 1 + ) + updated <- updater.applyUpdatesAndSave(previousInfo, updates, dry = true) ?~> "editableMapping.update.failed" + } yield () + def applyPendingUpdates(editableMappingId: String, desiredVersion: Long, remoteFallbackLayer: RemoteFallbackLayer, @@ -413,11 +424,11 @@ class EditableMappingService @Inject()( ) private def getSegmentToAgglomerateChunk(editableMappingId: String, - agglomerateId: Long, + chunkId: Long, version: Option[Long]): Fox[Seq[(Long, Long)]] = for { keyValuePair: VersionedKeyValuePair[SegmentToAgglomerateProto] <- tracingDataStore.editableMappingsSegmentToAgglomerate - .get(segmentToAgglomerateKey(editableMappingId, agglomerateId), version, mayBeEmpty = Some(true))( + 
.get(segmentToAgglomerateKey(editableMappingId, chunkId), version, mayBeEmpty = Some(true))( fromProtoBytes[SegmentToAgglomerateProto]) valueProto = keyValuePair.value asSequence = valueProto.segmentToAgglomerate.map(pair => pair.segmentId -> pair.agglomerateId) diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala index 545c94c92cb..2440e17a667 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/editablemapping/EditableMappingUpdater.scala @@ -48,11 +48,12 @@ class EditableMappingUpdater( private val agglomerateToGraphBuffer: mutable.Map[String, AgglomerateGraph] = new mutable.HashMap[String, AgglomerateGraph]() - def applyUpdatesAndSave(existingEditabeMappingInfo: EditableMappingInfo, updates: List[EditableMappingUpdateAction])( - implicit ec: ExecutionContext): Fox[EditableMappingInfo] = + def applyUpdatesAndSave(existingEditabeMappingInfo: EditableMappingInfo, + updates: List[EditableMappingUpdateAction], + dry: Boolean = false)(implicit ec: ExecutionContext): Fox[EditableMappingInfo] = for { - updatedEditableMappingInfo <- updateIter(Some(existingEditabeMappingInfo), updates) - _ <- flushToFossil(updatedEditableMappingInfo) + updatedEditableMappingInfo: EditableMappingInfo <- updateIter(Some(existingEditabeMappingInfo), updates) + _ <- Fox.runIf(!dry)(flushToFossil(updatedEditableMappingInfo)) } yield updatedEditableMappingInfo private def flushToFossil(updatedEditableMappingInfo: EditableMappingInfo)(implicit ec: ExecutionContext): Fox[Unit] = @@ -333,7 +334,7 @@ class EditableMappingUpdater( (agglomerateId1, agglomerateId2) <- agglomerateIdsForMergeAction(update, segmentId1, segmentId2) 
?~> "Failed to look up agglomerate ids for merge action segments" agglomerateGraph1 <- agglomerateGraphForIdWithFallback(mapping, agglomerateId1) ?~> s"Failed to get agglomerate graph for id $agglomerateId1" agglomerateGraph2 <- agglomerateGraphForIdWithFallback(mapping, agglomerateId2) ?~> s"Failed to get agglomerate graph for id $agglomerateId2" - _ <- bool2Fox(agglomerateGraph2.segments.contains(segmentId2)) ?~> "Segment as queried by position is not contained in fetched agglomerate graph" + _ <- bool2Fox(agglomerateGraph2.segments.contains(segmentId2)) ?~> s"Segment $segmentId2 as queried by position ${update.segmentPosition2} is not contained in fetched agglomerate graph for agglomerate $agglomerateId2" mergedGraphOpt = mergeGraph(agglomerateGraph1, agglomerateGraph2, update, diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala index 703f8388c82..8ccfcfc7033 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/TSFullMeshService.scala @@ -50,7 +50,7 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, for { remoteFallbackLayer <- remoteFallbackLayerFromVolumeTracing(tracing, tracingId) baseMappingName <- volumeTracingService.baseMappingName(tracing) - fullMeshRequestAdapted = if (tracing.mappingIsEditable.getOrElse(false)) + fullMeshRequestAdapted = if (tracing.getHasEditableMapping) fullMeshRequest.copy(mappingName = baseMappingName, editableMappingTracingId = Some(tracingId), mappingType = Some("HDF5")) @@ -174,7 +174,7 @@ class TSFullMeshService @Inject()(volumeTracingService: VolumeTracingService, tracing: VolumeTracing, adHocMeshRequest: WebknossosAdHocMeshRequest, tracingId: String): 
Fox[(Array[Float], List[Int])] = - if (tracing.mappingIsEditable.getOrElse(false)) + if (tracing.getHasEditableMapping) editableMappingService.createAdHocMesh(tracing, tracingId, adHocMeshRequest, token) else volumeTracingService.createAdHocMesh(tracingId, adHocMeshRequest, token) } diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala index 4848ba247c1..cb12c273f53 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeSegmentStatisticsService.scala @@ -109,7 +109,7 @@ class VolumeSegmentStatisticsService @Inject()(volumeTracingService: VolumeTraci ) }.toList for { - (data, _) <- if (tracing.mappingIsEditable.getOrElse(false)) + (data, _) <- if (tracing.getHasEditableMapping) editableMappingService.volumeData(tracing, tracingId, dataRequests, userToken) else volumeTracingService.data(tracingId, tracing, dataRequests, includeFallbackDataIfAvailable = true, userToken) } yield data diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala index 5fa306a9d2c..f8fef19f3b8 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeTracingService.scala @@ -140,7 +140,7 @@ class VolumeTracingService @Inject()( case Full(tracing) => action match { case a: UpdateBucketVolumeAction => - if (tracing.getMappingIsEditable) { + if 
(tracing.getHasEditableMapping) { Fox.failure("Cannot mutate volume data in annotation with editable mapping.") } else updateBucket(tracingId, tracing, a, segmentIndexBuffer, updateGroup.version) ?~> "Failed to save volume data." @@ -213,10 +213,10 @@ class VolumeTracingService @Inject()( } yield volumeTracing override def editableMappingTracingId(tracing: VolumeTracing, tracingId: String): Option[String] = - if (tracing.mappingIsEditable.getOrElse(false)) Some(tracingId) else None + if (tracing.getHasEditableMapping) Some(tracingId) else None override def baseMappingName(tracing: VolumeTracing): Fox[Option[String]] = - if (tracing.mappingIsEditable.getOrElse(false)) + if (tracing.getHasEditableMapping) tracing.mappingName.map(editableMappingService.getBaseMappingName).getOrElse(Fox.successful(None)) else Fox.successful(tracing.mappingName) @@ -1011,7 +1011,7 @@ class VolumeTracingService @Inject()( def dummyTracing: VolumeTracing = ??? def mergeEditableMappings(tracingsWithIds: List[(VolumeTracing, String)], userToken: Option[String]): Fox[String] = - if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.mappingIsEditable.contains(true))) { + if (tracingsWithIds.forall(tracingWithId => tracingWithId._1.getHasEditableMapping)) { for { remoteFallbackLayers <- Fox.serialCombined(tracingsWithIds)(tracingWithId => remoteFallbackLayerFromVolumeTracing(tracingWithId._1, tracingWithId._2)) @@ -1021,7 +1021,7 @@ class VolumeTracingService @Inject()( _ <- bool2Fox(editableMappingIds.length == tracingsWithIds.length) ?~> "Not all volume tracings have editable mappings" newEditableMappingId <- editableMappingService.merge(editableMappingIds, remoteFallbackLayer, userToken) } yield newEditableMappingId - } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.mappingIsEditable.getOrElse(false))) { + } else if (tracingsWithIds.forall(tracingWithId => !tracingWithId._1.getHasEditableMapping)) { Fox.empty } else { Fox.failure("Cannot merge tracings with and 
without editable mappings") diff --git a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala index 28f69aa6964..95d942a0e72 100644 --- a/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala +++ b/webknossos-tracingstore/app/com/scalableminds/webknossos/tracingstore/tracings/volume/VolumeUpdateActions.scala @@ -368,7 +368,7 @@ case class UpdateMappingNameAction(mappingName: Option[String], if (tracing.mappingIsLocked.getOrElse(false)) tracing // cannot change mapping name if it is locked else tracing.copy(mappingName = mappingName, - mappingIsEditable = Some(isEditable.getOrElse(false)), + hasEditableMapping = Some(isEditable.getOrElse(false)), mappingIsLocked = Some(isLocked.getOrElse(false))) } diff --git a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes index b10d4276af0..cf4668e3a06 100644 --- a/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes +++ b/webknossos-tracingstore/conf/com.scalableminds.webknossos.tracingstore.routes @@ -37,7 +37,7 @@ POST /mapping/:tracingId/update @com.scalablemin GET /mapping/:tracingId/updateActionLog @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingUpdateActionLog(token: Option[String], tracingId: String) GET /mapping/:tracingId/info @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingInfo(token: Option[String], tracingId: String, version: Option[Long]) GET /mapping/:tracingId/segmentsForAgglomerate @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingSegmentIdsForAgglomerate(token: Option[String], tracingId: String, agglomerateId: Long) 
-GET /mapping/:tracingId/agglomerateIdForSegmentId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingAgglomerateIdForSegmentId(token: Option[String], tracingId: String, segmentId: Long) +POST /mapping/:tracingId/agglomeratesForSegments @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingController.editableMappingAgglomerateIdsForSegments(token: Option[String], tracingId: String) # Zarr endpoints for volume annotations GET /volume/zarr/json/:tracingId @com.scalableminds.webknossos.tracingstore.controllers.VolumeTracingZarrStreamingController.volumeTracingFolderContentJson(token: Option[String], tracingId: String) diff --git a/yarn.lock b/yarn.lock index 73233c93aee..4917caea431 100644 --- a/yarn.lock +++ b/yarn.lock @@ -1292,44 +1292,38 @@ resolved "https://registry.yarnpkg.com/@react-dnd/shallowequal/-/shallowequal-2.0.0.tgz#a3031eb54129f2c66b2753f8404266ec7bf67f0a" integrity sha512-Pc/AFTdwZwEKJxFJvlxrSmGe/di+aAOBn60sremrpLo6VI/6cmiUYNNwlI5KNYttg7uypzA3ILPMPgxB2GYZEg== -"@redux-saga/core@^1.1.3": - version "1.1.3" - resolved "https://registry.yarnpkg.com/@redux-saga/core/-/core-1.1.3.tgz#3085097b57a4ea8db5528d58673f20ce0950f6a4" - integrity sha512-8tInBftak8TPzE6X13ABmEtRJGjtK17w7VUs7qV17S8hCO5S3+aUTWZ/DBsBJPdE8Z5jOPwYALyvofgq1Ws+kg== +"@redux-saga/core@^1.3.0": + version "1.3.0" + resolved "https://registry.yarnpkg.com/@redux-saga/core/-/core-1.3.0.tgz#2ce08b73d407fc6ea9e7f7d83d2e97d981a3a8b8" + integrity sha512-L+i+qIGuyWn7CIg7k1MteHGfttKPmxwZR5E7OsGikCL2LzYA0RERlaUY00Y3P3ZV2EYgrsYlBrGs6cJP5OKKqA== dependencies: "@babel/runtime" "^7.6.3" - "@redux-saga/deferred" "^1.1.2" - "@redux-saga/delay-p" "^1.1.2" - "@redux-saga/is" "^1.1.2" - "@redux-saga/symbols" "^1.1.2" - "@redux-saga/types" "^1.1.0" - redux "^4.0.4" + "@redux-saga/deferred" "^1.2.1" + "@redux-saga/delay-p" "^1.2.1" + "@redux-saga/is" "^1.1.3" + "@redux-saga/symbols" "^1.1.3" + "@redux-saga/types" "^1.2.1" typescript-tuple "^2.2.1" 
-"@redux-saga/deferred@^1.1.2": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@redux-saga/deferred/-/deferred-1.1.2.tgz#59937a0eba71fff289f1310233bc518117a71888" - integrity sha512-908rDLHFN2UUzt2jb4uOzj6afpjgJe3MjICaUNO3bvkV/kN/cNeI9PMr8BsFXB/MR8WTAZQq/PlTq8Kww3TBSQ== +"@redux-saga/deferred@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@redux-saga/deferred/-/deferred-1.2.1.tgz#aca373a08ccafd6f3481037f2f7ee97f2c87c3ec" + integrity sha512-cmin3IuuzMdfQjA0lG4B+jX+9HdTgHZZ+6u3jRAOwGUxy77GSlTi4Qp2d6PM1PUoTmQUR5aijlA39scWWPF31g== -"@redux-saga/delay-p@^1.1.2": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@redux-saga/delay-p/-/delay-p-1.1.2.tgz#8f515f4b009b05b02a37a7c3d0ca9ddc157bb355" - integrity sha512-ojc+1IoC6OP65Ts5+ZHbEYdrohmIw1j9P7HS9MOJezqMYtCDgpkoqB5enAAZrNtnbSL6gVCWPHaoaTY5KeO0/g== +"@redux-saga/delay-p@^1.2.1": + version "1.2.1" + resolved "https://registry.yarnpkg.com/@redux-saga/delay-p/-/delay-p-1.2.1.tgz#e72ac4731c5080a21f75b61bedc31cb639d9e446" + integrity sha512-MdiDxZdvb1m+Y0s4/hgdcAXntpUytr9g0hpcOO1XFVyyzkrDu3SKPgBFOtHn7lhu7n24ZKIAT1qtKyQjHqRd+w== dependencies: - "@redux-saga/symbols" "^1.1.2" + "@redux-saga/symbols" "^1.1.3" -"@redux-saga/is@^1.1.2": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@redux-saga/is/-/is-1.1.2.tgz#ae6c8421f58fcba80faf7cadb7d65b303b97e58e" - integrity sha512-OLbunKVsCVNTKEf2cH4TYyNbbPgvmZ52iaxBD4I1fTif4+MTXMa4/Z07L83zW/hTCXwpSZvXogqMqLfex2Tg6w== +"@redux-saga/is@^1.1.3": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@redux-saga/is/-/is-1.1.3.tgz#b333f31967e87e32b4e6b02c75b78d609dd4ad73" + integrity sha512-naXrkETG1jLRfVfhOx/ZdLj0EyAzHYbgJWkXbB3qFliPcHKiWbv/ULQryOAEKyjrhiclmr6AMdgsXFyx7/yE6Q== dependencies: - "@redux-saga/symbols" "^1.1.2" - "@redux-saga/types" "^1.1.0" - -"@redux-saga/symbols@^1.1.2": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@redux-saga/symbols/-/symbols-1.1.2.tgz#216a672a487fc256872b8034835afc22a2d0595d" - integrity 
sha512-EfdGnF423glv3uMwLsGAtE6bg+R9MdqlHEzExnfagXPrIiuxwr3bdiAwz3gi+PsrQ3yBlaBpfGLtDG8rf3LgQQ== + "@redux-saga/symbols" "^1.1.3" + "@redux-saga/types" "^1.2.1" "@redux-saga/symbols@^1.1.3": version "1.1.3" @@ -1344,11 +1338,6 @@ "@redux-saga/symbols" "^1.1.3" "@redux-saga/types" "^1.2.1" -"@redux-saga/types@^1.1.0": - version "1.1.0" - resolved "https://registry.yarnpkg.com/@redux-saga/types/-/types-1.1.0.tgz#0e81ce56b4883b4b2a3001ebe1ab298b84237204" - integrity sha512-afmTuJrylUU/0OtqzaRkbyYFFNgCF73Bvel/sw90pvGrWIZ+vyoIJqA6eMSoA6+nb443kTmulmBtC9NerXboNg== - "@redux-saga/types@^1.2.1": version "1.2.1" resolved "https://registry.yarnpkg.com/@redux-saga/types/-/types-1.2.1.tgz#9403f51c17cae37edf870c6bc0c81c1ece5ccef8" @@ -9997,12 +9986,12 @@ redux-mock-store@^1.2.2: dependencies: lodash.isplainobject "^4.0.6" -redux-saga@^1.0.0: - version "1.1.3" - resolved "https://registry.yarnpkg.com/redux-saga/-/redux-saga-1.1.3.tgz#9f3e6aebd3c994bbc0f6901a625f9a42b51d1112" - integrity sha512-RkSn/z0mwaSa5/xH/hQLo8gNf4tlvT18qXDNvedihLcfzh+jMchDgaariQoehCpgRltEm4zHKJyINEz6aqswTw== +redux-saga@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/redux-saga/-/redux-saga-1.3.0.tgz#a59ada7c28010189355356b99738c9fcb7ade30e" + integrity sha512-J9RvCeAZXSTAibFY0kGw6Iy4EdyDNW7k6Q+liwX+bsck7QVsU78zz8vpBRweEfANxnnlG/xGGeOvf6r8UXzNJQ== dependencies: - "@redux-saga/core" "^1.1.3" + "@redux-saga/core" "^1.3.0" redux@3.7.2, redux@^3.6.0, redux@^4.0.0, redux@^4.0.4: version "3.7.2" @@ -10882,16 +10871,7 @@ string-convert@^0.2.0: resolved "https://registry.yarnpkg.com/string-convert/-/string-convert-0.2.1.tgz#6982cc3049fbb4cd85f8b24568b9d9bf39eeff97" integrity sha1-aYLMMEn7tM2F+LJFaLnZvznu/5c= -"string-width-cjs@npm:string-width@^4.2.0": - version "4.2.3" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" - integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== - 
dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.1" - -string-width@^4.0.0, string-width@^4.2.2, string-width@^4.2.3: +"string-width-cjs@npm:string-width@^4.2.0", string-width@^4.0.0, string-width@^4.2.2, string-width@^4.2.3: version "4.2.3" resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010" integrity sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g== @@ -10970,14 +10950,7 @@ stringify-entities@^4.0.0: character-entities-html4 "^2.0.0" character-entities-legacy "^3.0.0" -"strip-ansi-cjs@npm:strip-ansi@^6.0.1": - version "6.0.1" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" - integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== - dependencies: - ansi-regex "^5.0.1" - -strip-ansi@^6.0.0, strip-ansi@^6.0.1: +"strip-ansi-cjs@npm:strip-ansi@^6.0.1", strip-ansi@^6.0.0, strip-ansi@^6.0.1: version "6.0.1" resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9" integrity sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A== @@ -12128,16 +12101,7 @@ worker-loader@^3.0.8: loader-utils "^2.0.0" schema-utils "^3.0.0" -"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0": - version "7.0.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" - integrity sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - -wrap-ansi@^7.0.0: +"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0", wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" integrity 
sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==