Skip to content

Commit

Permalink
[electrophysiology_browser] Fix EEG data shift (aces#8999)
Browse files Browse the repository at this point in the history
This resolves the data shift issue by adding the number of valid samples in the last chunk to the index.json, and using it for the calculations which previously assumed all chunks were filled.

Closes aces#8992. Has a Loris-MRI counterpart: aces/Loris-MRI#1030

Loosely dependent on aces#8998 (file modifications in common and done at a later time).
  • Loading branch information
jeffersoncasimir authored Jan 23, 2024
1 parent 61b7dbf commit be39a57
Show file tree
Hide file tree
Showing 5 changed files with 103 additions and 49 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -95,12 +95,19 @@ class EEGLabSeriesProvider extends Component<CProps> {
Promise.race(racers(fetchJSON, chunksURL, '/index.json')).then(
({json, url}) => {
if (json) {
const {channelMetadata, shapes, timeInterval, seriesRange} = json;
const {
channelMetadata,
shapes,
timeInterval,
seriesRange,
validSamples,
} = json;
this.store.dispatch(
setDatasetMetadata({
chunksURL: url,
channelMetadata,
shapes,
validSamples,
timeInterval,
seriesRange,
limit,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,25 +7,30 @@ import {Group} from '@visx/group';
import {colorOrder} from '../../color';

const LineMemo = R.memoizeWith(
({amplitudeScale, filters, channelIndex, traceIndex,
chunkIndex, isStacked, DCOffset, numChannels, numChunks}) =>
`${amplitudeScale},${filters.join('-')},`
({amplitudeScale, interval, filters,
channelIndex, traceIndex, chunkIndex,
isStacked, DCOffset, numChannels,
numChunks, previousPoint,
}) =>
`${amplitudeScale},${interval.join('-')},${filters.join('-')},`
+ `${channelIndex}-${traceIndex}-${chunkIndex},`
+ `${isStacked},${DCOffset},${numChannels},${numChunks}`,
+ `${isStacked},${DCOffset},${numChannels},`
+ `${numChunks},${previousPoint}`,
({
channelIndex,
traceIndex,
chunkIndex,
interval,
seriesRange,
amplitudeScale,
filters,
values,
isStacked,
DCOffset,
numChannels,
numChunks,
...rest
channelIndex,
traceIndex,
chunkIndex,
interval,
seriesRange,
amplitudeScale,
filters,
values,
isStacked,
DCOffset,
numChannels,
numChunks,
previousPoint,
...rest
}) => {
const scales = [
scaleLinear()
Expand All @@ -36,12 +41,25 @@ const LineMemo = R.memoizeWith(
.range([-0.5, 0.5]),
];

const points = values.map((value, i) =>
vec2.fromValues(
scales[0](
interval[0] + (i / values.length) * (interval[1] - interval[0])
),
-(scales[1](value) - DCOffset)
const points = previousPoint === null
? []
: [
vec2.fromValues(
scales[0](
interval[0] - (1 / values.length) * (interval[1] - interval[0])
),
-(scales[1](previousPoint) - DCOffset)
)
];

points.push(
...values.map((value, i) =>
vec2.fromValues(
scales[0](
interval[0] + (i / values.length) * (interval[1] - interval[0])
),
-(scales[1](value) - DCOffset)
)
)
);

Expand Down Expand Up @@ -77,6 +95,7 @@ type CProps = {
withDCOffset: number,
numChannels: number,
numChunks: number,
previousPoint: number | null,
};

/**
Expand All @@ -95,22 +114,24 @@ type CProps = {
* @param root0.withDCOffset
* @param root0.numChannels
* @param root0.numChunks
* @param root0.previousPoint
*/
const LineChunk = ({
channelIndex,
traceIndex,
chunkIndex,
chunk,
seriesRange,
amplitudeScale,
scales,
physioFileID,
isHovered,
isStacked,
withDCOffset,
numChannels,
numChunks,
...rest
channelIndex,
traceIndex,
chunkIndex,
chunk,
seriesRange,
amplitudeScale,
scales,
physioFileID,
isHovered,
isStacked,
withDCOffset,
numChannels,
numChunks,
previousPoint,
...rest
}: CProps) => {
const {interval, values} = chunk;

Expand All @@ -133,7 +154,7 @@ const LineChunk = ({
top={-p0[1]}
>
<Group
transform={'translate(' + p0[0] + ' 0)' +
transform={'translate(' + p0[0] + ' 0) ' +
'scale(' + chunkLength + ' ' + chunkHeight + ')'
}
>
Expand All @@ -151,6 +172,7 @@ const LineChunk = ({
DCOffset={withDCOffset}
numChannels={numChannels}
numChunks={numChunks}
previousPoint={previousPoint}
/>
</Group>
</Group>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -623,7 +623,8 @@ const SeriesRenderer: FunctionComponent<CProps> = ({
/>
</clipPath>

{channelList.map((channel, i) => {
{
channelList.map((channel, i) => {
if (!channelMetadata[channel.index]) {
return null;
}
Expand Down Expand Up @@ -733,12 +734,12 @@ const SeriesRenderer: FunctionComponent<CProps> = ({
: 0;

return (
trace.chunks.map((chunk, k) => (
trace.chunks.map((chunk, k, chunks) => (
<LineChunk
channelIndex={channel.index}
traceIndex={j}
chunkIndex={k}
key={`${k}-${trace.chunks.length}`}
key={`${channel.index}-${k}-${trace.chunks.length}`}
chunk={chunk}
seriesRange={seriesRange}
amplitudeScale={amplitudeScale}
Expand All @@ -749,6 +750,11 @@ const SeriesRenderer: FunctionComponent<CProps> = ({
withDCOffset={DCOffset}
numChannels={numDisplayedChannels}
numChunks={numChunks}
previousPoint={
k === 0
? null
: chunks[k - 1].values.slice(-1)[0]
}
/>
))
);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -110,7 +110,7 @@ export const createFetchChunksEpic = (fromState: (any) => State) => (
Rx.map(([, state]) => fromState(state)),
Rx.debounceTime(UPDATE_DEBOUNCE_TIME),
Rx.concatMap(({bounds, dataset, channels}) => {
const {chunksURL, shapes, timeInterval} = dataset;
const {chunksURL, shapes, validSamples, timeInterval} = dataset;
if (!chunksURL) {
return of();
}
Expand All @@ -123,19 +123,25 @@ export const createFetchChunksEpic = (fromState: (any) => State) => (
const shapeChunks =
shapes.map((shape) => shape[shape.length - 2]);

const chunkIntervals : chunkIntervals[] = shapeChunks
const valuesPerChunk =
shapes.map((shape) => shape[shape.length - 1]);

const chunkIntervals = shapeChunks
.map((numChunks, downsampling) => {
const recordingDuration = Math.abs(
timeInterval[1] - timeInterval[0]
);

const filledChunks = (numChunks - 1) +
(validSamples[downsampling] / valuesPerChunk[downsampling]);

const i0 =
(numChunks *
(filledChunks *
Math.floor(bounds.interval[0] - bounds.domain[0])
) / recordingDuration;

const i1 =
(numChunks *
(filledChunks *
Math.ceil(bounds.interval[1] - bounds.domain[0])
) / recordingDuration;

Expand All @@ -145,7 +151,11 @@ export const createFetchChunksEpic = (fromState: (any) => State) => (
];

return {
interval: interval,
interval:
[
Math.floor(i0),
Math.min(Math.ceil(i1), filledChunks),
],
numChunks: numChunks,
downsampling,
};
Expand All @@ -164,12 +174,18 @@ export const createFetchChunksEpic = (fromState: (any) => State) => (
const chunkPromises = R.range(...finestChunks.interval).flatMap(
(chunkIndex) => {
const numChunks = finestChunks.numChunks;

const filledChunks = (numChunks - 1) + (
validSamples[finestChunks.downsampling] /
valuesPerChunk[finestChunks.downsampling]
);

const chunkInterval = [
timeInterval[0] +
(chunkIndex / numChunks) *
(chunkIndex / filledChunks) *
(timeInterval[1] - timeInterval[0]),
timeInterval[0] +
((chunkIndex + 1) / numChunks) *
((chunkIndex + 1) / filledChunks) *
(timeInterval[1] - timeInterval[0]),
];
if (chunkInterval[0] <= bounds.interval[1]) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@ export type Action =
chunksURL: string,
channelNames: string[],
shapes: number[][],
validSamples: number[],
timeInterval: [number, number],
seriesRange: [number, number],
limit: number,
Expand All @@ -46,6 +47,7 @@ export type State = {
activeEpoch: number | null,
physioFileID: number | null,
shapes: number[][],
validSamples: number[],
timeInterval: [number, number],
seriesRange: [number, number],
};
Expand All @@ -68,6 +70,7 @@ export const datasetReducer = (
offsetIndex: 1,
limit: DEFAULT_MAX_CHANNELS,
shapes: [],
validSamples: [],
timeInterval: [0, 1],
seriesRange: [-1, 2],
},
Expand Down

0 comments on commit be39a57

Please sign in to comment.