Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Pull 26.0.1 and 26.0.2 bug fixes into main #1233

Merged
Merged 7 commits into main on Mar 17, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion install/requirements/requirements.txt
100644 → 100755
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@ google
mat73
matplotlib
mne
mne-bids>=0.6
mne-bids>=0.14
mysqlclient
nibabel
nilearn
Expand Down
2 changes: 1 addition & 1 deletion python/lib/bidsreader.py
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ def load_bids_data(self, validate):
if self.verbose:
print('Loading the BIDS dataset with BIDS layout library...\n')

exclude_arr = ['/code/', '/sourcedata/', '/log/', '.git/']
exclude_arr = ['code/', 'sourcedata/', 'log/', '.git']
force_arr = [re.compile(r"_annotations\.(tsv|json)$")]

# BIDSLayoutIndexer is required for PyBIDS >= 0.12.1
Expand Down
73 changes: 43 additions & 30 deletions python/lib/eeg.py
Original file line number Diff line number Diff line change
Expand Up @@ -147,7 +147,7 @@ def __init__(self, bids_reader, bids_sub_id, bids_ses_id, bids_modality, db,

self.cohort_id = None
for row in bids_reader.participants_info:
if not row['participant_id'] == self.psc_id:
if not row['participant_id'] == self.bids_sub_id:
continue
if 'cohort' in row:
cohort_info = db.pselect(
Expand All @@ -159,12 +159,11 @@ def __init__(self, bids_reader, bids_sub_id, bids_ses_id, bids_modality, db,
break

self.session_id = self.get_loris_session_id()
# self.event_files = self.grep_bids_files('events') # This variable and function are never otherwise used

# check if a tsv with acquisition dates or age is available for the subject
self.scans_file = None
if self.bids_layout.get(suffix='scans', subject=self.psc_id, return_type='filename'):
self.scans_file = self.bids_layout.get(suffix='scans', subject=self.psc_id, return_type='filename')[0]
if self.bids_layout.get(suffix='scans', subject=self.bids_sub_id, return_type='filename'):
self.scans_file = self.bids_layout.get(suffix='scans', subject=self.bids_sub_id, return_type='filename')[0]

# register the data into LORIS
if (dataset_type and dataset_type == 'raw'):
Expand Down Expand Up @@ -343,9 +342,10 @@ def register_data(self, derivatives=False, detect=True):
files_to_archive, archive_rel_name, eeg_file_id
)

# create data chunks for React visualization in
# data_dir/bids_import/bids_dataset_name_BIDSVersion_chunks directory
physiological.create_chunks_for_visualization(eeg_file_id, self.data_dir)
# create data chunks for React visualization
eeg_viz_enabled = self.config_db_obj.get_config("useEEGBrowserVisualizationComponents")
if eeg_viz_enabled == 'true' or eeg_viz_enabled == '1':
physiological.create_chunks_for_visualization(eeg_file_id, self.data_dir)

def fetch_and_insert_eeg_files(self, derivatives=False, detect=True):
"""
Expand Down Expand Up @@ -593,6 +593,7 @@ def fetch_and_insert_electrode_file(
electrode_ids = physiological.insert_electrode_file(
electrode_data, electrode_path, physiological_file_id, blake2
)

# get coordsystem.json file
# subject-specific metadata
coordsystem_metadata_file = self.bids_layout.get_nearest(
Expand All @@ -603,18 +604,28 @@ def fetch_and_insert_electrode_file(
suffix = 'coordsystem',
all_ = False,
full_search = False,
subject=self.psc_id,
subject=self.bids_sub_id,
)
if not coordsystem_metadata_file:
message = '\nWARNING: no electrode metadata files (coordsystem.json) ' \
f'associated with physiological file ID {physiological_file_id}'
print(message)

else:
# copy the electrode metadata file to the LORIS BIDS import directory
electrode_metadata_path = self.copy_file_to_loris_bids_dir(
coordsystem_metadata_file.path, derivatives
# insert default (not registered) coordsystem in the database
physiological.insert_electrode_metadata(
None,
None,
physiological_file_id,
None,
electrode_ids
)
else:
electrode_metadata_path = coordsystem_metadata_file.path.replace(self.data_dir, '')
if self.loris_bids_root_dir:
# copy the electrode metadata file to the LORIS BIDS import directory
electrode_metadata_path = self.copy_file_to_loris_bids_dir(
coordsystem_metadata_file.path, derivatives
)
# load json data
with open(coordsystem_metadata_file.path) as metadata_file:
electrode_metadata = json.load(metadata_file)
Expand Down Expand Up @@ -754,20 +765,21 @@ def fetch_and_insert_event_files(
suffix = 'events',
all_ = False,
full_search = False,
subject=self.psc_id,
subject=self.bids_sub_id,
)
inheritance = False

if not event_metadata_file:
message = '\nWARNING: no events metadata files (event.json) associated' \
message = '\nWARNING: no events metadata files (events.json) associated ' \
'with physiological file ID ' + str(physiological_file_id)
print(message)
else:
# copy the event file to the LORIS BIDS import directory
event_metadata_path = self.copy_file_to_loris_bids_dir(
event_metadata_file.path, derivatives, inheritance
)

event_metadata_path = event_metadata_file.path.replace(self.data_dir, '')
if self.loris_bids_root_dir:
# copy the event file to the LORIS BIDS import directory
event_metadata_path = self.copy_file_to_loris_bids_dir(
event_metadata_file.path, derivatives, inheritance
)
# load json data
with open(event_metadata_file.path) as metadata_file:
event_metadata = json.load(metadata_file)
Expand All @@ -787,10 +799,12 @@ def fetch_and_insert_event_files(

# get events.tsv file and insert
event_data = utilities.read_tsv_file(event_data_file.path)
# copy the event file to the LORIS BIDS import directory
event_path = self.copy_file_to_loris_bids_dir(
event_data_file.path, derivatives
)
event_path = event_data_file.path.replace(self.data_dir, '')
if self.loris_bids_root_dir:
# copy the event file to the LORIS BIDS import directory
event_path = self.copy_file_to_loris_bids_dir(
event_data_file.path, derivatives
)
# get the blake2b hash of the task events file
blake2 = utilities.compute_blake2b_hash(event_data_file.path)

Expand Down Expand Up @@ -886,7 +900,7 @@ def create_and_insert_archive(self, files_to_archive, archive_rel_name,
physiological = Physiological(self.db, self.verbose)

# check if archive is on the filesystem
archive_full_path = os.path.join(self.data_dir, archive_rel_name)
(archive_rel_name, archive_full_path) = self.get_archive_paths(archive_rel_name)
blake2 = None
if os.path.isfile(archive_full_path):
blake2 = utilities.compute_blake2b_hash(archive_full_path)
Expand All @@ -911,7 +925,8 @@ def create_and_insert_archive(self, files_to_archive, archive_rel_name,
else:
return

(archive_rel_name, archive_full_path) = self.create_archive(files_to_archive, archive_rel_name)
# create the archive file
utilities.create_archive(files_to_archive, archive_full_path)

# insert the archive file in physiological_archive
blake2 = utilities.compute_blake2b_hash(archive_full_path)
Expand All @@ -936,7 +951,7 @@ def create_and_insert_event_archive(self, files_to_archive, archive_rel_name, ee
"""

# check if archive is on the filesystem
archive_full_path = os.path.join(self.data_dir, archive_rel_name)
(archive_rel_name, archive_full_path) = self.get_archive_paths(archive_rel_name)
blake2 = None
if os.path.isfile(archive_full_path):
blake2 = utilities.compute_blake2b_hash(archive_full_path)
Expand Down Expand Up @@ -971,16 +986,14 @@ def create_and_insert_event_archive(self, files_to_archive, archive_rel_name, ee
blake2 = utilities.compute_blake2b_hash(archive_full_path)
physiological_event_archive_obj.insert(eeg_file_id, blake2, archive_rel_name)

def create_archive(self, files_to_archive, archive_rel_name):
# create the archive file
def get_archive_paths(self, archive_rel_name):
package_path = self.config_db_obj.get_config("prePackagedDownloadPath")
if package_path:
raw_package_dir = os.path.join(package_path, 'raw')
os.makedirs(raw_package_dir, exist_ok=True)
archive_rel_name = os.path.basename(archive_rel_name)
archive_full_path = os.path.join(raw_package_dir, archive_rel_name)
utilities.create_archive(files_to_archive, archive_full_path)
else:
archive_full_path = os.path.join(self.data_dir, archive_rel_name)
utilities.create_archive(files_to_archive, archive_full_path)

return (archive_rel_name, archive_full_path)
31 changes: 16 additions & 15 deletions python/lib/physiological.py
Original file line number Diff line number Diff line change
Expand Up @@ -530,7 +530,7 @@ def insert_electrode_metadata(self, electrode_metadata, electrode_metadata_file,
print(f"Modality {modality} unknown in DB")
# force default
raise IndexError
except (IndexError, KeyError):
except Exception:
modality_id = self.physiological_coord_system_db.grep_coord_system_modality_from_name("Not registered")

# type (Fiducials, AnatomicalLandmark, HeadCoil, DigitizedHeapPoints)
Expand All @@ -544,7 +544,7 @@ def insert_electrode_metadata(self, electrode_metadata, electrode_metadata_file,
print(f"Type {coord_system_type} unknown in DB")
# force default
raise IndexError
except (IndexError, KeyError):
except Exception:
coord_system_type = None
type_id = self.physiological_coord_system_db.grep_coord_system_type_from_name("Not registered")

Expand All @@ -556,7 +556,7 @@ def insert_electrode_metadata(self, electrode_metadata, electrode_metadata_file,
print(f"Unit {unit_data} unknown in DB")
# force default
raise IndexError
except (IndexError, KeyError):
except Exception:
unit_id = self.physiological_coord_system_db.grep_coord_system_unit_from_name("Not registered")

# name
Expand All @@ -567,7 +567,7 @@ def insert_electrode_metadata(self, electrode_metadata, electrode_metadata_file,
print(f"Name {coord_system_name} unknown in DB")
# force default
raise IndexError
except (IndexError, KeyError):
except Exception:
name_id = self.physiological_coord_system_db.grep_coord_system_name_from_name("Not registered")

# get or create coord system in db
Expand All @@ -589,7 +589,7 @@ def insert_electrode_metadata(self, electrode_metadata, electrode_metadata_file,
ref_key : Point3D(None, *ref_val)
for ref_key, ref_val in ref_coords.items()
}
except (IndexError, KeyError):
except Exception:
# no ref points
is_ok_ref_coords = False
# insert ref points if found
Expand All @@ -609,12 +609,13 @@ def insert_electrode_metadata(self, electrode_metadata, electrode_metadata_file,
electrode_ids
)

# insert blake2b hash of task event file into physiological_parameter_file
self.insert_physio_parameter_file(
physiological_file_id,
'coordsystem_file_json_blake2b_hash',
blake2
)
if blake2:
# insert blake2b hash of task event file into physiological_parameter_file
self.insert_physio_parameter_file(
physiological_file_id,
'coordsystem_file_json_blake2b_hash',
blake2
)

def insert_event_metadata(self, event_metadata, event_metadata_file, physiological_file_id,
project_id, blake2, project_wide, hed_union):
Expand Down Expand Up @@ -1221,7 +1222,8 @@ def create_chunks_for_visualization(self, physio_file_id, data_dir):
script = None
file_path = self.grep_file_path_from_file_id(physio_file_id)

chunk_root_dir = self.config_db_obj.get_config("EEGChunksPath")
chunk_root_dir_config = self.config_db_obj.get_config("EEGChunksPath")
chunk_root_dir = chunk_root_dir_config
if not chunk_root_dir:
# the bids_rel_dir is the first two directories in file_path (
# bids_imports/BIDS_dataset_name_BIDSVersion)
Expand Down Expand Up @@ -1251,12 +1253,11 @@ def create_chunks_for_visualization(self, physio_file_id, data_dir):
print('ERROR: ' + script + ' not found')
sys.exit(lib.exitcode.CHUNK_CREATION_FAILURE)

# the final chunk path will be /data/%PROJECT%/data/bids_imports
# /BIDS_dataset_name_BIDSVersion_chunks/EEG_FILENAME.chunks
chunk_path = chunk_root_dir + os.path.splitext(os.path.basename(file_path))[0] + '.chunks'
if os.path.isdir(chunk_path):
self.insert_physio_parameter_file(
physiological_file_id = physio_file_id,
parameter_name = 'electrophysiology_chunked_dataset_path',
value = chunk_path.replace(data_dir, '')
value = chunk_path.replace(chunk_root_dir_config, '') if chunk_root_dir_config
else chunk_path.replace(data_dir, '')
)
2 changes: 1 addition & 1 deletion python/react-series-data-viewer/chunking.py
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,7 @@ def mne_file_to_chunks(path, chunk_size, loader, from_channel_name, channel_coun
time_interval = (parsed.times[0], parsed.times[-1])
channel_names = parsed.info["ch_names"]
channel_ranges = []
signal_range = [np.PINF, np.NINF]
signal_range = [np.inf, -np.inf]
channel_chunks_list = []
selected_channels = []
valid_samples_in_last_chunk = []
Expand Down
2 changes: 1 addition & 1 deletion python/react-series-data-viewer/eeglab_to_chunks.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@ def load_channels(path):
args = parser.parse_args()
for path in args.files:
eeg = mne_eeglab._check_load_mat(path, None)
eeglab_info = mne_eeglab._get_info(eeg, eog=())
eeglab_info = mne_eeglab._get_info(eeg, eog=(), montage_units="auto")
channel_names = eeglab_info[0]['ch_names']

if args.channel_index < 0:
Expand Down
17 changes: 11 additions & 6 deletions python/scripts/bids_import.py
Original file line number Diff line number Diff line change
Expand Up @@ -295,23 +295,28 @@ def read_and_insert_bids(
strict=False,
extension='json',
suffix='events',
all_=False
all_=False,
subject=None,
session=None
)

dataset_tag_dict = {}
if not root_event_metadata_file:
message = '\nWARNING: no events metadata files (event.json) in ' \
message = '\nWARNING: no events metadata files (events.json) in ' \
'root directory'
print(message)
else:
# copy the event file to the LORIS BIDS import directory
copy_file = str.replace(
root_event_metadata_file.path,
bids_layout.root,
data_dir,
""
)
event_metadata_path = loris_bids_root_dir + copy_file.lstrip('/')
lib.utilities.copy_file(root_event_metadata_file.path, event_metadata_path, verbose)
).lstrip('/')

event_metadata_path = copy_file
if not nocopy:
event_metadata_path = loris_bids_root_dir + copy_file
lib.utilities.copy_file(root_event_metadata_file.path, event_metadata_path, verbose)

# TODO: Move
hed_query = 'SELECT * FROM hed_schema_nodes WHERE 1'
Expand Down
Loading
Loading