4 changes: 3 additions & 1 deletion iblrig/neurophotometrics.py
@@ -349,11 +349,13 @@ def neurophotometrics_description(
case 'bpod':
return {'devices': {'neurophotometrics': description}}
case 'daqami':
hardware_settings: HardwareSettings = iblrig.path_helper.load_pydantic_yaml(HardwareSettings)
settings = hardware_settings.device_neurophotometrics
experiment_description = {'devices': {'neurophotometrics': description}}
experiment_description['devices']['neurophotometrics']['sync_metadata'] = dict(
acquisition_software='daqami',
collection='raw_photometry_data',
frameclock_channel='AI7',
frameclock_channel=settings.FRAMECLOCK_CHANNEL,
)
return experiment_description
case _:
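For reference, a minimal sketch of the lookup this hunk introduces: the daqami branch now reads the frame-clock channel from the rig's hardware settings instead of hard-coding 'AI7' (import path taken from this diff; the example value is illustrative only).

    from iblrig.path_helper import HardwareSettings, load_pydantic_yaml

    # Load the rig's hardware settings and read the newly added field;
    # it defaults to None when no frame-clock channel is configured.
    hardware_settings: HardwareSettings = load_pydantic_yaml(HardwareSettings)
    frameclock_channel = hardware_settings.device_neurophotometrics.FRAMECLOCK_CHANNEL  # e.g. 'AI7'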
1 change: 1 addition & 0 deletions iblrig/pydantic_definitions.py
@@ -171,6 +171,7 @@ class HardwareSettingsNeurophotometrics(BunchModel):
BONSAI_WORKFLOW: Path = Path('devices', 'neurophotometrics', 'FP3002.bonsai')
BONSAI_WORKFLOW_DAQ: Path = Path('devices', 'neurophotometrics', 'FP3002_daq.bonsai')
COM_NEUROPHOTOMETRY: str | None = None
FRAMECLOCK_CHANNEL: str | None = None


class HardwareSettingsCameraWorkflow(BunchModel):
20 changes: 17 additions & 3 deletions iblrig/test/test_neurophotometrics.py
@@ -1,7 +1,12 @@
import datetime
import unittest
from pathlib import Path
from tempfile import NamedTemporaryFile
from unittest.mock import patch

from iblrig.constants import BASE_PATH
from iblrig.neurophotometrics import neurophotometrics_description
from iblrig.path_helper import _load_settings_yaml


class TestExperimentDescription(unittest.TestCase):
@@ -27,9 +32,18 @@ def test_neurophotometrics_description(self):
self.assertDictEqual(dexpected, d)

# for daqami sync
d = neurophotometrics_description(
rois=['G0', 'G1'], locations=['SI', 'VTA'], sync_channel=1, start_time=dt, sync_mode='daqami'
)
settings_dict = _load_settings_yaml(BASE_PATH / 'settings' / 'hardware_settings_template.yaml')
with patch('iblrig.path_helper._load_settings_yaml', return_value=settings_dict), NamedTemporaryFile() as fp:
settings_dict['device_neurophotometrics'] = {
'BONSAI_EXECUTABLE': Path(fp.name),
'BONSAI_WORKFLOW': Path('devices', 'neurophotometrics', 'FP3002.bonsai'),
'BONSAI_WORKFLOW_DAQ': Path('devices', 'neurophotometrics', 'FP3002_daq.bonsai'),
'COM_NEUROPHOTOMETRY': None,
'FRAMECLOCK_CHANNEL': 'AI7',
}
d = neurophotometrics_description(
rois=['G0', 'G1'], locations=['SI', 'VTA'], sync_channel=1, start_time=dt, sync_mode='daqami'
)
dexpected = {
'devices': {
'neurophotometrics': {
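The expected description is truncated above; per the neurophotometrics.py hunk, the daqami case attaches a sync_metadata block whose frame-clock channel now comes from the mocked settings. A sketch of that portion only (keys and values as they appear in this diff; the surrounding structure is abridged):

    # sync_metadata attached to the description in the 'daqami' case
    sync_metadata = dict(
        acquisition_software='daqami',
        collection='raw_photometry_data',
        frameclock_channel='AI7',  # sourced from FRAMECLOCK_CHANNEL in the mocked hardware settings
    )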
23 changes: 15 additions & 8 deletions iblrig/test/test_transfers.py
@@ -18,7 +18,6 @@
import iblrig.raw_data_loaders
from ibllib.io import session_params
from ibllib.tests.fixtures.utils import populate_raw_spikeglx
from iblphotometry.io import validate_neurophotometrics_df, validate_neurophotometrics_digital_inputs
from iblrig.path_helper import HardwareSettings, load_pydantic_yaml
from iblrig.test.base import TASK_KWARGS
from iblrig.transfer_experiments import BehaviorCopier, CopyState, EphysCopier, SessionCopier, VideoCopier
@@ -98,16 +97,20 @@ def create_fake_data(self, start_time: datetime | None = None) -> Path:

# creating fake digital_inputs.csv
cols_dtypes = dict(
ChannelName=str, Channel='int8', AlwaysTrue='bool', SystemTimestamp='float64', ComputerTimestamp='float64'
ChannelName=str,
Channel='int8',
AlwaysTrue='bool',
SystemTimestamp='float64',
ComputerTimestamp='float64',
)
cols = list(cols_dtypes.keys())
digital_inputs_df = pd.DataFrame(np.random.randn(10, len(cols)), columns=cols)
for col, dtype in cols_dtypes.items():
digital_inputs_df[col] = digital_inputs_df[col].astype(dtype)

digital_inputs_df = validate_neurophotometrics_digital_inputs(digital_inputs_df)
digital_inputs_df.to_csv(neurophotometrics_folder / 'digital_inputs.csv', index=False, header=False)
digital_inputs_df.to_csv(neurophotometrics_folder / 'digital_inputs.csv')

# creating fake photometry data file
cols_dtypes = dict(
FrameCounter='int64',
SystemTimestamp='float64',
@@ -116,17 +119,13 @@ def create_fake_data(self, start_time: datetime | None = None) -> Path:
Region1G='float64',
Region2G='float64',
)

# creating fake photometry data file
cols = list(cols_dtypes.keys())
raw_photometry_df = pd.DataFrame(np.random.randn(10, len(cols)), columns=cols)
for col, dtype in cols_dtypes.items():
raw_photometry_df[col] = raw_photometry_df[col].astype(dtype)

raw_photometry_df = validate_neurophotometrics_df(raw_photometry_df)
(neurophotometrics_folder / 'raw_photometry').mkdir(exist_ok=True)
raw_photometry_df.to_csv(neurophotometrics_folder / 'raw_photometry' / 'raw_photometry.csv', index=False)

logger.info('Created fake photometry data in %s', neurophotometrics_folder)
return neurophotometrics_folder

@@ -159,9 +158,17 @@ def test_copier(self):
assert remote_photometry_path.joinpath('_neurophotometrics_fpData.channels.csv').exists()
assert remote_photometry_path.joinpath('_neurophotometrics_fpData.digitalInputs.pqt').exists()
assert remote_photometry_path.joinpath('_neurophotometrics_fpData.raw.pqt').exists()
# check raw data
data_raw_local = pd.read_csv(local_photometry_path.joinpath('raw_photometry', 'raw_photometry.csv'))
data_raw_remote = pd.read_parquet(remote_photometry_path.joinpath('_neurophotometrics_fpData.raw.pqt'))
pd.testing.assert_frame_equal(data_raw_local, data_raw_remote, check_dtype=False)
# check digital inputs data
assert remote_photometry_path.joinpath('_neurophotometrics_fpData.digitalInputs.pqt').exists()
data_digital_inputs_local = pd.read_csv(local_photometry_path.joinpath('digital_inputs.csv'))
data_digital_inputs_remote = pd.read_parquet(
remote_photometry_path.joinpath('_neurophotometrics_fpData.digitalInputs.pqt')
)
pd.testing.assert_frame_equal(data_digital_inputs_local, data_digital_inputs_remote, check_dtype=False)


class TestIntegrationTransferExperiments(TestIntegrationTransferExperimentsBase):
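A note on the comparisons added above: the local files are CSV while the copied files are parquet, so narrow dtypes (e.g. int8, bool) are not preserved through the CSV round trip, hence check_dtype=False. A minimal standalone sketch of the same check (paths are illustrative):

    import pandas as pd

    # CSV source written by the rig vs. parquet written by the copier
    local_df = pd.read_csv('digital_inputs.csv')                        # illustrative path
    remote_df = pd.read_parquet('_neurophotometrics_fpData.digitalInputs.pqt')
    # dtypes differ after the CSV round trip, so only values and shape are compared
    pd.testing.assert_frame_equal(local_df, remote_df, check_dtype=False)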
16 changes: 5 additions & 11 deletions iblrig/transfer_experiments.py
@@ -11,8 +11,9 @@
from os.path import samestat
from pathlib import Path

import pandas as pd

import ibllib.pipes.misc
import iblphotometry.io as fpio
import one.alf.path as alfiles
from ibllib.io import raw_data_loaders, session_params
from ibllib.pipes.misc import sleepless
@@ -637,7 +638,7 @@ def _copy_collections(self) -> bool:
case 'bpod':
# copy the digital inputs file
csv_digital_inputs = neurophotometrics_session_path.joinpath('digital_inputs.csv')
digital_inputs_df = fpio.read_digital_inputs_csv(csv_digital_inputs, validate=True)
digital_inputs_df = pd.read_csv(csv_digital_inputs)
digital_inputs_df.to_parquet(remote_photometry_path.joinpath('_neurophotometrics_fpData.digitalInputs.pqt'))
case 'daqami':
# find the daqami files that correspond to the current acquisition
@@ -680,15 +681,8 @@ def _copy_collections(self) -> bool:
remote_sync_path.mkdir(exist_ok=True, parents=True)
shutil.copy(daqami_file, remote_sync_path.joinpath('_mcc_DAQdata.raw.tdms'))

# digital outputs file
# csv_digital_outputs = neurophotometrics_session_folder / 'digital_outputs.csv'
# digital_outputs_df = fpio.read_digital_outputs_csv(csv_digital_outputs, validate=True)
# digital_outputs_df.to_parquet(remote_photometry_path / '_neurophotometrics_fpData.digitalOutputs.pqt')

# explicitly with the data from the experiment description file
raw_photometry_df = fpio.from_raw_neurophotometrics_file_to_raw_df(csv_raw_photometry, validate=False)
cols = neurophotometrics_description['fibers'].keys()
raw_photometry_df = fpio.validate_neurophotometrics_df(raw_photometry_df, data_columns=cols)
# read neurophotometrics file
raw_photometry_df = pd.read_csv(csv_raw_photometry)
raw_photometry_df.to_parquet(remote_photometry_path.joinpath('_neurophotometrics_fpData.raw.pqt'))

# TODO why are we explicitly copying this file?
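With the iblphotometry readers removed from this module, the copy step becomes plain pandas I/O: the raw acquisition CSVs are converted to parquet without validation at copy time. A minimal sketch of that conversion (paths are illustrative; pandas.DataFrame.to_parquet requires a parquet engine such as pyarrow to be installed):

    import pandas as pd

    # Read the acquisition CSV as-is and persist it as parquet on the remote target.
    raw_photometry_df = pd.read_csv('raw_photometry.csv')               # illustrative path
    raw_photometry_df.to_parquet('_neurophotometrics_fpData.raw.pqt')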
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -22,7 +22,7 @@ dependencies = [
"iblqt>=0.8.0",
"ONE-api>=3.3.0",
"tycmd-wrapper>=0.2.1",
"ibl-photometry>=0.1.1",
"ibl-photometry>=0.1.2",
#
# Everything else
"annotated-types>=0.7.0",