iblphotometry integration #749

Open · wants to merge 18 commits into base: iblrigv8dev
4 changes: 2 additions & 2 deletions docs/source/usage_neurophotometrics.rst
@@ -16,7 +16,7 @@ Setup
device_neurophotometrics:
  DEVICE_MODEL: NP3002
  BONSAI_EXECUTABLE: C:\Users\IBLuser\AppData\Local\Bonsai\Bonsai.exe
  BONSAI_WORKFLOW: devices\neurophotometrics\FP3002.bonsai
  BONSAI_WORKFLOW: devices\neurophotometrics\FP3002_digital_inputs.bonsai
  COM_NEUROPHOTOMETRY: COM3


@@ -46,7 +46,7 @@ Starting a photometry session

cd C:\iblrigv8\
venv\scripts\Activate.ps1
start_photometry_session --subject=Mickey --roi G0 G1 --location NBM SI
start_photometry_task --subject Mickey --rois G0 G1 --locations NBM SI



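For orientation while reviewing: the command above maps onto the Python entry point changed in this PR. A minimal sketch, assuming the function signature shown in the iblrig/neurophotometrics.py diff below:

    from iblrig.neurophotometrics import init_neurophotometrics_subject

    # equivalent to the CLI call above; ROIs and brain-region locations are paired up in order
    init_neurophotometrics_subject(subject='Mickey', rois=['G0', 'G1'], locations=['NBM', 'SI'])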
26 changes: 19 additions & 7 deletions iblrig/neurophotometrics.py
@@ -50,7 +50,7 @@ def start_workflow_cmd(debug: bool = False):


def init_neurophotometrics_subject(
session_stub: str, rois: Iterable[str], locations: Iterable[str], sync_channel: int = 1, **kwargs
subject: str, rois: Iterable[str], locations: Iterable[str], sync_channel: int = 1, **kwargs
) -> NeurophotometricsCopier:
"""
Initialize a neurophotometrics behavior session.
@@ -61,8 +61,8 @@ def init_neurophotometrics_subject(

Parameters
----------
session_stub : str
The name of the subject for this session.
subject : str
The name of the subject for this session.
rois : Iterable[str]
List of ROIs to be recorded.
locations : Iterable[str]
@@ -81,7 +81,17 @@ def init_neurophotometrics_subject(
regions = BrainRegions()
if not all(map(lambda x: x in regions.acronym, locations)):
_logger.warning(f'Brain regions {locations} not found in BrainRegions acronyms')
npc, dict_paths = _get_neurophotometrics_copier(session_stub)

# constructing the stub name
dict_paths = iblrig.path_helper.get_local_and_remote_paths()
date = datetime.datetime.today().strftime('%Y-%m-%d')
# count the session directories already present for this subject today (to get the session number)
n = len([path for path in (dict_paths['local_subjects_folder'] / subject / date).iterdir() if path.is_dir()])
session_number = f'{n + 1:03}'
stub_name = f'{subject}/{date}/{session_number}'

# creating the copier from the stub name and initializing
npc, dict_paths = _get_neurophotometrics_copier(stub_name)
description = NeurophotometricsCopier.neurophotometrics_description(rois, locations, sync_channel, **kwargs)
npc.initialize_experiment(acquisition_description=description)
return npc
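A quick illustration of the session stub constructed above (values are hypothetical):

    import datetime

    subject = 'Mickey'
    date = datetime.datetime.today().strftime('%Y-%m-%d')
    n = 2  # e.g. two session folders already exist for this subject today
    stub_name = f'{subject}/{date}/{n + 1:03}'  # -> 'Mickey/<today>/003'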
@@ -97,7 +107,7 @@ def copy_photometry_subject(session_stub: str) -> bool:
def start_photometry_task_cmd():
"""
Command line interface for preparing a neurophotometrics session on the photometry computer.
start_photometry_recording -s Algernon --rois G0 G1 --locations
start_photometry_task --subject Mickey --rois G0 G1 --locations NBM SI
:return:
"""
parser = argparse.ArgumentParser(
@@ -119,7 +129,9 @@ def start_photometry_task_cmd():
parser.add_argument('-c', '--sync-channel', type=int, default=1, help='Sync channel')
args = parser.parse_args()

assert len(args.roi) == len(args.location), 'The number of ROIs and locations must be the same.'
assert len(args.rois) == len(args.locations), 'The number of ROIs and locations must be the same.'
assert len(set(args.locations)) == len(args.locations), 'Duplicate brain regions are not allowed.'
assert len(set(args.rois)) == len(args.rois), 'Duplicate ROIs are not allowed.'

setup_logger(name='iblrig', level='DEBUG' if args.debug else 'INFO')
init_neurophotometrics_subject(subject=args.subject, rois=args.roi, locations=args.location, sync_channel=args.sync_channel)
init_neurophotometrics_subject(subject=args.subject, rois=args.rois, locations=args.locations, sync_channel=args.sync_channel)
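The duplicate checks added above matter because the fibers mapping built in transfer_experiments.py (see the diff further down) is keyed by ROI. A short sketch of how a duplicate ROI would silently drop a fiber:

    rois, locations = ['G0', 'G0'], ['NBM', 'SI']
    fibers = {roi: {'location': loc} for roi, loc in zip(rois, locations, strict=False)}
    print(fibers)  # {'G0': {'location': 'SI'}} -- the NBM fiber is silently lost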
8 changes: 5 additions & 3 deletions iblrig/test/test_transfers.py
@@ -145,19 +145,21 @@ def create_fake_data(self):
raw_photometry_df.to_csv(folder_neurophotometrics / 'raw_photometry.csv', index=False)

def test_copier(self):
session = _create_behavior_session(ntrials=50, kwargs=self.session_kwargs)
# session = _create_behavior_session(ntrials=50, kwargs=self.session_kwargs)
self.create_fake_data()

# the workaround to find the settings.yaml
with mock.patch('iblrig.path_helper._load_settings_yaml') as mocker:
mocker.side_effect = self.side_effect
# the actual code to test
iblrig.neurophotometrics.init_neurophotometrics_subject(
session_stub=session.paths['SESSION_FOLDER'],
session_stub=f'test_subject/{datetime.today().strftime("%Y-%m-%d")}/001',
rois=['Region00', 'Region01'],
locations=['VTA', 'SNc'],
)
iblrig.neurophotometrics.copy_photometry_subject(session.paths['SESSION_FOLDER'])
# iblrig.neurophotometrics.copy_photometry_subject(session.paths['SESSION_FOLDER'])
(sc,) = iblrig.commands.transfer_data(tag='neurophotometrics')
self.assertEqual(sc.state, 2)


class TestIntegrationTransferExperiments(TestIntegrationTransferExperimentsBase):
43 changes: 15 additions & 28 deletions iblrig/transfer_experiments.py
@@ -12,10 +12,9 @@
from pathlib import Path

import numpy as np
import pandas as pd
import pandera

import ibllib.pipes.misc
import iblphotometry.io as fpio
import iblrig
import one.alf.path as alfiles
from ibllib.io import raw_data_loaders, session_params
@@ -678,11 +677,17 @@ def neurophotometrics_description(
description['fibers'] = {roi: {'location': location} for roi, location in zip(rois, locations, strict=False)}
return {'neurophotometrics': description}

def _copy_collections(self, folder_neurophotometric: Path) -> bool:
def _copy_collections(self, folder_neurophotometric: Path | None = None) -> bool:
ed = self.experiment_description['neurophotometrics']
dt = datetime.datetime.fromisoformat(ed['datetime'])
# Here we find the first photometry folder recorded after the start time. If this fails,
# a custom start time can be passed to select the desired folder, or the folder can simply be renamed.
# FIXME TODO
folder_neurophotometric = (
self.session_path.parents[4].joinpath('neurophotometrics')
if folder_neurophotometric is None
else folder_neurophotometric
)
folder_day = next(folder_neurophotometric.glob(ed['datetime'][:10]), None)
assert folder_day is not None, f"Neurophotometrics folder {folder_neurophotometric} doesn't contain data"
folder_times = list(folder_day.glob('T*'))
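To make the new default explicit: parents[4] resolves to the folder five levels above the session path, which is where the neurophotometrics acquisition folder is assumed to live. A sketch with a hypothetical folder layout (the exact layout is an assumption here):

    from pathlib import Path

    session_path = Path('D:/iblrigv8_data/mainenlab/Subjects/Mickey/2024-07-15/003')  # hypothetical
    session_path.parents[4]                         # -> the local data root, five levels up
    session_path.parents[4] / 'neurophotometrics'   # default folder searched by _copy_collections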
@@ -693,32 +698,14 @@ def _copy_collections(self, folder_neurophotometric: Path) -> bool:
csv_digital_inputs = folder_day.joinpath(f'T{hhmmss[i]}', 'digital_inputs.csv')
assert csv_raw_photometry.exists(), f'Raw photometry file {csv_raw_photometry} not found'
assert csv_digital_inputs.exists(), f'Digital inputs file {csv_digital_inputs} not found'

# Copy the raw and digital inputs files to the server
# TODO move this into a data loader ? Especially the schemas will apply to both the csv and parquet format
df_raw_photometry = pd.read_csv(csv_raw_photometry)
df_digital_inputs = pd.read_csv(csv_digital_inputs, header=None)
df_digital_inputs.columns = ['ChannelName', 'Channel', 'AlwaysTrue', 'SystemTimestamp', 'ComputerTimestamp']
# this will ensure the columns are present, and that there was no magic new format on a new Bonsai version
schema_raw_data = pandera.DataFrameSchema(
columns=dict(
FrameCounter=pandera.Column(pandera.Int64),
SystemTimestamp=pandera.Column(pandera.Float64),
LedState=pandera.Column(pandera.Int16, coerce=True),
ComputerTimestamp=pandera.Column(pandera.Float64),
**{k: pandera.Column(pandera.Float64) for k in ed['fibers']},
)
)
schema_digital_inputs = pandera.DataFrameSchema(
columns=dict(
ChannelName=pandera.Column(str, coerce=True),
Channel=pandera.Column(pandera.Int8, coerce=True),
AlwaysTrue=pandera.Column(bool, coerce=True),
SystemTimestamp=pandera.Column(pandera.Float64),
ComputerTimestamp=pandera.Column(pandera.Float64),
)
)
df_raw_photometry = schema_raw_data.validate(df_raw_photometry)
df_digital_inputs = schema_digital_inputs.validate(df_digital_inputs)
# read in the raw photometry data without validation, then
df_raw_photometry = fpio.from_raw_neurophotometrics_file_to_raw_df(csv_raw_photometry, validate=False)
# validate explicitly against the data columns from the experiment description file
cols = ed['fibers'].keys()
df_raw_photometry = fpio.validate_neurophotometrics_df(df_raw_photometry, data_columns=cols)
df_digital_inputs = fpio.read_digital_inputs_csv(csv_digital_inputs, validate=True)
remote_photometry_path = self.remote_session_path.joinpath(ed['collection'])
remote_photometry_path.mkdir(parents=True, exist_ok=True)
df_raw_photometry.to_parquet(remote_photometry_path.joinpath('_neurophotometrics_fpData.raw.pqt'))
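After the copy step above (the rest of the hunk is collapsed), the raw data lands as parquet in the remote collection. A minimal check one could run, assuming only the filename shown in the diff:

    import pandas as pd

    # remote_photometry_path as constructed in _copy_collections above
    df = pd.read_parquet(remote_photometry_path / '_neurophotometrics_fpData.raw.pqt')
    print(df.columns.tolist())  # should include the ROI columns listed in the experiment description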
27 changes: 27 additions & 0 deletions pdm.lock


1 change: 1 addition & 0 deletions pyproject.toml
@@ -28,6 +28,7 @@ dependencies = [
"iblqt>=0.4.2",
"ONE-api>=2.11.2",
"tycmd-wrapper>=0.2.1",
"ibl-photometry",
#
# Everything else
"annotated-types>=0.7.0",