From 9907435904b1106436ac66961a7cbb4a1ed2bd07 Mon Sep 17 00:00:00 2001 From: grg2rsr Date: Thu, 24 Oct 2024 15:59:47 +0100 Subject: [PATCH 01/50] changes for testing conversions locally --- .../_scripts/convert_brainwide_map_processed_only.py | 6 ++++-- .../_scripts/convert_brainwide_map_raw_only.py | 9 ++++++--- src/ibl_to_nwb/converters/_brainwide_map_converter.py | 2 +- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only.py index af1ba5e..3fb6c4c 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only.py @@ -14,13 +14,15 @@ ) from ibl_to_nwb.testing import check_written_nwbfile_for_consistency -session_id = "d32876dd-8303-4720-8e7e-20678dc2fd71" +# session_id = "d32876dd-8303-4720-8e7e-20678dc2fd71" +session_id = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe # Specify the revision of the pose estimation data # Setting to 'None' will use whatever the latest released revision is revision = None -base_path = Path("E:/IBL") +# base_path = Path("E:/IBL") +base_path = Path.home() / "ibl_scratch" # local directory base_path.mkdir(exist_ok=True) nwbfiles_folder_path = base_path / "nwbfiles" nwbfiles_folder_path.mkdir(exist_ok=True) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py index 09388d1..ebde212 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py @@ -5,13 +5,16 @@ from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter from ibl_to_nwb.datainterfaces import RawVideoInterface -session_id = "d32876dd-8303-4720-8e7e-20678dc2fd71" +# session_id = "d32876dd-8303-4720-8e7e-20678dc2fd71" +session_id = "caa5dddc-9290-4e27-9f5e-575ba3598614" # 
a BWM session with dual probe + # Specify the revision of the pose estimation data # Setting to 'None' will use whatever the latest released revision is revision = None -base_path = Path("E:/IBL") +# base_path = Path("E:/IBL") +base_path = Path.home() / "ibl_scratch" # local directory base_path.mkdir(exist_ok=True) nwbfiles_folder_path = base_path / "nwbfiles" nwbfiles_folder_path.mkdir(exist_ok=True) @@ -28,7 +31,7 @@ # Specify the path to the SpikeGLX files on the server but use ONE API for timestamps data_interfaces = [] -spikeglx_source_folder_path = Path("D:/example_data/ephy_testing_data/spikeglx/Noise4Sam_g0") +# spikeglx_source_folder_path = Path("D:/example_data/ephy_testing_data/spikeglx/Noise4Sam_g0") spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=ibl_client) data_interfaces.append(spikeglx_subconverter) diff --git a/src/ibl_to_nwb/converters/_brainwide_map_converter.py b/src/ibl_to_nwb/converters/_brainwide_map_converter.py index a0aa4ca..a212a66 100644 --- a/src/ibl_to_nwb/converters/_brainwide_map_converter.py +++ b/src/ibl_to_nwb/converters/_brainwide_map_converter.py @@ -2,7 +2,7 @@ from neuroconv.utils import dict_deep_update, load_dict_from_file -from src.ibl_to_nwb.converters._iblconverter import IblConverter +from ibl_to_nwb.converters._iblconverter import IblConverter class BrainwideMapConverter(IblConverter): From 0c6b01bbff36b6cf27437030a94cf9dbaeff0e80 Mon Sep 17 00:00:00 2001 From: grg2rsr Date: Thu, 24 Oct 2024 16:58:06 +0100 Subject: [PATCH 02/50] some bugfixes to pass the processed-only checks --- ...inwide_map_processed_only_local_testing.py | 29 +++++++------------ src/ibl_to_nwb/testing/_consistency_checks.py | 11 +++++-- 2 files changed, 19 insertions(+), 21 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 8200505..3961870 100644 --- 
a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -4,24 +4,12 @@ import os -# import traceback -# from concurrent.futures import ProcessPoolExecutor, as_completed from pathlib import Path from shutil import rmtree - -# from tempfile import mkdtemp -# from dandi.download import download as dandi_download -# from dandi.organize import organize as dandi_organize -# from dandi.upload import upload as dandi_upload -# from neuroconv.tools.data_transfers import automatic_dandi_upload -# from nwbinspector.tools import get_s3_urls_and_dandi_paths from one.api import ONE -# from pynwb import NWBHDF5IO -# from pynwb.image import ImageSeries -# from tqdm import tqdm -from ibl_to_nwb.brainwide_map import BrainwideMapConverter -from ibl_to_nwb.brainwide_map.datainterfaces import ( +from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter +from ibl_to_nwb.datainterfaces import ( BrainwideMapTrialsInterface, ) from ibl_to_nwb.datainterfaces import ( @@ -33,6 +21,8 @@ WheelInterface, ) +from ibl_to_nwb.testing._consistency_checks import check_written_nwbfile_for_consistency + base_path = Path.home() / "ibl_scratch" # local directory # session = "d32876dd-8303-4720-8e7e-20678dc2fd71" session = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe @@ -70,9 +60,10 @@ for pose_estimation_file in pose_estimation_files: camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") data_interfaces.append( - IblPoseEstimationInterface( - one=session_one, session=session, camera_name=camera_name, include_pose=True, include_video=False - ) + # IblPoseEstimationInterface( + # one=session_one, session=session, camera_name=camera_name, include_pose=True, include_video=False + # ) + IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name) ) pupil_tracking_files = 
session_one.list_datasets(eid=session, filename="*features*") @@ -94,7 +85,7 @@ ) metadata = session_converter.get_metadata() -metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] + "-processed-only" +metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] # + "-processed-only" session_converter.run_conversion( nwbfile_path=nwbfile_path, @@ -109,3 +100,5 @@ if cleanup: rmtree(cache_folder) rmtree(nwbfile_path.parent) + +check_written_nwbfile_for_consistency(one=session_one, nwbfile_path=nwbfile_path) diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index ec5484a..544745b 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -22,8 +22,14 @@ def check_written_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): nwbfile = io.read() eid = nwbfile.session_id + # run all consistency checks _check_wheel_data(eid=eid, nwbfile=nwbfile, one=one) - # TODO: fill in the rest of the routed calls + _check_lick_data(eid=eid, nwbfile=nwbfile, one=one) + _check_roi_motion_energy_data(eid=eid, nwbfile=nwbfile, one=one) + _check_pose_estimation_data(eid=eid, nwbfile=nwbfile, one=one) + _check_trials_data(eid=eid, nwbfile=nwbfile, one=one) + _check_pupil_tracking_data(eid=eid, nwbfile=nwbfile, one=one) + _check_spike_sorting_data(eid=eid, nwbfile=nwbfile, one=one) def _check_wheel_data(*, eid: str, one: ONE, nwbfile: NWBFile, revision: str = None): @@ -180,7 +186,6 @@ def get_spikes_for_cluster(spike_clusters, spike_times, cluster): # get and prep data once for probe_name in probe_names: - # include revision TODO FIXME this will likely change - check back in with Miles if revision is not None: collection = f"alf/{probe_name}/pykilosort/{revision}" @@ -198,7 +203,7 @@ def get_spikes_for_cluster(spike_clusters, spike_times, cluster): for ix in units_table.index: probe_name = units_table.loc[ix, "probe_name"] - uuid = 
units_table.loc[ix, "uuid"] + uuid = units_table.loc[ix, "cluster_uuid"] spike_times_from_NWB = units_table.loc[ix, "spike_times"] cluster_id = np.where(cluster_uuids[probe_name] == uuid)[0][0] From a1614735e4f81de2923aa2a6804fc0909cc6f8d1 Mon Sep 17 00:00:00 2001 From: grg2rsr Date: Fri, 25 Oct 2024 15:09:30 +0100 Subject: [PATCH 03/50] for local testing --- ...rt_brainwide_map_raw_only_local_testing.py | 78 +++++++++++++++++++ .../converters/_ibl_spikeglx_converter.py | 49 ++++++------ 2 files changed, 104 insertions(+), 23 deletions(-) create mode 100644 src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py new file mode 100644 index 0000000..2695525 --- /dev/null +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py @@ -0,0 +1,78 @@ +from pathlib import Path + +from one.api import ONE + +from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter +from ibl_to_nwb.datainterfaces import RawVideoInterface + +# session_id = "d32876dd-8303-4720-8e7e-20678dc2fd71" +session_id = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe +data_folder = Path( + "/media/georg/openlab/Downloads/ONE/openalyx.internationalbrainlab.org/steinmetzlab/Subjects/NR_0031/2023-07-14/001" +) +spikeglx_source_folder_path = data_folder / "raw_ephys_data" + +# Specify the revision of the pose estimation data +# Setting to 'None' will use whatever the latest released revision is +revision = None + +# base_path = Path("E:/IBL") +base_path = Path.home() / "ibl_scratch" # local directory +base_path.mkdir(exist_ok=True) +nwbfiles_folder_path = base_path / "nwbfiles" +nwbfiles_folder_path.mkdir(exist_ok=True) + +# Initialize IBL (ONE) client to download processed data for this session +one_cache_folder_path = base_path / "cache" +ibl_client = ONE( + 
base_url="https://openalyx.internationalbrainlab.org", + password="international", + silent=True, + cache_dir=one_cache_folder_path, +) + +# Specify the path to the SpikeGLX files on the server but use ONE API for timestamps +data_interfaces = [] + +# spikeglx_source_folder_path = Path("D:/example_data/ephy_testing_data/spikeglx/Noise4Sam_g0") +spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=ibl_client, eid=session_id) +data_interfaces.append(spikeglx_subconverter) + +# # Raw video takes some special handling +# metadata_retrieval = BrainwideMapConverter(one=ibl_client, session=session_id, data_interfaces=[], verbose=False) +# subject_id = metadata_retrieval.get_metadata()["Subject"]["subject_id"] + +# pose_estimation_files = ibl_client.list_datasets(eid=session_id, filename="*.dlc*") +# for pose_estimation_file in pose_estimation_files: +# camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") + +# video_interface = RawVideoInterface( +# nwbfiles_folder_path=nwbfiles_folder_path, +# subject_id=subject_id, +# one=ibl_client, +# session=session_id, +# camera_name=camera_name, +# ) +# data_interfaces.append(video_interface) + +# Run conversion +session_converter = BrainwideMapConverter( + one=ibl_client, session=session_id, data_interfaces=data_interfaces, verbose=False +) + +metadata = session_converter.get_metadata() +metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] +subject_id = metadata["Subject"]["subject_id"] + +subject_folder_path = nwbfiles_folder_path / f"sub-{subject_id}" +subject_folder_path.mkdir(exist_ok=True) +nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{session_id}_desc-raw_ecephys+image.nwb" + +session_converter.run_conversion( + nwbfile_path=nwbfile_path, + metadata=metadata, + overwrite=True, +) + +# TODO: add some kind of raw-specific check +# check_written_nwbfile_for_consistency(one=ibl_client, nwbfile_path=nwbfile_path) diff --git 
a/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py b/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py index 3f9cc9a..b52c74a 100644 --- a/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py +++ b/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py @@ -2,39 +2,42 @@ from one.api import ONE from pydantic import DirectoryPath from pynwb import NWBFile +import numpy as np +from brainbox.io.one import SpikeSortingLoader, EphysSessionLoader class IblSpikeGlxConverter(SpikeGLXConverterPipe): - - def __init__(self, folder_path: DirectoryPath, one: ONE) -> None: + def __init__(self, folder_path: DirectoryPath, one: ONE, eid: str) -> None: super().__init__(folder_path=folder_path) self.one = one + self.eid = eid # probably should better name this session_id ? def temporally_align_data_interfaces(self) -> None: """Align the raw data timestamps to the other data streams using the ONE API.""" # This is the syntax for aligning the raw timestamps; I cannot test this without the actual data as stored # on your end, so please work with Heberto if there are any problems after uncommenting - # probe_to_imec_map = { - # "probe00": 0, - # "probe01": 1, - # } - # - # ephys_session_loader = EphysSessionLoader(one=self.one, eid=session_id) - # probes = ephys_session_loader.probes - # for probe_name, pid in ephys_session_loader.probes.items(): - # spike_sorting_loader = SpikeSortingLoader(pid=pid, one=ibl_client) - # - # probe_index = probe_to_imec_map[probe_name] - # for band in ["ap", "lf"]: - # recording_interface = next( - # interface - # for interface in self.data_interface_objects - # if f"imec{probe_index}.{band}" in interface.source_data["file_path"] - # ) - # - # band_info = spike_sorting_loader.raw_electrophysiology(band=band, stream=True) - # aligned_timestamps = spike_sorting_loader.samples2times(numpy.arange(0, band_info.ns), direction='forward') - # recording_interface.set_aligned_timestamps(aligned_timestamps=aligned_timestamps) + probe_to_imec_map = { + "probe00": 
0, + "probe01": 1, + } + + ephys_session_loader = EphysSessionLoader(one=self.one, eid=self.eid) + probes = ephys_session_loader.probes + for probe_name, pid in ephys_session_loader.probes.items(): + spike_sorting_loader = SpikeSortingLoader(pid=pid, one=self.one) + + probe_index = probe_to_imec_map[probe_name] + for band in ["ap", "lf"]: + recording_interface = self.data_interface_objects[f"imec{probe_index}.{band}"] + # recording_interface = next( + # interface + # for interface in self.data_interface_objects + # if f"imec{probe_index}.{band}" in interface.source_data["file_path"] + # ) + + band_info = spike_sorting_loader.raw_electrophysiology(band=band, stream=True) + aligned_timestamps = spike_sorting_loader.samples2times(np.arange(0, band_info.ns), direction="forward") + recording_interface.set_aligned_timestamps(aligned_timestamps=aligned_timestamps) pass def add_to_nwbfile(self, nwbfile: NWBFile, metadata) -> None: From caabeb6d45ee0b1b3fad9c38b67b97633c85ded2 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Tue, 10 Dec 2024 11:06:08 +0000 Subject: [PATCH 04/50] read after write for raw ephys and video data added --- src/ibl_to_nwb/testing/_consistency_checks.py | 94 +++++++++++++++++-- 1 file changed, 85 insertions(+), 9 deletions(-) diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index 544745b..7cc6811 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -5,6 +5,7 @@ from one.api import ONE from pandas.testing import assert_frame_equal from pynwb import NWBHDF5IO, NWBFile +from brainbox.io.one import SpikeSortingLoader def check_written_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): @@ -186,15 +187,10 @@ def get_spikes_for_cluster(spike_clusters, spike_times, cluster): # get and prep data once for probe_name in probe_names: - # include revision TODO FIXME this will likely change - check back in with Miles - if revision is not 
None: - collection = f"alf/{probe_name}/pykilosort/{revision}" - else: - collection = f"alf/{probe_name}/pykilosort" - - spike_times[probe_name] = one.load_dataset(eid, "spikes.times", collection=collection) - spike_clusters[probe_name] = one.load_dataset(eid, "spikes.clusters", collection=collection) - cluster_uuids[probe_name] = one.load_dataset(eid, "clusters.uuids", collection=collection) + collection = f"alf/{probe_name}/pykilosort" + spike_times[probe_name] = one.load_dataset(eid, "spikes.times", collection=collection, revision=revision) + spike_clusters[probe_name] = one.load_dataset(eid, "spikes.clusters", collection=collection, revision=revision) + cluster_uuids[probe_name] = one.load_dataset(eid, "clusters.uuids", collection=collection, revision=revision) # pre-sort for fast access sort_ix = np.argsort(spike_clusters[probe_name]) @@ -214,3 +210,83 @@ def get_spikes_for_cluster(spike_clusters, spike_times, cluster): # testing assert_array_less(np.max((spike_times_from_ONE - spike_times_from_NWB) * 30000), 1) + + +def _check_raw_ephys_data(*, eid: str, one: ONE, nwbfile: NWBFile, pname: str = None, band: str = "ap"): + # data_one + pids, pnames_one = one.eid2pid(eid) + pidname_map = dict(zip(pnames_one, pids)) + pid = pidname_map[pname] + spike_sorting_loader = SpikeSortingLoader(pid=pid, one=one) + sglx_streamer = spike_sorting_loader.raw_electrophysiology(band=band, stream=True) + data_one = sglx_streamer._raw + + pname_to_imec = { + "probe00": "Imec0", + "probe01": "Imec1", + } + imec_to_pname = dict(zip(pname_to_imec.values(), pname_to_imec.keys())) + imecs = [key.split(band.upper())[1] for key in list(nwbfile.acquisition.keys()) if band.upper() in key] + pnames_nwb = [imec_to_pname[imec] for imec in imecs] + + assert set(pnames_one) == set(pnames_nwb) + + # nwb ephys data + imec = pname_to_imec[pname] + data_nwb = nwbfile.acquisition[f"ElectricalSeries{band.upper()}{imec}"].data + + # compare number of samples in both + n_samples_one = 
data_one.shape[0] + n_samples_nwb = data_nwb.shape[0] + + assert n_samples_nwb == n_samples_one + + # draw a random set of samples and check if they are equal in value + n_samples, n_channels = data_nwb.shape + + ix = np.column_stack( + [ + np.random.randint(n_samples, size=10), + np.random.randint(n_channels, size=10), + ] + ) + + samples_nwb = np.array([data_nwb[*i] for i in ix]) + samples_one = np.array([data_one[*i] for i in ix]) + np.testing.assert_array_equal(samples_nwb, samples_one) + + # check the time stamps + nwb_timestamps = nwbfile.acquisition[f"ElectricalSeries{band.upper()}{imec}"].timestamps[:] + + # from brainbox.io + brainbox_timestamps = spike_sorting_loader.samples2times(np.arange(0, sglx_streamer.ns), direction="forward") + np.testing.assert_array_equal(nwb_timestamps, brainbox_timestamps) + + +def _check_raw_video_data(*, eid: str, one: ONE, nwbfile: NWBFile, nwbfile_path: str): + # timestamps + datasets = one.list_datasets(eid, "*Camera.times*", collection="alf") + cameras = [key for key in nwbfile.acquisition.keys() if key.endswith("Camera")] + for camera in cameras: + timestamps_nwb = nwbfile.acquisition[camera].timestamps[:] + + dataset = [dataset for dataset in datasets if camera.split("OriginalVideo")[1].lower() in dataset.lower()] + timestamps_one = one.load_dataset(eid, dataset) + np.testing.assert_array_equal(timestamps_nwb, timestamps_one) + + # values (the first 100 bytes) + datasets = one.list_datasets(eid, collection="raw_video_data") + cameras = [key for key in nwbfile.acquisition.keys() if key.endswith("Camera")] + + for camera in cameras: + cam = camera.split("OriginalVideo")[1].lower() + dataset = [dataset for dataset in datasets if cam in dataset.lower()] + one_video_path = one.load_dataset(eid, dataset) + with open(one_video_path, "rb") as fH: + one_video_bytes = fH.read(100) + + nwb_video_path = nwbfile_path.parent / Path(nwbfile.acquisition[camera].external_file[:][0]) + with open(nwb_video_path, "rb") as fH: + 
nwb_video_bytes = fH.read(100) + + assert one_video_bytes == nwb_video_bytes From 3ab8de39293b4f35102d4d2f0a2d9fa350818807 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Wed, 11 Dec 2024 14:02:48 +0000 Subject: [PATCH 05/50] revision argument in all datainterfaces --- ...inwide_map_processed_only_local_testing.py | 16 ++-- ...rt_brainwide_map_raw_only_local_testing.py | 77 ++++++++++--------- .../datainterfaces/_brainwide_map_trials.py | 5 +- .../datainterfaces/_ibl_sorting_extractor.py | 7 +- .../_ibl_streaming_interface.py | 3 +- src/ibl_to_nwb/datainterfaces/_lick_times.py | 5 +- .../datainterfaces/_pose_estimation.py | 21 ++--- .../datainterfaces/_pupil_tracking.py | 5 +- .../datainterfaces/_roi_motion_energy.py | 5 +- .../datainterfaces/_wheel_movement.py | 7 +- 10 files changed, 83 insertions(+), 68 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 3961870..bddd4bd 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -34,6 +34,7 @@ cleanup: bool = False # assert len(os.environ.get("DANDI_API_KEY", "")) > 0, "Run `export DANDI_API_KEY=...`!" 
+revision = None nwbfile_path.parent.mkdir(exist_ok=True) @@ -52,32 +53,29 @@ # These interfaces should always be present in source data data_interfaces.append(IblSortingInterface(session=session, cache_folder=cache_folder / "sorting")) -data_interfaces.append(BrainwideMapTrialsInterface(one=session_one, session=session)) -data_interfaces.append(WheelInterface(one=session_one, session=session)) +data_interfaces.append(BrainwideMapTrialsInterface(one=session_one, session=session, revision=revision)) +data_interfaces.append(WheelInterface(one=session_one, session=session, revision=revision)) # These interfaces may not be present; check if they are before adding to list pose_estimation_files = session_one.list_datasets(eid=session, filename="*.dlc*") for pose_estimation_file in pose_estimation_files: camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") data_interfaces.append( - # IblPoseEstimationInterface( - # one=session_one, session=session, camera_name=camera_name, include_pose=True, include_video=False - # ) - IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name) + IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name, revision=revision) ) pupil_tracking_files = session_one.list_datasets(eid=session, filename="*features*") for pupil_tracking_file in pupil_tracking_files: camera_name = pupil_tracking_file.replace("alf/_ibl_", "").replace(".features.pqt", "") - data_interfaces.append(PupilTrackingInterface(one=session_one, session=session, camera_name=camera_name)) + data_interfaces.append(PupilTrackingInterface(one=session_one, session=session, camera_name=camera_name, revision=revision)) roi_motion_energy_files = session_one.list_datasets(eid=session, filename="*ROIMotionEnergy.npy*") for roi_motion_energy_file in roi_motion_energy_files: camera_name = roi_motion_energy_file.replace("alf/", "").replace(".ROIMotionEnergy.npy", "") - 
data_interfaces.append(RoiMotionEnergyInterface(one=session_one, session=session, camera_name=camera_name)) + data_interfaces.append(RoiMotionEnergyInterface(one=session_one, session=session, camera_name=camera_name, revision=revision)) if session_one.list_datasets(eid=session, collection="alf", filename="licks*"): - data_interfaces.append(LickInterface(one=session_one, session=session)) + data_interfaces.append(LickInterface(one=session_one, session=session, revision=revision)) # Run conversion session_converter = BrainwideMapConverter( diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py index 2695525..3c162c8 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py @@ -1,20 +1,22 @@ +# %% from pathlib import Path - from one.api import ONE - from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter from ibl_to_nwb.datainterfaces import RawVideoInterface -# session_id = "d32876dd-8303-4720-8e7e-20678dc2fd71" -session_id = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe -data_folder = Path( - "/media/georg/openlab/Downloads/ONE/openalyx.internationalbrainlab.org/steinmetzlab/Subjects/NR_0031/2023-07-14/001" -) -spikeglx_source_folder_path = data_folder / "raw_ephys_data" +# eid = "d32876dd-8303-4720-8e7e-20678dc2fd71" +eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe + +# %% +# one_cache_folder = '/home/georg/ibl_scratch/ibl_conversion/caa5dddc-9290-4e27-9f5e-575ba3598614/cache' +# data_folder = Path( +# "/media/georg/openlab/Downloads/ONE/openalyx.internationalbrainlab.org/steinmetzlab/Subjects/NR_0031/2023-07-14/001" +# ) +# spikeglx_source_folder_path = data_folder / "raw_ephys_data" # Specify the revision of the pose estimation data # Setting to 'None' will use whatever the latest 
released revision is -revision = None +# revision = None # base_path = Path("E:/IBL") base_path = Path.home() / "ibl_scratch" # local directory @@ -23,50 +25,55 @@ nwbfiles_folder_path.mkdir(exist_ok=True) # Initialize IBL (ONE) client to download processed data for this session -one_cache_folder_path = base_path / "cache" -ibl_client = ONE( +# one_cache_folder_path = base_path / "cache" +one_cache_folder_path = "/home/georg/ibl_scratch/ibl_conversion/caa5dddc-9290-4e27-9f5e-575ba3598614/cache" +one = ONE( base_url="https://openalyx.internationalbrainlab.org", password="international", silent=True, cache_dir=one_cache_folder_path, ) -# Specify the path to the SpikeGLX files on the server but use ONE API for timestamps data_interfaces = [] -# spikeglx_source_folder_path = Path("D:/example_data/ephy_testing_data/spikeglx/Noise4Sam_g0") -spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=ibl_client, eid=session_id) -data_interfaces.append(spikeglx_subconverter) +# %% ephys +# session_folder = one.eid2path(eid) +# spikeglx_source_folder_path = session_folder / 'raw_ephys_data' -# # Raw video takes some special handling -# metadata_retrieval = BrainwideMapConverter(one=ibl_client, session=session_id, data_interfaces=[], verbose=False) -# subject_id = metadata_retrieval.get_metadata()["Subject"]["subject_id"] -# pose_estimation_files = ibl_client.list_datasets(eid=session_id, filename="*.dlc*") -# for pose_estimation_file in pose_estimation_files: -# camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") +# Specify the path to the SpikeGLX files on the server but use ONE API for timestamps +# spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=one, eid=eid) +# data_interfaces.append(spikeglx_subconverter) + + +# %% video +# Raw video takes some special handling +metadata_retrieval = BrainwideMapConverter(one=one, session=eid, data_interfaces=[], verbose=False) 
+subject_id = metadata_retrieval.get_metadata()["Subject"]["subject_id"] -# video_interface = RawVideoInterface( -# nwbfiles_folder_path=nwbfiles_folder_path, -# subject_id=subject_id, -# one=ibl_client, -# session=session_id, -# camera_name=camera_name, -# ) -# data_interfaces.append(video_interface) +pose_estimation_files = one.list_datasets(eid=eid, filename="*.dlc*") +for pose_estimation_file in pose_estimation_files: + camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") + + video_interface = RawVideoInterface( + nwbfiles_folder_path=nwbfiles_folder_path, + subject_id=subject_id, + one=one, + session=eid, + camera_name=camera_name, + ) + data_interfaces.append(video_interface) # Run conversion -session_converter = BrainwideMapConverter( - one=ibl_client, session=session_id, data_interfaces=data_interfaces, verbose=False -) +session_converter = BrainwideMapConverter(one=one, session=eid, data_interfaces=data_interfaces, verbose=False) metadata = session_converter.get_metadata() -metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] +metadata["NWBFile"]["eid"] = metadata["NWBFile"]["eid"] subject_id = metadata["Subject"]["subject_id"] subject_folder_path = nwbfiles_folder_path / f"sub-{subject_id}" subject_folder_path.mkdir(exist_ok=True) -nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{session_id}_desc-raw_ecephys+image.nwb" +nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-video.nwb" session_converter.run_conversion( nwbfile_path=nwbfile_path, @@ -75,4 +82,4 @@ ) # TODO: add some kind of raw-specific check -# check_written_nwbfile_for_consistency(one=ibl_client, nwbfile_path=nwbfile_path) +# check_written_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) diff --git a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py index 723b7d5..c7594f7 100644 --- a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py +++ 
b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py @@ -9,9 +9,10 @@ class BrainwideMapTrialsInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str): + def __init__(self, one: ONE, session: str, revision: str | None = None): self.one = one self.session = session + self.revision = one.list_revisions(session)[-1] if revision is None else revision def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -20,7 +21,7 @@ def get_metadata(self) -> dict: return metadata def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): - trials = self.one.load_object(id=self.session, obj="trials", collection="alf") + trials = self.one.load_object(id=self.session, obj="trials", collection="alf", revision=self.revision) column_ordering = [ "choice", diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index 38cbc7e..b698da3 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -16,7 +16,7 @@ class IblSortingExtractor(BaseSorting): installation_mesg = "" name = "iblsorting" - def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None): + def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision=None): from brainbox.io.one import SpikeSortingLoader from iblatlas.atlas import AllenAtlas from iblatlas.regions import BrainRegions @@ -28,6 +28,9 @@ def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None): silent=True, cache_dir=cache_folder, ) + if revision is None: # latest + revision = one.list_revisions(session)[-1] + atlas = AllenAtlas() brain_regions = BrainRegions() @@ -45,7 +48,7 @@ def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None): for probe_name in probe_names: sorting_loader = SpikeSortingLoader(eid=session, one=one, pname=probe_name, atlas=atlas) sorting_loaders.update({probe_name: 
sorting_loader}) - spikes, clusters, channels = sorting_loader.load_spike_sorting() + spikes, clusters, channels = sorting_loader.load_spike_sorting(revision=revision) # cluster_ids.extend(list(np.array(clusters["metrics"]["cluster_id"]) + unit_id_per_probe_shift)) number_of_units = len(np.unique(spikes["clusters"])) cluster_ids.extend(list(np.arange(number_of_units).astype("int32") + unit_id_per_probe_shift)) diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_streaming_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_streaming_interface.py index f8aac0b..5064633 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_streaming_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_streaming_interface.py @@ -81,7 +81,8 @@ def __init__(self, **kwargs): self.recording_extractor.set_property(key="ibl_y", values=ibl_coords[:, 1]) self.recording_extractor.set_property(key="ibl_z", values=ibl_coords[:, 2]) self.recording_extractor.set_property( # SpikeInterface refers to this as 'brain_area' - key="brain_area", values=list(channels["acronym"]) # NeuroConv remaps to 'location', a required field + key="brain_area", + values=list(channels["acronym"]), # NeuroConv remaps to 'location', a required field ) # Acronyms are symmetric, do not differentiate hemisphere self.recording_extractor.set_property( key="beryl_location", diff --git a/src/ibl_to_nwb/datainterfaces/_lick_times.py b/src/ibl_to_nwb/datainterfaces/_lick_times.py index 76f9a9e..375f854 100644 --- a/src/ibl_to_nwb/datainterfaces/_lick_times.py +++ b/src/ibl_to_nwb/datainterfaces/_lick_times.py @@ -7,12 +7,13 @@ class LickInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str): + def __init__(self, one: ONE, session: str, revision: str | None = None): self.one = one self.session = session + self.revision = one.list_revisions(session)[-1] if revision is None else revision def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): - licks = self.one.load_object(id=self.session, obj="licks", collection="alf") 
+ licks = self.one.load_object(id=self.session, obj="licks", collection="alf", revision=self.revision) lick_events_table = DynamicTable( name="LickTimes", diff --git a/src/ibl_to_nwb/datainterfaces/_pose_estimation.py b/src/ibl_to_nwb/datainterfaces/_pose_estimation.py index abf30d3..94946b7 100644 --- a/src/ibl_to_nwb/datainterfaces/_pose_estimation.py +++ b/src/ibl_to_nwb/datainterfaces/_pose_estimation.py @@ -37,17 +37,18 @@ def __init__( self.revision = revision if self.revision is None: - session_files = self.one.list_datasets(eid=self.session, filename=f"*{self.camera_name}.dlc*") - revision_datetime_format = "%Y-%m-%d" - revisions = [ - datetime.strptime(session_file.split("#")[1], revision_datetime_format) - for session_file in session_files - if "#" in session_file - ] + self.revision = one.list_revisions(session)[-1] + # session_files = self.one.list_datasets(eid=self.session, filename=f"*{self.camera_name}.dlc*") + # revision_datetime_format = "%Y-%m-%d" + # revisions = [ + # datetime.strptime(session_file.split("#")[1], revision_datetime_format) + # for session_file in session_files + # if "#" in session_file + # ] - if any(revisions): - most_recent = max(revisions) - self.revision = most_recent.strftime("%Y-%m-%d") + # if any(revisions): + # most_recent = max(revisions) + # self.revision = most_recent.strftime("%Y-%m-%d") def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict) -> None: camera_data = self.one.load_object( diff --git a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index c307ef6..0477795 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -12,10 +12,11 @@ class PupilTrackingInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, camera_name: str): + def __init__(self, one: ONE, session: str, camera_name: str, revision: str | None = None): self.one = one self.session = session self.camera_name = 
camera_name + self.revision = one.list_revisions(session)[-1] if revision is None else revision def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -28,7 +29,7 @@ def get_metadata(self) -> dict: def add_to_nwbfile(self, nwbfile, metadata: dict): left_or_right = self.camera_name[:5].rstrip("C") - camera_data = self.one.load_object(id=self.session, obj=self.camera_name, collection="alf") + camera_data = self.one.load_object(id=self.session, obj=self.camera_name, collection="alf", revision=self.revision) pupil_time_series = list() for ibl_key in ["pupilDiameter_raw", "pupilDiameter_smooth"]: diff --git a/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py b/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py index 4218647..8ea21d3 100644 --- a/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py +++ b/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py @@ -7,17 +7,18 @@ class RoiMotionEnergyInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, camera_name: str): + def __init__(self, one: ONE, session: str, camera_name: str, revision: str | None = None): self.one = one self.session = session self.camera_name = camera_name + self.revision = one.list_revisions(session)[-1] if revision is None else revision def add_to_nwbfile(self, nwbfile, metadata: dict): left_right_or_body = self.camera_name[:5].rstrip("C") camera_data = self.one.load_object(id=self.session, obj=self.camera_name, collection="alf") motion_energy_video_region = self.one.load_object( - id=self.session, obj=f"{left_right_or_body}ROIMotionEnergy", collection="alf" + id=self.session, obj=f"{left_right_or_body}ROIMotionEnergy", collection="alf", revision=self.revision ) width, height, x, y = motion_energy_video_region["position"] diff --git a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py index 234b307..00ec234 100644 --- a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py +++ 
b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py @@ -11,9 +11,10 @@ class WheelInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str): + def __init__(self, one: ONE, session: str, revision: str | None = None): self.one = one self.session = session + self.revision = one.list_revisions(session) if revision is None else revision def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -23,8 +24,8 @@ def get_metadata(self) -> dict: return metadata def add_to_nwbfile(self, nwbfile, metadata: dict): - wheel_moves = self.one.load_object(id=self.session, obj="wheelMoves", collection="alf") - wheel = self.one.load_object(id=self.session, obj="wheel", collection="alf") + wheel_moves = self.one.load_object(id=self.session, obj="wheelMoves", collection="alf", revision=self.revision) + wheel = self.one.load_object(id=self.session, obj="wheel", collection="alf", revision=self.revision) # Estimate velocity and acceleration interpolation_frequency = 1000.0 # Hz From 58ccd217f734c87327cc092e5a0c8f28c592b6ff Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Wed, 11 Dec 2024 14:18:42 +0000 Subject: [PATCH 06/50] cleanups --- ...inwide_map_processed_only_local_testing.py | 1 - ...rt_brainwide_map_raw_only_local_testing.py | 33 +++++--------- src/ibl_to_nwb/_scripts/download_data.py | 44 +++++++++++++++++++ 3 files changed, 54 insertions(+), 24 deletions(-) create mode 100644 src/ibl_to_nwb/_scripts/download_data.py diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index bddd4bd..836c8a8 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -24,7 +24,6 @@ from ibl_to_nwb.testing._consistency_checks import check_written_nwbfile_for_consistency base_path = Path.home() / "ibl_scratch" # local directory -# session = 
"d32876dd-8303-4720-8e7e-20678dc2fd71" session = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe nwbfile_path = base_path / "nwbfiles" / session / f"{session}.nwb" diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py index 3c162c8..37b4751 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py @@ -4,29 +4,18 @@ from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter from ibl_to_nwb.datainterfaces import RawVideoInterface -# eid = "d32876dd-8303-4720-8e7e-20678dc2fd71" -eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe +# select eid +# -> run download_data_local first with this eid to set up the local folder structure and one cache +eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" -# %% -# one_cache_folder = '/home/georg/ibl_scratch/ibl_conversion/caa5dddc-9290-4e27-9f5e-575ba3598614/cache' -# data_folder = Path( -# "/media/georg/openlab/Downloads/ONE/openalyx.internationalbrainlab.org/steinmetzlab/Subjects/NR_0031/2023-07-14/001" -# ) -# spikeglx_source_folder_path = data_folder / "raw_ephys_data" - -# Specify the revision of the pose estimation data -# Setting to 'None' will use whatever the latest released revision is -# revision = None - -# base_path = Path("E:/IBL") -base_path = Path.home() / "ibl_scratch" # local directory +# folders +base_path = Path.home() / "ibl_scratch" base_path.mkdir(exist_ok=True) nwbfiles_folder_path = base_path / "nwbfiles" nwbfiles_folder_path.mkdir(exist_ok=True) # Initialize IBL (ONE) client to download processed data for this session -# one_cache_folder_path = base_path / "cache" -one_cache_folder_path = "/home/georg/ibl_scratch/ibl_conversion/caa5dddc-9290-4e27-9f5e-575ba3598614/cache" +one_cache_folder_path = base_path / 'ibl_conversion' / eid / 
'cache' one = ONE( base_url="https://openalyx.internationalbrainlab.org", password="international", @@ -37,14 +26,12 @@ data_interfaces = [] # %% ephys -# session_folder = one.eid2path(eid) -# spikeglx_source_folder_path = session_folder / 'raw_ephys_data' - +session_folder = one.eid2path(eid) +spikeglx_source_folder_path = session_folder / 'raw_ephys_data' # Specify the path to the SpikeGLX files on the server but use ONE API for timestamps -# spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=one, eid=eid) -# data_interfaces.append(spikeglx_subconverter) - +spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=one, eid=eid) +data_interfaces.append(spikeglx_subconverter) # %% video # Raw video takes some special handling diff --git a/src/ibl_to_nwb/_scripts/download_data.py b/src/ibl_to_nwb/_scripts/download_data.py new file mode 100644 index 0000000..b0e07ca --- /dev/null +++ b/src/ibl_to_nwb/_scripts/download_data.py @@ -0,0 +1,44 @@ +# %% +from pathlib import Path +from one.api import ONE + +# %% +eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM eid with dual probe + +base_path = Path.home() / "ibl_scratch" # local directory + +# Download behavior and spike sorted data for this eid +session_path = base_path / "ibl_conversion" / eid +cache_folder = base_path / "ibl_conversion" / eid / "cache" +session_one = ONE( + base_url="https://openalyx.internationalbrainlab.org", + password="international", + silent=False, + cache_dir=cache_folder, +) + +# %% latest revision +revisions = session_one.list_revisions(eid) +revision = revisions[-1] + +# %% list all datasets +datasets = session_one.list_datasets(eid) + +# %% list all collections +collections = session_one.list_collections(eid) + +# %% +for dataset in datasets: + session_one.load_dataset(eid, dataset, download_only=True) + +# %% downloads all raw ephys data! 
+collections = session_one.list_collections(eid, collection="raw_ephys_data/*") +for collection in collections: + datasets = session_one.list_datasets(eid, collection=collection) + for dataset in datasets: + session_one.load_dataset(eid, dataset, download_only=True) + +# %% just the video data +datasets = session_one.list_datasets(eid, collection="raw_video_data") +for dataset in datasets: + session_one.load_dataset(eid, dataset, download_only=True) From 5e17eeca245dbbd9b0408177b64d898908affc70 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 11 Dec 2024 15:48:28 +0000 Subject: [PATCH 07/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- ..._brainwide_map_processed_only_local_testing.py | 15 ++++++++------- ...onvert_brainwide_map_raw_only_local_testing.py | 6 ++++-- src/ibl_to_nwb/_scripts/download_data.py | 1 + .../converters/_ibl_spikeglx_converter.py | 4 ++-- .../datainterfaces/_ibl_sorting_extractor.py | 2 +- src/ibl_to_nwb/datainterfaces/_pose_estimation.py | 1 - src/ibl_to_nwb/datainterfaces/_pupil_tracking.py | 4 +++- src/ibl_to_nwb/testing/_consistency_checks.py | 2 +- 8 files changed, 20 insertions(+), 15 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 836c8a8..6a3da46 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -3,16 +3,14 @@ os.environ["JUPYTER_PLATFORM_DIRS"] = "1" # Annoying import os - from pathlib import Path from shutil import rmtree + from one.api import ONE -from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter +from ibl_to_nwb.converters import BrainwideMapConverter from ibl_to_nwb.datainterfaces import ( 
BrainwideMapTrialsInterface, -) -from ibl_to_nwb.datainterfaces import ( IblPoseEstimationInterface, IblSortingInterface, LickInterface, @@ -20,7 +18,6 @@ RoiMotionEnergyInterface, WheelInterface, ) - from ibl_to_nwb.testing._consistency_checks import check_written_nwbfile_for_consistency base_path = Path.home() / "ibl_scratch" # local directory @@ -66,12 +63,16 @@ pupil_tracking_files = session_one.list_datasets(eid=session, filename="*features*") for pupil_tracking_file in pupil_tracking_files: camera_name = pupil_tracking_file.replace("alf/_ibl_", "").replace(".features.pqt", "") - data_interfaces.append(PupilTrackingInterface(one=session_one, session=session, camera_name=camera_name, revision=revision)) + data_interfaces.append( + PupilTrackingInterface(one=session_one, session=session, camera_name=camera_name, revision=revision) + ) roi_motion_energy_files = session_one.list_datasets(eid=session, filename="*ROIMotionEnergy.npy*") for roi_motion_energy_file in roi_motion_energy_files: camera_name = roi_motion_energy_file.replace("alf/", "").replace(".ROIMotionEnergy.npy", "") - data_interfaces.append(RoiMotionEnergyInterface(one=session_one, session=session, camera_name=camera_name, revision=revision)) + data_interfaces.append( + RoiMotionEnergyInterface(one=session_one, session=session, camera_name=camera_name, revision=revision) + ) if session_one.list_datasets(eid=session, collection="alf", filename="licks*"): data_interfaces.append(LickInterface(one=session_one, session=session, revision=revision)) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py index 37b4751..904acdf 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py @@ -1,6 +1,8 @@ # %% from pathlib import Path + from one.api import ONE + from ibl_to_nwb.converters import 
BrainwideMapConverter, IblSpikeGlxConverter from ibl_to_nwb.datainterfaces import RawVideoInterface @@ -15,7 +17,7 @@ nwbfiles_folder_path.mkdir(exist_ok=True) # Initialize IBL (ONE) client to download processed data for this session -one_cache_folder_path = base_path / 'ibl_conversion' / eid / 'cache' +one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" one = ONE( base_url="https://openalyx.internationalbrainlab.org", password="international", @@ -27,7 +29,7 @@ # %% ephys session_folder = one.eid2path(eid) -spikeglx_source_folder_path = session_folder / 'raw_ephys_data' +spikeglx_source_folder_path = session_folder / "raw_ephys_data" # Specify the path to the SpikeGLX files on the server but use ONE API for timestamps spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=one, eid=eid) diff --git a/src/ibl_to_nwb/_scripts/download_data.py b/src/ibl_to_nwb/_scripts/download_data.py index b0e07ca..93fd184 100644 --- a/src/ibl_to_nwb/_scripts/download_data.py +++ b/src/ibl_to_nwb/_scripts/download_data.py @@ -1,5 +1,6 @@ # %% from pathlib import Path + from one.api import ONE # %% diff --git a/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py b/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py index b52c74a..a9bf223 100644 --- a/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py +++ b/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py @@ -1,9 +1,9 @@ +import numpy as np +from brainbox.io.one import EphysSessionLoader, SpikeSortingLoader from neuroconv.converters import SpikeGLXConverterPipe from one.api import ONE from pydantic import DirectoryPath from pynwb import NWBFile -import numpy as np -from brainbox.io.one import SpikeSortingLoader, EphysSessionLoader class IblSpikeGlxConverter(SpikeGLXConverterPipe): diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index b698da3..c7bb03f 100644 --- 
a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -28,7 +28,7 @@ def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, r silent=True, cache_dir=cache_folder, ) - if revision is None: # latest + if revision is None: # latest revision = one.list_revisions(session)[-1] atlas = AllenAtlas() diff --git a/src/ibl_to_nwb/datainterfaces/_pose_estimation.py b/src/ibl_to_nwb/datainterfaces/_pose_estimation.py index 94946b7..5e0e49f 100644 --- a/src/ibl_to_nwb/datainterfaces/_pose_estimation.py +++ b/src/ibl_to_nwb/datainterfaces/_pose_estimation.py @@ -1,4 +1,3 @@ -from datetime import datetime from typing import Optional import numpy as np diff --git a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index 0477795..fb7b626 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -29,7 +29,9 @@ def get_metadata(self) -> dict: def add_to_nwbfile(self, nwbfile, metadata: dict): left_or_right = self.camera_name[:5].rstrip("C") - camera_data = self.one.load_object(id=self.session, obj=self.camera_name, collection="alf", revision=self.revision) + camera_data = self.one.load_object( + id=self.session, obj=self.camera_name, collection="alf", revision=self.revision + ) pupil_time_series = list() for ibl_key in ["pupilDiameter_raw", "pupilDiameter_smooth"]: diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index 7cc6811..dc922df 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -1,11 +1,11 @@ from pathlib import Path import numpy as np +from brainbox.io.one import SpikeSortingLoader from numpy.testing import assert_array_equal, assert_array_less from one.api import ONE from pandas.testing import assert_frame_equal from pynwb import NWBHDF5IO, NWBFile -from 
brainbox.io.one import SpikeSortingLoader def check_written_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): From 7999b4a81e66416d8da6d0fb4448467f320521c1 Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Fri, 13 Dec 2024 09:06:20 -0600 Subject: [PATCH 08/50] add signature to sorting interface --- .../convert_brainwide_map_processed_only_local_testing.py | 3 +-- src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py | 2 +- src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py | 8 ++++++++ 3 files changed, 10 insertions(+), 3 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 836c8a8..29c991f 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -25,9 +25,8 @@ base_path = Path.home() / "ibl_scratch" # local directory session = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe - nwbfile_path = base_path / "nwbfiles" / session / f"{session}.nwb" -nwbfile_path.parent.mkdir(exist_ok=True) +nwbfile_path.parent.mkdir(exist_ok=True, parents=True) stub_test: bool = False cleanup: bool = False diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index b698da3..dc7dc68 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -16,7 +16,7 @@ class IblSortingExtractor(BaseSorting): installation_mesg = "" name = "iblsorting" - def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision=None): + def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str]=None): from brainbox.io.one import SpikeSortingLoader from iblatlas.atlas import AllenAtlas from iblatlas.regions 
import BrainRegions diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py index 18c478f..2d5401d 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py @@ -1,6 +1,8 @@ """The interface for loading spike sorted data via ONE access.""" from pathlib import Path +from typing import Optional +from pydantic import DirectoryPath from neuroconv.datainterfaces.ecephys.basesortingextractorinterface import ( BaseSortingExtractorInterface, @@ -13,6 +15,7 @@ class IblSortingInterface(BaseSortingExtractorInterface): Extractor = IblSortingExtractor + def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -27,3 +30,8 @@ def get_metadata(self) -> dict: ) return metadata + + + + def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str] = None, verbose: bool = False): + super().__init__(verbose, session=session, cache_folder=cache_folder, revision=revision) \ No newline at end of file From c58da111c8a59a14fe3624797c348ac40b3321e1 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 13 Dec 2024 15:08:47 +0000 Subject: [PATCH 09/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../datainterfaces/_ibl_sorting_extractor.py | 2 +- .../datainterfaces/_ibl_sorting_interface.py | 15 +++++++++------ 2 files changed, 10 insertions(+), 7 deletions(-) diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index ad5777c..54e374c 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -16,7 +16,7 @@ class IblSortingExtractor(BaseSorting): installation_mesg = "" name = "iblsorting" - def __init__(self, session: str, 
cache_folder: Optional[DirectoryPath] = None, revision: Optional[str]=None): + def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str] = None): from brainbox.io.one import SpikeSortingLoader from iblatlas.atlas import AllenAtlas from iblatlas.regions import BrainRegions diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py index 2d5401d..8f81e4f 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py @@ -2,12 +2,12 @@ from pathlib import Path from typing import Optional -from pydantic import DirectoryPath from neuroconv.datainterfaces.ecephys.basesortingextractorinterface import ( BaseSortingExtractorInterface, ) from neuroconv.utils import load_dict_from_file +from pydantic import DirectoryPath from ._ibl_sorting_extractor import IblSortingExtractor @@ -15,7 +15,6 @@ class IblSortingInterface(BaseSortingExtractorInterface): Extractor = IblSortingExtractor - def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -31,7 +30,11 @@ def get_metadata(self) -> dict: return metadata - - - def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str] = None, verbose: bool = False): - super().__init__(verbose, session=session, cache_folder=cache_folder, revision=revision) \ No newline at end of file + def __init__( + self, + session: str, + cache_folder: Optional[DirectoryPath] = None, + revision: Optional[str] = None, + verbose: bool = False, + ): + super().__init__(verbose, session=session, cache_folder=cache_folder, revision=revision) From 6c805835c2842aac3a27351d3d3e14ba587c531a Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Fri, 13 Dec 2024 09:30:08 -0600 Subject: [PATCH 10/50] fix typing --- src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py | 3 ++- src/ibl_to_nwb/datainterfaces/_lick_times.py | 4 +++- 
src/ibl_to_nwb/datainterfaces/_wheel_movement.py | 3 ++- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py index c7594f7..e82b548 100644 --- a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py +++ b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py @@ -1,4 +1,5 @@ from pathlib import Path +from typing import Optional from hdmf.common import VectorData from neuroconv.basedatainterface import BaseDataInterface @@ -9,7 +10,7 @@ class BrainwideMapTrialsInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, revision: str | None = None): + def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session)[-1] if revision is None else revision diff --git a/src/ibl_to_nwb/datainterfaces/_lick_times.py b/src/ibl_to_nwb/datainterfaces/_lick_times.py index 375f854..f94f71d 100644 --- a/src/ibl_to_nwb/datainterfaces/_lick_times.py +++ b/src/ibl_to_nwb/datainterfaces/_lick_times.py @@ -1,3 +1,5 @@ +from typing import Optional + from hdmf.common import VectorData from neuroconv.basedatainterface import BaseDataInterface from neuroconv.tools.nwb_helpers import get_module @@ -7,7 +9,7 @@ class LickInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, revision: str | None = None): + def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session)[-1] if revision is None else revision diff --git a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py index 00ec234..f5420f5 100644 --- a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py +++ b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py @@ -1,4 +1,5 @@ from pathlib import Path +from typing import Optional from brainbox.behavior import 
wheel as wheel_methods from neuroconv.basedatainterface import BaseDataInterface @@ -11,7 +12,7 @@ class WheelInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, revision: str | None = None): + def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session) if revision is None else revision From 9a1c01cd26804f56ce2439a1658d5397471f993a Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Fri, 13 Dec 2024 09:48:31 -0600 Subject: [PATCH 11/50] fix more typing errors --- src/ibl_to_nwb/datainterfaces/_pupil_tracking.py | 3 ++- src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py | 3 ++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index fb7b626..9b4901f 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -1,6 +1,7 @@ """Data Interface for the pupil tracking.""" from pathlib import Path +from typing import Optional import numpy as np from neuroconv.basedatainterface import BaseDataInterface @@ -12,7 +13,7 @@ class PupilTrackingInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, camera_name: str, revision: str | None = None): + def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[str | None] = None): self.one = one self.session = session self.camera_name = camera_name diff --git a/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py b/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py index 8ea21d3..0a40f2e 100644 --- a/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py +++ b/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py @@ -1,4 +1,5 @@ """Data Interface for the special data type of ROI Motion Energy.""" +from typing import Optional from neuroconv.basedatainterface import BaseDataInterface from 
neuroconv.tools.nwb_helpers import get_module @@ -7,7 +8,7 @@ class RoiMotionEnergyInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, camera_name: str, revision: str | None = None): + def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[str] = None): self.one = one self.session = session self.camera_name = camera_name From 2b9c4bf96dc2cf9377de9d26e2acecd8aa9025e5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 13 Dec 2024 15:48:40 +0000 Subject: [PATCH 12/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py b/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py index 0a40f2e..a3c6007 100644 --- a/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py +++ b/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py @@ -1,4 +1,5 @@ """Data Interface for the special data type of ROI Motion Energy.""" + from typing import Optional from neuroconv.basedatainterface import BaseDataInterface From 5f9d77e66b9863dde32cf60f06c97dd2b6a9180b Mon Sep 17 00:00:00 2001 From: Heberto Mayorquin Date: Fri, 13 Dec 2024 10:02:58 -0600 Subject: [PATCH 13/50] optional --- src/ibl_to_nwb/datainterfaces/_pupil_tracking.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index 9b4901f..c0c4972 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -13,7 +13,7 @@ class PupilTrackingInterface(BaseDataInterface): - def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[str | None] = None): + def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[str] = 
None): self.one = one self.session = session self.camera_name = camera_name From 05d29588be4c3b9964609471d2517bf8e171ee26 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Tue, 17 Dec 2024 10:34:10 +0000 Subject: [PATCH 14/50] integration of mine and hebertos changes --- .../convert_brainwide_map_processed_only_local_testing.py | 2 +- src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py | 7 +++++-- src/ibl_to_nwb/testing/_consistency_checks.py | 3 +++ 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 836c8a8..be5c0ce 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -51,7 +51,7 @@ data_interfaces = list() # These interfaces should always be present in source data -data_interfaces.append(IblSortingInterface(session=session, cache_folder=cache_folder / "sorting")) +data_interfaces.append(IblSortingInterface(session=session, cache_folder=cache_folder / "sorting", revision=revision)) data_interfaces.append(BrainwideMapTrialsInterface(one=session_one, session=session, revision=revision)) data_interfaces.append(WheelInterface(one=session_one, session=session, revision=revision)) diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py index 18c478f..47d65af 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py @@ -8,11 +8,14 @@ from neuroconv.utils import load_dict_from_file from ._ibl_sorting_extractor import IblSortingExtractor - +from typing import Optional +from pydantic import DirectoryPath class IblSortingInterface(BaseSortingExtractorInterface): Extractor = IblSortingExtractor - + def __init__(self, session: str, 
cache_folder: Optional[DirectoryPath] = None, revision=None): + super().__init__(session=session, cache_folder=cache_folder, revision=revision) + def get_metadata(self) -> dict: metadata = super().get_metadata() diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index 7cc6811..f3cc375 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -175,6 +175,9 @@ def _check_spike_sorting_data(*, eid: str, one: ONE, nwbfile: NWBFile, revision: units_table = nwbfile.units[:] probe_names = units_table["probe_name"].unique() + if revision is None: + revision = one.list_revisions(eid)[-1] + spike_times = {} spike_clusters = {} cluster_uuids = {} From e608b5dcee31d8a3bffbe91865e21ff2ce1344db Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 10:38:43 +0000 Subject: [PATCH 15/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py index ea4407d..40fb865 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py @@ -10,14 +10,14 @@ from pydantic import DirectoryPath from ._ibl_sorting_extractor import IblSortingExtractor -from typing import Optional -from pydantic import DirectoryPath + class IblSortingInterface(BaseSortingExtractorInterface): Extractor = IblSortingExtractor + def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision=None): super().__init__(session=session, cache_folder=cache_folder, revision=revision) - + def get_metadata(self) -> dict: metadata = super().get_metadata() From 
22feb6a9424a8d02e6948b5b7ba5835899861e25 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Tue, 17 Dec 2024 11:12:27 +0000 Subject: [PATCH 16/50] added automatic last revision to consistency checking --- src/ibl_to_nwb/testing/_consistency_checks.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index e129c33..6714772 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -1,11 +1,11 @@ from pathlib import Path import numpy as np -from brainbox.io.one import SpikeSortingLoader from numpy.testing import assert_array_equal, assert_array_less from one.api import ONE from pandas.testing import assert_frame_equal from pynwb import NWBHDF5IO, NWBFile +from brainbox.io.one import SpikeSortingLoader def check_written_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): @@ -176,7 +176,7 @@ def _check_spike_sorting_data(*, eid: str, one: ONE, nwbfile: NWBFile, revision: probe_names = units_table["probe_name"].unique() if revision is None: - revision = one.list_revisions(eid)[-1] + revision = one.list_revisions(session)[-1] spike_times = {} spike_clusters = {} From 76fc999f024bae786ad723b57e5e15d145a8e4fa Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 11:16:06 +0000 Subject: [PATCH 17/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ibl_to_nwb/testing/_consistency_checks.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index 6714772..c9b4c83 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -1,11 +1,11 @@ from pathlib import Path import numpy as np +from brainbox.io.one import 
SpikeSortingLoader from numpy.testing import assert_array_equal, assert_array_less from one.api import ONE from pandas.testing import assert_frame_equal from pynwb import NWBHDF5IO, NWBFile -from brainbox.io.one import SpikeSortingLoader def check_written_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): From 62d7a400143e23e8a978b96c8b57834c76f8eab5 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Tue, 17 Dec 2024 14:58:38 +0100 Subject: [PATCH 18/50] output path related fixes / cleanups --- ...inwide_map_processed_only_local_testing.py | 65 ++++++++++--------- ...rt_brainwide_map_raw_only_local_testing.py | 3 +- src/ibl_to_nwb/testing/_consistency_checks.py | 2 +- 3 files changed, 38 insertions(+), 32 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 8d9450d..f45f83a 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -20,10 +20,15 @@ ) from ibl_to_nwb.testing._consistency_checks import check_written_nwbfile_for_consistency -base_path = Path.home() / "ibl_scratch" # local directory -session = "caa5dddc-9290-4e27-9f5e-575ba3598614" # a BWM session with dual probe -nwbfile_path = base_path / "nwbfiles" / session / f"{session}.nwb" -nwbfile_path.parent.mkdir(exist_ok=True, parents=True) +# select eid +# -> run download_data_local first with this eid to set up the local folder structure and one cache +eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" + +# folders +base_path = Path.home() / "ibl_scratch" +base_path.mkdir(exist_ok=True) +nwbfiles_folder_path = base_path / "nwbfiles" +nwbfiles_folder_path.mkdir(exist_ok=True) stub_test: bool = False cleanup: bool = False @@ -31,58 +36,59 @@ # assert len(os.environ.get("DANDI_API_KEY", "")) > 0, "Run `export DANDI_API_KEY=...`!" 
revision = None -nwbfile_path.parent.mkdir(exist_ok=True) - -# Download behavior and spike sorted data for this session -session_path = base_path / "ibl_conversion" / session -cache_folder = base_path / "ibl_conversion" / session / "cache" -session_one = ONE( +# Initialize IBL (ONE) client to download processed data for this session +one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" +one = ONE( base_url="https://openalyx.internationalbrainlab.org", password="international", - silent=False, - cache_dir=cache_folder, + silent=True, + cache_dir=one_cache_folder_path, ) # Initialize as many of each interface as we need across the streams data_interfaces = list() # These interfaces should always be present in source data -data_interfaces.append(IblSortingInterface(session=session, cache_folder=cache_folder / "sorting", revision=revision)) -data_interfaces.append(BrainwideMapTrialsInterface(one=session_one, session=session, revision=revision)) -data_interfaces.append(WheelInterface(one=session_one, session=session, revision=revision)) +data_interfaces.append(IblSortingInterface(session=eid, cache_folder=one_cache_folder_path / "sorting", revision=revision)) +data_interfaces.append(BrainwideMapTrialsInterface(one=one, session=eid, revision=revision)) +data_interfaces.append(WheelInterface(one=one, session=eid, revision=revision)) # These interfaces may not be present; check if they are before adding to list -pose_estimation_files = session_one.list_datasets(eid=session, filename="*.dlc*") +pose_estimation_files = one.list_datasets(eid=eid, filename="*.dlc*") for pose_estimation_file in pose_estimation_files: camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") data_interfaces.append( - IblPoseEstimationInterface(one=session_one, session=session, camera_name=camera_name, revision=revision) + IblPoseEstimationInterface(one=one, session=eid, camera_name=camera_name, revision=revision) ) -pupil_tracking_files = 
session_one.list_datasets(eid=session, filename="*features*") +pupil_tracking_files = one.list_datasets(eid=eid, filename="*features*") for pupil_tracking_file in pupil_tracking_files: camera_name = pupil_tracking_file.replace("alf/_ibl_", "").replace(".features.pqt", "") data_interfaces.append( - PupilTrackingInterface(one=session_one, session=session, camera_name=camera_name, revision=revision) + PupilTrackingInterface(one=one, session=eid, camera_name=camera_name, revision=revision) ) -roi_motion_energy_files = session_one.list_datasets(eid=session, filename="*ROIMotionEnergy.npy*") +roi_motion_energy_files = one.list_datasets(eid=eid, filename="*ROIMotionEnergy.npy*") for roi_motion_energy_file in roi_motion_energy_files: camera_name = roi_motion_energy_file.replace("alf/", "").replace(".ROIMotionEnergy.npy", "") data_interfaces.append( - RoiMotionEnergyInterface(one=session_one, session=session, camera_name=camera_name, revision=revision) + RoiMotionEnergyInterface(one=one, session=eid, camera_name=camera_name, revision=revision) ) -if session_one.list_datasets(eid=session, collection="alf", filename="licks*"): - data_interfaces.append(LickInterface(one=session_one, session=session, revision=revision)) +if one.list_datasets(eid=eid, collection="alf", filename="licks*"): + data_interfaces.append(LickInterface(one=one, session=eid, revision=revision)) # Run conversion session_converter = BrainwideMapConverter( - one=session_one, session=session, data_interfaces=data_interfaces, verbose=True + one=one, session=eid, data_interfaces=data_interfaces, verbose=True ) metadata = session_converter.get_metadata() -metadata["NWBFile"]["session_id"] = metadata["NWBFile"]["session_id"] # + "-processed-only" +subject_id = metadata["Subject"]["subject_id"] + +subject_folder_path = nwbfiles_folder_path / f"sub-{subject_id}" +subject_folder_path.mkdir(exist_ok=True) +nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-processed.nwb" 
session_converter.run_conversion( nwbfile_path=nwbfile_path, @@ -94,8 +100,9 @@ # nwb_folder_path=nwbfile_path.parent, # cleanup=cleanup, # ) -if cleanup: - rmtree(cache_folder) - rmtree(nwbfile_path.parent) -check_written_nwbfile_for_consistency(one=session_one, nwbfile_path=nwbfile_path) +# if cleanup: +# rmtree(cache_folder) +# rmtree(nwbfile_path.parent) + +check_written_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py index 904acdf..11d3c1a 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py @@ -57,12 +57,11 @@ session_converter = BrainwideMapConverter(one=one, session=eid, data_interfaces=data_interfaces, verbose=False) metadata = session_converter.get_metadata() -metadata["NWBFile"]["eid"] = metadata["NWBFile"]["eid"] subject_id = metadata["Subject"]["subject_id"] subject_folder_path = nwbfiles_folder_path / f"sub-{subject_id}" subject_folder_path.mkdir(exist_ok=True) -nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-video.nwb" +nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-raw.nwb" session_converter.run_conversion( nwbfile_path=nwbfile_path, diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index c9b4c83..e129c33 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -176,7 +176,7 @@ def _check_spike_sorting_data(*, eid: str, one: ONE, nwbfile: NWBFile, revision: probe_names = units_table["probe_name"].unique() if revision is None: - revision = one.list_revisions(session)[-1] + revision = one.list_revisions(eid)[-1] spike_times = {} spike_clusters = {} From 4202759ace2b7bed0656e1473988bba73b11a59e Mon Sep 17 00:00:00 2001 
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 13:58:55 +0000 Subject: [PATCH 19/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- ...inwide_map_processed_only_local_testing.py | 21 +++++++------------ 1 file changed, 7 insertions(+), 14 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index f45f83a..4fc8da9 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -4,7 +4,6 @@ import os from pathlib import Path -from shutil import rmtree from one.api import ONE @@ -49,7 +48,9 @@ data_interfaces = list() # These interfaces should always be present in source data -data_interfaces.append(IblSortingInterface(session=eid, cache_folder=one_cache_folder_path / "sorting", revision=revision)) +data_interfaces.append( + IblSortingInterface(session=eid, cache_folder=one_cache_folder_path / "sorting", revision=revision) +) data_interfaces.append(BrainwideMapTrialsInterface(one=one, session=eid, revision=revision)) data_interfaces.append(WheelInterface(one=one, session=eid, revision=revision)) @@ -57,31 +58,23 @@ pose_estimation_files = one.list_datasets(eid=eid, filename="*.dlc*") for pose_estimation_file in pose_estimation_files: camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") - data_interfaces.append( - IblPoseEstimationInterface(one=one, session=eid, camera_name=camera_name, revision=revision) - ) + data_interfaces.append(IblPoseEstimationInterface(one=one, session=eid, camera_name=camera_name, revision=revision)) pupil_tracking_files = one.list_datasets(eid=eid, filename="*features*") for pupil_tracking_file in pupil_tracking_files: camera_name = 
pupil_tracking_file.replace("alf/_ibl_", "").replace(".features.pqt", "") - data_interfaces.append( - PupilTrackingInterface(one=one, session=eid, camera_name=camera_name, revision=revision) - ) + data_interfaces.append(PupilTrackingInterface(one=one, session=eid, camera_name=camera_name, revision=revision)) roi_motion_energy_files = one.list_datasets(eid=eid, filename="*ROIMotionEnergy.npy*") for roi_motion_energy_file in roi_motion_energy_files: camera_name = roi_motion_energy_file.replace("alf/", "").replace(".ROIMotionEnergy.npy", "") - data_interfaces.append( - RoiMotionEnergyInterface(one=one, session=eid, camera_name=camera_name, revision=revision) - ) + data_interfaces.append(RoiMotionEnergyInterface(one=one, session=eid, camera_name=camera_name, revision=revision)) if one.list_datasets(eid=eid, collection="alf", filename="licks*"): data_interfaces.append(LickInterface(one=one, session=eid, revision=revision)) # Run conversion -session_converter = BrainwideMapConverter( - one=one, session=eid, data_interfaces=data_interfaces, verbose=True -) +session_converter = BrainwideMapConverter(one=one, session=eid, data_interfaces=data_interfaces, verbose=True) metadata = session_converter.get_metadata() subject_id = metadata["Subject"]["subject_id"] From b640ee6d48025c06d71eec4df859ebd63ca4e8ac Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Wed, 18 Dec 2024 10:54:44 +0100 Subject: [PATCH 20/50] attempting to pass one to IblSortingInterface - fails currently by pydantic --- .gitignore | 1 + ...inwide_map_processed_only_local_testing.py | 6 ++-- ...rt_brainwide_map_raw_only_local_testing.py | 6 +--- .../datainterfaces/_brainwide_map_trials.py | 12 +++++--- .../datainterfaces/_ibl_sorting_extractor.py | 29 +++++++++++-------- .../datainterfaces/_ibl_sorting_interface.py | 22 +++++++------- src/ibl_to_nwb/datainterfaces/_lick_times.py | 5 +++- .../datainterfaces/_pupil_tracking.py | 6 ++-- .../datainterfaces/_wheel_movement.py | 6 ++-- 9 files changed, 53 
insertions(+), 40 deletions(-) diff --git a/.gitignore b/.gitignore index 213258b..44e36ed 100644 --- a/.gitignore +++ b/.gitignore @@ -134,3 +134,4 @@ dmypy.json #misc endpoint_schemas/ tests/ +src/local \ No newline at end of file diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 4fc8da9..40415ae 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -48,9 +48,9 @@ data_interfaces = list() # These interfaces should always be present in source data -data_interfaces.append( - IblSortingInterface(session=eid, cache_folder=one_cache_folder_path / "sorting", revision=revision) -) +# data_interfaces.append(IblSortingInterface(session=eid, cache_folder=one_cache_folder_path / "sorting", revision=revision)) + +data_interfaces.append(IblSortingInterface(one=one, session=eid, revision=revision)) data_interfaces.append(BrainwideMapTrialsInterface(one=one, session=eid, revision=revision)) data_interfaces.append(WheelInterface(one=one, session=eid, revision=revision)) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py index 11d3c1a..04f1422 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py @@ -63,11 +63,7 @@ subject_folder_path.mkdir(exist_ok=True) nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-raw.nwb" -session_converter.run_conversion( - nwbfile_path=nwbfile_path, - metadata=metadata, - overwrite=True, -) +session_converter.run_conversion(nwbfile_path=nwbfile_path, metadata=metadata, overwrite=True) # TODO: add some kind of raw-specific check # 
check_written_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) diff --git a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py index e82b548..9b50398 100644 --- a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py +++ b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py @@ -7,6 +7,7 @@ from one.api import ONE from pynwb import NWBFile from pynwb.epoch import TimeIntervals +from brainbox.io.one import SessionLoader class BrainwideMapTrialsInterface(BaseDataInterface): @@ -14,6 +15,8 @@ def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session)[-1] if revision is None else revision + self.session_loader = SessionLoader(one=self.one, eid=self.session, revision=self.revision) + self.session_loader.load_trials() def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -22,7 +25,8 @@ def get_metadata(self) -> dict: return metadata def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): - trials = self.one.load_object(id=self.session, obj="trials", collection="alf", revision=self.revision) + # trials = self.one.load_object(id=self.session, obj="trials", collection="alf", revision=self.revision) + trials = self.session_loader.trials column_ordering = [ "choice", @@ -42,12 +46,12 @@ def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): VectorData( name="start_time", description="The beginning of the trial.", - data=trials["intervals"][:, 0], + data=trials["intervals_0"].values, ), VectorData( name="stop_time", description="The end of the trial.", - data=trials["intervals"][:, 1], + data=trials["intervals_1"].values, ), ] for ibl_key in column_ordering: @@ -55,7 +59,7 @@ def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): VectorData( name=metadata["Trials"][ibl_key]["name"], description=metadata["Trials"][ibl_key]["description"], - data=trials[ibl_key], + 
data=trials[ibl_key].values, ) ) nwbfile.add_time_intervals( diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index 54e374c..760a3fb 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -7,6 +7,10 @@ import pandas as pd from pydantic import DirectoryPath from spikeinterface import BaseSorting, BaseSortingSegment +from one.api import ONE +from brainbox.io.one import SpikeSortingLoader +from iblatlas.atlas import AllenAtlas +from iblatlas.regions import BrainRegions class IblSortingExtractor(BaseSorting): @@ -16,18 +20,19 @@ class IblSortingExtractor(BaseSorting): installation_mesg = "" name = "iblsorting" - def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str] = None): - from brainbox.io.one import SpikeSortingLoader - from iblatlas.atlas import AllenAtlas - from iblatlas.regions import BrainRegions - from one.api import ONE - - one = ONE( - base_url="https://openalyx.internationalbrainlab.org", - password="international", - silent=True, - cache_dir=cache_folder, - ) + # def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str] = None): + def __init__( + self, + one: ONE, + session: str, + revision: Optional[str] = None, + ): + # one = ONE( + # base_url="https://openalyx.internationalbrainlab.org", + # password="international", + # silent=True, + # cache_dir=cache_folder, + # ) if revision is None: # latest revision = one.list_revisions(session)[-1] diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py index 40fb865..a27471b 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py @@ -10,13 +10,21 @@ from pydantic import DirectoryPath from ._ibl_sorting_extractor import 
IblSortingExtractor - +from one.api import ONE class IblSortingInterface(BaseSortingExtractorInterface): Extractor = IblSortingExtractor - def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision=None): - super().__init__(session=session, cache_folder=cache_folder, revision=revision) + def __init__( + self, + one: ONE, + session: str, + # cache_folder: Optional[DirectoryPath] = None, + revision: Optional[str] = None, + # verbose: bool = False, + ): + # super().__init__(verbose, session=session, cache_folder=cache_folder, revision=revision) + super().__init__(one=one, session=session, revision=revision) def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -33,11 +41,3 @@ def get_metadata(self) -> dict: return metadata - def __init__( - self, - session: str, - cache_folder: Optional[DirectoryPath] = None, - revision: Optional[str] = None, - verbose: bool = False, - ): - super().__init__(verbose, session=session, cache_folder=cache_folder, revision=revision) diff --git a/src/ibl_to_nwb/datainterfaces/_lick_times.py b/src/ibl_to_nwb/datainterfaces/_lick_times.py index f94f71d..f885a70 100644 --- a/src/ibl_to_nwb/datainterfaces/_lick_times.py +++ b/src/ibl_to_nwb/datainterfaces/_lick_times.py @@ -6,16 +6,19 @@ from one.api import ONE from pynwb import NWBFile from pynwb.file import DynamicTable - +from brainbox.io.one import SessionLoader class LickInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session)[-1] if revision is None else revision + # self.session_loader = SessionLoader(one=self.one, eid=self.session, revision=self.revision) + # self.session_loader.load_licks() def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): licks = self.one.load_object(id=self.session, obj="licks", collection="alf", revision=self.revision) + # licks = self.session_loader.licks lick_events_table = 
DynamicTable( name="LickTimes", diff --git a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index c0c4972..d8eb6bf 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -10,7 +10,7 @@ from one.api import ONE from pynwb import TimeSeries from pynwb.behavior import PupilTracking - +from brainbox.io.one import SessionLoader class PupilTrackingInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[str] = None): @@ -18,13 +18,15 @@ def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[ self.session = session self.camera_name = camera_name self.revision = one.list_revisions(session)[-1] if revision is None else revision + self.session_loader = SessionLoader(one=one, eid=session, revision=revision) + self.session_loader.load_pupil() def get_metadata(self) -> dict: metadata = super().get_metadata() pupils_metadata = load_dict_from_file(file_path=Path(__file__).parent.parent / "_metadata" / "pupils.yml") metadata.update(pupils_metadata) - + return metadata def add_to_nwbfile(self, nwbfile, metadata: dict): diff --git a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py index f5420f5..31d8709 100644 --- a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py +++ b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py @@ -9,13 +9,15 @@ from pynwb import TimeSeries from pynwb.behavior import CompassDirection, SpatialSeries from pynwb.epoch import TimeIntervals - +from brainbox.io.one import SessionLoader class WheelInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session) if revision is None else revision + self.session_loader = SessionLoader(eid=session, one=one, revision=revision) + 
self.session_loader.load_wheel() def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -59,7 +61,7 @@ def add_to_nwbfile(self, nwbfile, metadata: dict): description=metadata["WheelPosition"]["description"], data=wheel["position"], timestamps=wheel["timestamps"], - unit="rad", + unit="radians", reference_frame="Initial angle at start time is zero. Counter-clockwise is positive.", ) ) From 6487907383d49c84c35cda43e3ba45fb3b44b514 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 09:55:23 +0000 Subject: [PATCH 21/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .gitignore | 2 +- src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py | 2 +- src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py | 5 ++--- src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py | 5 ++--- src/ibl_to_nwb/datainterfaces/_lick_times.py | 2 +- src/ibl_to_nwb/datainterfaces/_pupil_tracking.py | 5 +++-- src/ibl_to_nwb/datainterfaces/_wheel_movement.py | 3 ++- 7 files changed, 12 insertions(+), 12 deletions(-) diff --git a/.gitignore b/.gitignore index 44e36ed..5ca4667 100644 --- a/.gitignore +++ b/.gitignore @@ -134,4 +134,4 @@ dmypy.json #misc endpoint_schemas/ tests/ -src/local \ No newline at end of file +src/local diff --git a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py index 9b50398..e0a5688 100644 --- a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py +++ b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py @@ -1,13 +1,13 @@ from pathlib import Path from typing import Optional +from brainbox.io.one import SessionLoader from hdmf.common import VectorData from neuroconv.basedatainterface import BaseDataInterface from neuroconv.utils import load_dict_from_file from one.api import ONE from pynwb import NWBFile from pynwb.epoch import TimeIntervals -from 
brainbox.io.one import SessionLoader class BrainwideMapTrialsInterface(BaseDataInterface): diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index 760a3fb..75cbbaa 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -5,12 +5,11 @@ import numpy as np import pandas as pd -from pydantic import DirectoryPath -from spikeinterface import BaseSorting, BaseSortingSegment -from one.api import ONE from brainbox.io.one import SpikeSortingLoader from iblatlas.atlas import AllenAtlas from iblatlas.regions import BrainRegions +from one.api import ONE +from spikeinterface import BaseSorting, BaseSortingSegment class IblSortingExtractor(BaseSorting): diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py index a27471b..61f88b6 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py @@ -7,10 +7,10 @@ BaseSortingExtractorInterface, ) from neuroconv.utils import load_dict_from_file -from pydantic import DirectoryPath +from one.api import ONE from ._ibl_sorting_extractor import IblSortingExtractor -from one.api import ONE + class IblSortingInterface(BaseSortingExtractorInterface): Extractor = IblSortingExtractor @@ -40,4 +40,3 @@ def get_metadata(self) -> dict: ) return metadata - diff --git a/src/ibl_to_nwb/datainterfaces/_lick_times.py b/src/ibl_to_nwb/datainterfaces/_lick_times.py index f885a70..b971a13 100644 --- a/src/ibl_to_nwb/datainterfaces/_lick_times.py +++ b/src/ibl_to_nwb/datainterfaces/_lick_times.py @@ -6,7 +6,7 @@ from one.api import ONE from pynwb import NWBFile from pynwb.file import DynamicTable -from brainbox.io.one import SessionLoader + class LickInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, revision: Optional[str] = None): diff --git 
a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index d8eb6bf..5946c02 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -4,13 +4,14 @@ from typing import Optional import numpy as np +from brainbox.io.one import SessionLoader from neuroconv.basedatainterface import BaseDataInterface from neuroconv.tools.nwb_helpers import get_module from neuroconv.utils import load_dict_from_file from one.api import ONE from pynwb import TimeSeries from pynwb.behavior import PupilTracking -from brainbox.io.one import SessionLoader + class PupilTrackingInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[str] = None): @@ -26,7 +27,7 @@ def get_metadata(self) -> dict: pupils_metadata = load_dict_from_file(file_path=Path(__file__).parent.parent / "_metadata" / "pupils.yml") metadata.update(pupils_metadata) - + return metadata def add_to_nwbfile(self, nwbfile, metadata: dict): diff --git a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py index 31d8709..42dd014 100644 --- a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py +++ b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py @@ -2,6 +2,7 @@ from typing import Optional from brainbox.behavior import wheel as wheel_methods +from brainbox.io.one import SessionLoader from neuroconv.basedatainterface import BaseDataInterface from neuroconv.tools.nwb_helpers import get_module from neuroconv.utils import load_dict_from_file @@ -9,7 +10,7 @@ from pynwb import TimeSeries from pynwb.behavior import CompassDirection, SpatialSeries from pynwb.epoch import TimeIntervals -from brainbox.io.one import SessionLoader + class WheelInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, revision: Optional[str] = None): From 45cbaca11f009332a94bb099c6e4d78eb46a607f Mon Sep 17 00:00:00 2001 From: Georg Raiser 
Date: Wed, 18 Dec 2024 12:46:20 +0100 Subject: [PATCH 22/50] one instantiation removed in IblSortingInterface, but requires hack in neuroconv --- ...rt_brainwide_map_processed_only_local_testing.py | 7 ------- .../_scripts/convert_brainwide_map_raw_only.py | 2 ++ .../converters/_ibl_spikeglx_converter.py | 13 +++---------- .../datainterfaces/_brainwide_map_trials.py | 2 +- .../datainterfaces/_ibl_sorting_extractor.py | 12 +++--------- .../datainterfaces/_ibl_sorting_interface.py | 12 ++++-------- src/ibl_to_nwb/datainterfaces/_lick_times.py | 2 +- src/ibl_to_nwb/datainterfaces/_pupil_tracking.py | 3 ++- src/ibl_to_nwb/datainterfaces/_wheel_movement.py | 3 ++- 9 files changed, 18 insertions(+), 38 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 40415ae..d197a48 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -48,8 +48,6 @@ data_interfaces = list() # These interfaces should always be present in source data -# data_interfaces.append(IblSortingInterface(session=eid, cache_folder=one_cache_folder_path / "sorting", revision=revision)) - data_interfaces.append(IblSortingInterface(one=one, session=eid, revision=revision)) data_interfaces.append(BrainwideMapTrialsInterface(one=one, session=eid, revision=revision)) data_interfaces.append(WheelInterface(one=one, session=eid, revision=revision)) @@ -88,11 +86,6 @@ metadata=metadata, overwrite=True, ) -# automatic_dandi_upload( -# dandiset_id="000409", -# nwb_folder_path=nwbfile_path.parent, -# cleanup=cleanup, -# ) # if cleanup: # rmtree(cache_folder) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py index ebde212..0c0ebf1 100644 --- 
a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only.py @@ -32,6 +32,8 @@ data_interfaces = [] # spikeglx_source_folder_path = Path("D:/example_data/ephy_testing_data/spikeglx/Noise4Sam_g0") +session_folder = ibl_client.eid2path(session_id) +spikeglx_source_folder_path = session_folder / "raw_ephys_data" spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=ibl_client) data_interfaces.append(spikeglx_subconverter) diff --git a/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py b/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py index a9bf223..8e63c7f 100644 --- a/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py +++ b/src/ibl_to_nwb/converters/_ibl_spikeglx_converter.py @@ -10,7 +10,7 @@ class IblSpikeGlxConverter(SpikeGLXConverterPipe): def __init__(self, folder_path: DirectoryPath, one: ONE, eid: str) -> None: super().__init__(folder_path=folder_path) self.one = one - self.eid = eid # probably should better name this session_id ? 
+ self.eid = eid def temporally_align_data_interfaces(self) -> None: """Align the raw data timestamps to the other data streams using the ONE API.""" @@ -22,21 +22,14 @@ def temporally_align_data_interfaces(self) -> None: } ephys_session_loader = EphysSessionLoader(one=self.one, eid=self.eid) - probes = ephys_session_loader.probes for probe_name, pid in ephys_session_loader.probes.items(): spike_sorting_loader = SpikeSortingLoader(pid=pid, one=self.one) probe_index = probe_to_imec_map[probe_name] for band in ["ap", "lf"]: recording_interface = self.data_interface_objects[f"imec{probe_index}.{band}"] - # recording_interface = next( - # interface - # for interface in self.data_interface_objects - # if f"imec{probe_index}.{band}" in interface.source_data["file_path"] - # ) - - band_info = spike_sorting_loader.raw_electrophysiology(band=band, stream=True) - aligned_timestamps = spike_sorting_loader.samples2times(np.arange(0, band_info.ns), direction="forward") + sl = spike_sorting_loader.raw_electrophysiology(band=band, stream=True) + aligned_timestamps = spike_sorting_loader.samples2times(np.arange(0, sl.ns), direction="forward") recording_interface.set_aligned_timestamps(aligned_timestamps=aligned_timestamps) pass diff --git a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py index 9b50398..e0a5688 100644 --- a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py +++ b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py @@ -1,13 +1,13 @@ from pathlib import Path from typing import Optional +from brainbox.io.one import SessionLoader from hdmf.common import VectorData from neuroconv.basedatainterface import BaseDataInterface from neuroconv.utils import load_dict_from_file from one.api import ONE from pynwb import NWBFile from pynwb.epoch import TimeIntervals -from brainbox.io.one import SessionLoader class BrainwideMapTrialsInterface(BaseDataInterface): diff --git 
a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index 760a3fb..9d50966 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -5,12 +5,12 @@ import numpy as np import pandas as pd -from pydantic import DirectoryPath -from spikeinterface import BaseSorting, BaseSortingSegment -from one.api import ONE from brainbox.io.one import SpikeSortingLoader from iblatlas.atlas import AllenAtlas from iblatlas.regions import BrainRegions +from one.api import ONE +from pydantic import DirectoryPath +from spikeinterface import BaseSorting, BaseSortingSegment class IblSortingExtractor(BaseSorting): @@ -27,12 +27,6 @@ def __init__( session: str, revision: Optional[str] = None, ): - # one = ONE( - # base_url="https://openalyx.internationalbrainlab.org", - # password="international", - # silent=True, - # cache_dir=cache_folder, - # ) if revision is None: # latest revision = one.list_revisions(session)[-1] diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py index a27471b..296b5b0 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_interface.py @@ -7,24 +7,21 @@ BaseSortingExtractorInterface, ) from neuroconv.utils import load_dict_from_file -from pydantic import DirectoryPath +from one.api import ONE from ._ibl_sorting_extractor import IblSortingExtractor -from one.api import ONE + class IblSortingInterface(BaseSortingExtractorInterface): Extractor = IblSortingExtractor def __init__( self, - one: ONE, session: str, - # cache_folder: Optional[DirectoryPath] = None, + one: ONE, revision: Optional[str] = None, - # verbose: bool = False, ): - # super().__init__(verbose, session=session, cache_folder=cache_folder, revision=revision) - super().__init__(one=one, session=session, revision=revision) + 
super().__init__(session=session, one=one, revision=revision) def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -40,4 +37,3 @@ def get_metadata(self) -> dict: ) return metadata - diff --git a/src/ibl_to_nwb/datainterfaces/_lick_times.py b/src/ibl_to_nwb/datainterfaces/_lick_times.py index f885a70..b971a13 100644 --- a/src/ibl_to_nwb/datainterfaces/_lick_times.py +++ b/src/ibl_to_nwb/datainterfaces/_lick_times.py @@ -6,7 +6,7 @@ from one.api import ONE from pynwb import NWBFile from pynwb.file import DynamicTable -from brainbox.io.one import SessionLoader + class LickInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, revision: Optional[str] = None): diff --git a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index d8eb6bf..bd2cc95 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -4,13 +4,14 @@ from typing import Optional import numpy as np +from brainbox.io.one import SessionLoader from neuroconv.basedatainterface import BaseDataInterface from neuroconv.tools.nwb_helpers import get_module from neuroconv.utils import load_dict_from_file from one.api import ONE from pynwb import TimeSeries from pynwb.behavior import PupilTracking -from brainbox.io.one import SessionLoader + class PupilTrackingInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[str] = None): diff --git a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py index 31d8709..42dd014 100644 --- a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py +++ b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py @@ -2,6 +2,7 @@ from typing import Optional from brainbox.behavior import wheel as wheel_methods +from brainbox.io.one import SessionLoader from neuroconv.basedatainterface import BaseDataInterface from neuroconv.tools.nwb_helpers import get_module 
from neuroconv.utils import load_dict_from_file @@ -9,7 +10,7 @@ from pynwb import TimeSeries from pynwb.behavior import CompassDirection, SpatialSeries from pynwb.epoch import TimeIntervals -from brainbox.io.one import SessionLoader + class WheelInterface(BaseDataInterface): def __init__(self, one: ONE, session: str, revision: Optional[str] = None): From cff20a532dbc0daa4d6909b77633f9f83cb254e4 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Wed, 18 Dec 2024 15:56:36 +0100 Subject: [PATCH 23/50] for heberto --- ...ert_brainwide_map_processed_only_local_testing.py | 2 +- .../convert_brainwide_map_raw_only_local_testing.py | 2 +- .../datainterfaces/_ibl_sorting_extractor.py | 12 ++++++++++++ 3 files changed, 14 insertions(+), 2 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index d197a48..b3a4429 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -79,7 +79,7 @@ subject_folder_path = nwbfiles_folder_path / f"sub-{subject_id}" subject_folder_path.mkdir(exist_ok=True) -nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-processed.nwb" +nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-processed_.nwb" session_converter.run_conversion( nwbfile_path=nwbfile_path, diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py index 04f1422..085b31b 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_raw_only_local_testing.py @@ -61,7 +61,7 @@ subject_folder_path = nwbfiles_folder_path / f"sub-{subject_id}" subject_folder_path.mkdir(exist_ok=True) -nwbfile_path = subject_folder_path / 
f"sub-{subject_id}_ses-{eid}_desc-raw.nwb" +nwbfile_path = subject_folder_path / f"sub-{subject_id}_ses-{eid}_desc-raw_ecephys+raw_video_.nwb" session_converter.run_conversion(nwbfile_path=nwbfile_path, metadata=metadata, overwrite=True) diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index 9a0b480..3602a79 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -10,6 +10,7 @@ from iblatlas.regions import BrainRegions from one.api import ONE from spikeinterface import BaseSorting, BaseSortingSegment +from neuroconv.utils import get_json_schema_from_method_signature class IblSortingExtractor(BaseSorting): @@ -19,6 +20,17 @@ class IblSortingExtractor(BaseSorting): installation_mesg = "" name = "iblsorting" + def get_source_schema(cls) -> dict: + """ + Infer the JSON schema for the source_data from the method signature (annotation typing). + + Returns + ------- + dict + The JSON schema for the source_data. 
+ """ + return get_json_schema_from_method_signature(cls, exclude=["source_data", "one"]) + # def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str] = None): def __init__( self, From 86144df71f26157ea9ac1dc9a02a8409b8f40b62 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 18 Dec 2024 14:57:19 +0000 Subject: [PATCH 24/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index 3602a79..2f9749c 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -8,9 +8,9 @@ from brainbox.io.one import SpikeSortingLoader from iblatlas.atlas import AllenAtlas from iblatlas.regions import BrainRegions +from neuroconv.utils import get_json_schema_from_method_signature from one.api import ONE from spikeinterface import BaseSorting, BaseSortingSegment -from neuroconv.utils import get_json_schema_from_method_signature class IblSortingExtractor(BaseSorting): From 72a71177422af28bbef3fd2ebb6e68b1cfe9506e Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Thu, 19 Dec 2024 16:09:34 +0100 Subject: [PATCH 25/50] for sdsc --- .../_convert_brainwide_map_processed.py | 92 +++++++++++++++++++ ...inwide_map_processed_only_local_testing.py | 3 - src/ibl_to_nwb/testing/__init__.py | 2 +- src/ibl_to_nwb/testing/_consistency_checks.py | 12 ++- 4 files changed, 104 insertions(+), 5 deletions(-) create mode 100644 src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py new file mode 100644 
index 0000000..39852a7 --- /dev/null +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py @@ -0,0 +1,92 @@ +import sys +from pathlib import Path + +from one.api import ONE + +from ibl_to_nwb.converters import BrainwideMapConverter +from ibl_to_nwb.datainterfaces import ( + BrainwideMapTrialsInterface, + IblPoseEstimationInterface, + IblSortingInterface, + LickInterface, + PupilTrackingInterface, + RoiMotionEnergyInterface, + WheelInterface, +) +from ibl_to_nwb.testing._consistency_checks import check_nwbfile_for_consistency + + +def convert(eid: str, one: ONE, data_interfaces: list, raw: bool): + # Run conversion + session_converter = BrainwideMapConverter(one=one, session=eid, data_interfaces=data_interfaces, verbose=True) + metadata = session_converter.get_metadata() + subject_id = metadata["Subject"]["subject_id"] + + subject_folder_path = output_folder / f"sub-{subject_id}" + subject_folder_path.mkdir(exist_ok=True) + if raw: + fname = f"sub-{subject_id}_ses-{eid}_desc-raw.nwb" + else: + fname = f"sub-{subject_id}_ses-{eid}_desc-processed.nwb" + + nwbfile_path = subject_folder_path / fname + session_converter.run_conversion( + nwbfile_path=nwbfile_path, + metadata=metadata, + overwrite=True, + ) + return nwbfile_path + + +if __name__ == "__main__": + eid = sys.argv[1] + + # path setup + base_path = Path.home() / "ibl_scratch" + output_folder = base_path / "nwbfiles" + output_folder.mkdir(exist_ok=True, parents=True) + + revision = "2024-07-10" + + # Initialize IBL (ONE) client to download processed data for this session + one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" + one = ONE( + base_url="https://openalyx.internationalbrainlab.org", + password="international", + silent=True, + cache_dir=one_cache_folder_path, + ) + + # Initialize as many of each interface as we need across the streams + data_interfaces = list() + + # These interfaces should always be present in source data + 
data_interfaces.append(IblSortingInterface(one=one, session=eid, revision=revision)) + data_interfaces.append(BrainwideMapTrialsInterface(one=one, session=eid, revision=revision)) + data_interfaces.append(WheelInterface(one=one, session=eid, revision=revision)) + + # These interfaces may not be present; check if they are before adding to list + pose_estimation_files = one.list_datasets(eid=eid, filename="*.dlc*") + for pose_estimation_file in pose_estimation_files: + camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") + data_interfaces.append( + IblPoseEstimationInterface(one=one, session=eid, camera_name=camera_name, revision=revision) + ) + + pupil_tracking_files = one.list_datasets(eid=eid, filename="*features*") + for pupil_tracking_file in pupil_tracking_files: + camera_name = pupil_tracking_file.replace("alf/_ibl_", "").replace(".features.pqt", "") + data_interfaces.append(PupilTrackingInterface(one=one, session=eid, camera_name=camera_name, revision=revision)) + + roi_motion_energy_files = one.list_datasets(eid=eid, filename="*ROIMotionEnergy.npy*") + for roi_motion_energy_file in roi_motion_energy_files: + camera_name = roi_motion_energy_file.replace("alf/", "").replace(".ROIMotionEnergy.npy", "") + data_interfaces.append( + RoiMotionEnergyInterface(one=one, session=eid, camera_name=camera_name, revision=revision) + ) + + if one.list_datasets(eid=eid, collection="alf", filename="licks*"): + data_interfaces.append(LickInterface(one=one, session=eid, revision=revision)) + + # check + check_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index b3a4429..19b70e6 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -1,8 +1,5 @@ 
import os -os.environ["JUPYTER_PLATFORM_DIRS"] = "1" # Annoying - -import os from pathlib import Path from one.api import ONE diff --git a/src/ibl_to_nwb/testing/__init__.py b/src/ibl_to_nwb/testing/__init__.py index 516db43..4b7dc3f 100644 --- a/src/ibl_to_nwb/testing/__init__.py +++ b/src/ibl_to_nwb/testing/__init__.py @@ -1 +1 @@ -from ._consistency_checks import check_written_nwbfile_for_consistency +from ._consistency_checks import check_nwbfile_for_consistency, check_raw_nwbfile_for_consistency diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index e129c33..4c94136 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -8,7 +8,7 @@ from pynwb import NWBHDF5IO, NWBFile -def check_written_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): +def check_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): """ Check the processed-only NWB file for consistency with the equivalent calls to the ONE API. 
@@ -33,6 +33,16 @@ def check_written_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): _check_spike_sorting_data(eid=eid, nwbfile=nwbfile, one=one) +def check_raw_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): + with NWBHDF5IO(path=nwbfile_path, mode="r") as io: + nwbfile = io.read() + eid = nwbfile.session_id + + # run checks for raw files + _check_raw_ephys_data(eid=eid, one=one, nwbfile=nwbfile) + _check_raw_video_data(eid=eid, one=one, nwbfile=nwbfile, nwbfile_path=nwbfile_path) + + def _check_wheel_data(*, eid: str, one: ONE, nwbfile: NWBFile, revision: str = None): processing_module = nwbfile.processing["wheel"] wheel_position_series = processing_module.data_interfaces["CompassDirection"].spatial_series["WheelPositionSeries"] From 73899e579a40945adeaad69229dba8a1e6278963 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 19 Dec 2024 15:10:09 +0000 Subject: [PATCH 26/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../convert_brainwide_map_processed_only_local_testing.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index 19b70e6..bda5fe0 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -1,4 +1,3 @@ -import os from pathlib import Path From 0c792fbe9b254c86e0a614485b3c939ad946a101 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Thu, 19 Dec 2024 16:13:35 +0100 Subject: [PATCH 27/50] fix for one local on sdsc --- .../_scripts/_convert_brainwide_map_processed.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py 
b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py index 39852a7..1ad9cfd 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py @@ -1,7 +1,8 @@ import sys from pathlib import Path -from one.api import ONE +# from one.api import ONE +from deploy.iblsdsc import OneSdsc as ONE from ibl_to_nwb.converters import BrainwideMapConverter from ibl_to_nwb.datainterfaces import ( @@ -49,12 +50,13 @@ def convert(eid: str, one: ONE, data_interfaces: list, raw: bool): revision = "2024-07-10" # Initialize IBL (ONE) client to download processed data for this session - one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" + # one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" one = ONE( base_url="https://openalyx.internationalbrainlab.org", password="international", - silent=True, - cache_dir=one_cache_folder_path, + mode="local", + # silent=True, + # cache_dir=one_cache_folder_path, ) # Initialize as many of each interface as we need across the streams @@ -88,5 +90,7 @@ def convert(eid: str, one: ONE, data_interfaces: list, raw: bool): if one.list_datasets(eid=eid, collection="alf", filename="licks*"): data_interfaces.append(LickInterface(one=one, session=eid, revision=revision)) + nwbfile_path = convert(eid=eid, one=one, data_interfaces=data_interfaces, raw=False) + # check check_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) From cafbbd59489bccf0228aed06dfd3cedb5aeac583 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 19 Dec 2024 15:15:27 +0000 Subject: [PATCH 28/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- .../convert_brainwide_map_processed_only_local_testing.py | 1 - 1 file changed, 1 deletion(-) diff --git a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py 
b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py index bda5fe0..7bac904 100644 --- a/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py +++ b/src/ibl_to_nwb/_scripts/convert_brainwide_map_processed_only_local_testing.py @@ -1,4 +1,3 @@ - from pathlib import Path from one.api import ONE From 9fad4ec7d4ab1cca4afe836784838aa8b07c4662 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Thu, 19 Dec 2024 16:15:53 +0100 Subject: [PATCH 29/50] sdsc fix --- src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index 2f9749c..25b0513 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -29,7 +29,7 @@ def get_source_schema(cls) -> dict: dict The JSON schema for the source_data. """ - return get_json_schema_from_method_signature(cls, exclude=["source_data", "one"]) + return get_json_schema_from_method_signature(cls, exclude=["source_data", "one", "OneSdsc"]) # def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str] = None): def __init__( From 86868dce99e60d1ce7bf22f60f59da27e918dd16 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Mon, 23 Dec 2024 10:49:17 +0100 Subject: [PATCH 30/50] revisions bugfix --- .../_convert_brainwide_map_processed.py | 25 +++++++++++++------ .../datainterfaces/_brainwide_map_trials.py | 5 +--- .../datainterfaces/_ibl_sorting_extractor.py | 2 +- 3 files changed, 20 insertions(+), 12 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py index 1ad9cfd..935e5ae 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py @@ 
-1,8 +1,9 @@ import sys +from datetime import datetime from pathlib import Path -# from one.api import ONE -from deploy.iblsdsc import OneSdsc as ONE +# from deploy.iblsdsc import OneSdsc as ONE +from one.api import ONE from ibl_to_nwb.converters import BrainwideMapConverter from ibl_to_nwb.datainterfaces import ( @@ -17,6 +18,15 @@ from ibl_to_nwb.testing._consistency_checks import check_nwbfile_for_consistency +def get_last_before(eid: str, one: ONE, revision: str): + revisions = one.list_revisions(eid) + revisions = [datetime.strptime(revision, "%Y-%m-%d") for revision in revisions[1:]] + revision = datetime.strptime(revision, "%Y-%m-%d") + revisions = sorted(revisions) + ix = sum([not (rev > revision) for rev in revisions]) + return revisions[ix] + + def convert(eid: str, one: ONE, data_interfaces: list, raw: bool): # Run conversion session_converter = BrainwideMapConverter(one=one, session=eid, data_interfaces=data_interfaces, verbose=True) @@ -40,25 +50,26 @@ def convert(eid: str, one: ONE, data_interfaces: list, raw: bool): if __name__ == "__main__": - eid = sys.argv[1] + # eid = sys.argv[1] + eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" # path setup base_path = Path.home() / "ibl_scratch" output_folder = base_path / "nwbfiles" output_folder.mkdir(exist_ok=True, parents=True) - revision = "2024-07-10" - # Initialize IBL (ONE) client to download processed data for this session - # one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" + one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" one = ONE( base_url="https://openalyx.internationalbrainlab.org", password="international", mode="local", # silent=True, - # cache_dir=one_cache_folder_path, + cache_dir=one_cache_folder_path, ) + revision = get_last_before(eid=eid, one=one, revision="2024-07-10") + # Initialize as many of each interface as we need across the streams data_interfaces = list() diff --git a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py 
b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py index e0a5688..3c50242 100644 --- a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py +++ b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py @@ -15,8 +15,6 @@ def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session)[-1] if revision is None else revision - self.session_loader = SessionLoader(one=self.one, eid=self.session, revision=self.revision) - self.session_loader.load_trials() def get_metadata(self) -> dict: metadata = super().get_metadata() @@ -25,8 +23,7 @@ def get_metadata(self) -> dict: return metadata def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): - # trials = self.one.load_object(id=self.session, obj="trials", collection="alf", revision=self.revision) - trials = self.session_loader.trials + trials = self.one.load_dataset(self.session, "_ibl_trials.table.pqt", collection="alf", revision=self.revision) column_ordering = [ "choice", diff --git a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py index 25b0513..2f9749c 100644 --- a/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py +++ b/src/ibl_to_nwb/datainterfaces/_ibl_sorting_extractor.py @@ -29,7 +29,7 @@ def get_source_schema(cls) -> dict: dict The JSON schema for the source_data. 
""" - return get_json_schema_from_method_signature(cls, exclude=["source_data", "one", "OneSdsc"]) + return get_json_schema_from_method_signature(cls, exclude=["source_data", "one"]) # def __init__(self, session: str, cache_folder: Optional[DirectoryPath] = None, revision: Optional[str] = None): def __init__( From fcab8a9d14d89df8420c45d9690cd1d8e8673396 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 23 Dec 2024 09:49:27 +0000 Subject: [PATCH 31/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py | 1 - src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py | 1 - 2 files changed, 2 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py index 935e5ae..4a89473 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py @@ -1,4 +1,3 @@ -import sys from datetime import datetime from pathlib import Path diff --git a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py index 3c50242..a41280a 100644 --- a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py +++ b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py @@ -1,7 +1,6 @@ from pathlib import Path from typing import Optional -from brainbox.io.one import SessionLoader from hdmf.common import VectorData from neuroconv.basedatainterface import BaseDataInterface from neuroconv.utils import load_dict_from_file From 0b930d7b83c061f88654b64ec364d289c969d201 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Wed, 8 Jan 2025 13:14:39 +0000 Subject: [PATCH 32/50] updates with revision hack: session_id = eid:revision --- .gitignore | 3 +- .../_metadata/brainwide_map_general.yml | 2 +- 
.../_convert_brainwide_map_processed.py | 27 ++-- .../_scripts/_convert_brainwide_map_raw.py | 89 +++++++++++ .../_scripts/post_conversion_check.py | 36 +++++ .../_scripts/post_conversion_check_nwbfile.py | 31 ++++ .../datainterfaces/_brainwide_map_trials.py | 6 +- src/ibl_to_nwb/datainterfaces/_lick_times.py | 8 +- .../datainterfaces/_pose_estimation.py | 11 -- .../datainterfaces/_pupil_tracking.py | 3 - .../datainterfaces/_roi_motion_energy.py | 6 +- .../datainterfaces/_wheel_movement.py | 3 - src/ibl_to_nwb/testing/_consistency_checks.py | 140 ++++++++++-------- 13 files changed, 259 insertions(+), 106 deletions(-) create mode 100644 src/ibl_to_nwb/_scripts/_convert_brainwide_map_raw.py create mode 100644 src/ibl_to_nwb/_scripts/post_conversion_check.py create mode 100644 src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py diff --git a/.gitignore b/.gitignore index 5ca4667..daff4cb 100644 --- a/.gitignore +++ b/.gitignore @@ -134,4 +134,5 @@ dmypy.json #misc endpoint_schemas/ tests/ -src/local +src/ibl_to_nwb/local/ +.vscode diff --git a/src/ibl_to_nwb/_metadata/brainwide_map_general.yml b/src/ibl_to_nwb/_metadata/brainwide_map_general.yml index 1c6dc83..ec9ba5e 100644 --- a/src/ibl_to_nwb/_metadata/brainwide_map_general.yml +++ b/src/ibl_to_nwb/_metadata/brainwide_map_general.yml @@ -9,4 +9,4 @@ NWBFile: Subject: description: | Mice were housed under a 12/12 h light/dark cycle (normal or inverted depending on the laboratory) with food and water 112 available ad libitum, except during behavioural training days. Electrophysiological recordings and behavioural training were performed during either the dark or light phase of the subject cycle depending on the laboratory. Subjects were obtained from either the Jackson Laboratory or Charles River. 
- strain: C57BL/6 + strain: C57BL/6 \ No newline at end of file diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py index 935e5ae..8d5137f 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py @@ -1,4 +1,3 @@ -import sys from datetime import datetime from pathlib import Path @@ -15,30 +14,27 @@ RoiMotionEnergyInterface, WheelInterface, ) -from ibl_to_nwb.testing._consistency_checks import check_nwbfile_for_consistency def get_last_before(eid: str, one: ONE, revision: str): - revisions = one.list_revisions(eid) - revisions = [datetime.strptime(revision, "%Y-%m-%d") for revision in revisions[1:]] + revisions = one.list_revisions(eid, revision="*") + revisions = [datetime.strptime(revision, "%Y-%m-%d") for revision in revisions] revision = datetime.strptime(revision, "%Y-%m-%d") revisions = sorted(revisions) - ix = sum([not (rev > revision) for rev in revisions]) - return revisions[ix] + ix = sum([not (rev > revision) for rev in revisions]) - 1 + return revisions[ix].strftime("%Y-%m-%d") -def convert(eid: str, one: ONE, data_interfaces: list, raw: bool): +def convert(eid: str, one: ONE, data_interfaces: list, revision: str): # Run conversion session_converter = BrainwideMapConverter(one=one, session=eid, data_interfaces=data_interfaces, verbose=True) metadata = session_converter.get_metadata() + metadata["NWBFile"]["session_id"] = f'{eid}:{revision}' # FIXME this hack has to go subject_id = metadata["Subject"]["subject_id"] subject_folder_path = output_folder / f"sub-{subject_id}" subject_folder_path.mkdir(exist_ok=True) - if raw: - fname = f"sub-{subject_id}_ses-{eid}_desc-raw.nwb" - else: - fname = f"sub-{subject_id}_ses-{eid}_desc-processed.nwb" + fname = f"sub-{subject_id}_ses-{eid}_desc-processed.nwb" nwbfile_path = subject_folder_path / fname session_converter.run_conversion( @@ -63,7 +59,7 @@ def 
convert(eid: str, one: ONE, data_interfaces: list, raw: bool): one = ONE( base_url="https://openalyx.internationalbrainlab.org", password="international", - mode="local", + mode="remote", # silent=True, cache_dir=one_cache_folder_path, ) @@ -78,7 +74,7 @@ def convert(eid: str, one: ONE, data_interfaces: list, raw: bool): data_interfaces.append(BrainwideMapTrialsInterface(one=one, session=eid, revision=revision)) data_interfaces.append(WheelInterface(one=one, session=eid, revision=revision)) - # These interfaces may not be present; check if they are before adding to list + # # These interfaces may not be present; check if they are before adding to list pose_estimation_files = one.list_datasets(eid=eid, filename="*.dlc*") for pose_estimation_file in pose_estimation_files: camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") @@ -101,7 +97,4 @@ def convert(eid: str, one: ONE, data_interfaces: list, raw: bool): if one.list_datasets(eid=eid, collection="alf", filename="licks*"): data_interfaces.append(LickInterface(one=one, session=eid, revision=revision)) - nwbfile_path = convert(eid=eid, one=one, data_interfaces=data_interfaces, raw=False) - - # check - check_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) + nwbfile_path = convert(eid=eid, one=one, data_interfaces=data_interfaces, revision=revision) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_raw.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_raw.py new file mode 100644 index 0000000..f5a8272 --- /dev/null +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_raw.py @@ -0,0 +1,89 @@ +from datetime import datetime +from pathlib import Path + +# from deploy.iblsdsc import OneSdsc as ONE +from one.api import ONE + +from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter +from ibl_to_nwb.datainterfaces import RawVideoInterface + + +def get_last_before(eid: str, one: ONE, revision: str): + revisions = one.list_revisions(eid, revision="*") + 
revisions = [datetime.strptime(revision, "%Y-%m-%d") for revision in revisions] + revision = datetime.strptime(revision, "%Y-%m-%d") + revisions = sorted(revisions) + ix = sum([not (rev > revision) for rev in revisions]) - 1 + return revisions[ix].strftime("%Y-%m-%d") + + +def convert(eid: str, one: ONE, data_interfaces: list, raw: bool, revision: str): + # Run conversion + session_converter = BrainwideMapConverter(one=one, session=eid, data_interfaces=data_interfaces, verbose=True) + metadata = session_converter.get_metadata() + metadata["NWBFile"]["session_id"] = f'{eid}:{revision}' # FIXME this hack has to go + subject_id = metadata["Subject"]["subject_id"] + + subject_folder_path = output_folder / f"sub-{subject_id}" + subject_folder_path.mkdir(exist_ok=True) + fname = f"sub-{subject_id}_ses-{eid}_desc-raw.nwb" + + nwbfile_path = subject_folder_path / fname + session_converter.run_conversion( + nwbfile_path=nwbfile_path, + metadata=metadata, + overwrite=True, + ) + return nwbfile_path + + +if __name__ == "__main__": + # eid = sys.argv[1] + eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" + + # path setup + base_path = Path.home() / "ibl_scratch" + output_folder = base_path / "nwbfiles" + output_folder.mkdir(exist_ok=True, parents=True) + + # Initialize IBL (ONE) client to download processed data for this session + one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" + one = ONE( + base_url="https://openalyx.internationalbrainlab.org", + password="international", + mode="remote", + # silent=True, + cache_dir=one_cache_folder_path, + ) + + revision = get_last_before(eid=eid, one=one, revision="2024-07-10") + + # Initialize as many of each interface as we need across the streams + data_interfaces = list() + + # ephys + session_folder = one.eid2path(eid) + spikeglx_source_folder_path = session_folder / "raw_ephys_data" + + # Specify the path to the SpikeGLX files on the server but use ONE API for timestamps + spikeglx_subconverter = 
IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=one, eid=eid) + data_interfaces.append(spikeglx_subconverter) + + # video + metadata_retrieval = BrainwideMapConverter(one=one, session=eid, data_interfaces=[], verbose=False) + subject_id = metadata_retrieval.get_metadata()["Subject"]["subject_id"] + + pose_estimation_files = one.list_datasets(eid=eid, filename="*.dlc*") + for pose_estimation_file in pose_estimation_files: + camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") + + video_interface = RawVideoInterface( + nwbfiles_folder_path=output_folder, + subject_id=subject_id, + one=one, + session=eid, + camera_name=camera_name, + ) + data_interfaces.append(video_interface) + + nwbfile_path = convert(eid=eid, one=one, data_interfaces=data_interfaces, raw=False, revision=revision) diff --git a/src/ibl_to_nwb/_scripts/post_conversion_check.py b/src/ibl_to_nwb/_scripts/post_conversion_check.py new file mode 100644 index 0000000..80d9b2a --- /dev/null +++ b/src/ibl_to_nwb/_scripts/post_conversion_check.py @@ -0,0 +1,36 @@ +from one.api import ONE +from ibl_to_nwb.testing._consistency_checks import check_nwbfile_for_consistency +from pathlib import Path + +nwbfile_path = "" + +# eid = sys.argv[1] +eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" + +# path setup +base_path = Path.home() / "ibl_scratch" +output_folder = base_path / "nwbfiles" +output_folder.mkdir(exist_ok=True, parents=True) + +# Initialize IBL (ONE) client to download processed data for this session +one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" +one = ONE( + base_url="https://openalyx.internationalbrainlab.org", + password="international", + # mode="local", + mode="remote", + # silent=True, + cache_dir=one_cache_folder_path, +) + +subject_id = one.eid2ref(eid)["subject"] + +subject_folder_path = output_folder / f"sub-{subject_id}" +subject_folder_path.mkdir(exist_ok=True) +# if raw: +# fname = f"sub-{subject_id}_ses-{eid}_desc-raw.nwb" 
+# else: +fname = f"sub-{subject_id}_ses-{eid}_desc-processed.nwb" + +nwbfile_path = subject_folder_path / fname +check_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) diff --git a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py new file mode 100644 index 0000000..76bb7ea --- /dev/null +++ b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py @@ -0,0 +1,31 @@ +# %% +from pathlib import Path + +from one.api import ONE +from pynwb import NWBHDF5IO + +from ibl_to_nwb.testing._consistency_checks import check_nwbfile_for_consistency + +# path setup +nwbfile_path = Path("/home/georg/ibl_scratch/nwbfiles/sub-NR_0031/sub-NR_0031_ses-caa5dddc-9290-4e27-9f5e-575ba3598614_desc-processed-debug.nwb") +nwbfile = NWBHDF5IO.read_nwb(nwbfile_path) + +eid, revision = nwbfile.session_id.split(':') # this is the hack that has to be removed eventually + +# path setup +base_path = Path.home() / "ibl_scratch" +output_folder = base_path / "nwbfiles" +output_folder.mkdir(exist_ok=True, parents=True) + +# %% +# Initialize IBL (ONE) client to download processed data for this session +one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" +one = ONE( + base_url="https://openalyx.internationalbrainlab.org", + password="international", + mode="remote", + cache_dir=one_cache_folder_path, +) + +check_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) +# %% diff --git a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py index 3c50242..f66aca7 100644 --- a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py +++ b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py @@ -1,13 +1,13 @@ from pathlib import Path from typing import Optional -from brainbox.io.one import SessionLoader from hdmf.common import VectorData from neuroconv.basedatainterface import BaseDataInterface from neuroconv.utils import load_dict_from_file from one.api 
import ONE from pynwb import NWBFile from pynwb.epoch import TimeIntervals +from brainbox.io.one import SessionLoader class BrainwideMapTrialsInterface(BaseDataInterface): @@ -23,7 +23,9 @@ def get_metadata(self) -> dict: return metadata def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): - trials = self.one.load_dataset(self.session, "_ibl_trials.table.pqt", collection="alf", revision=self.revision) + session_loader = SessionLoader(one=self.one, eid=self.session, revision=self.revision) + session_loader.load_trials() + trials = session_loader.trials column_ordering = [ "choice", diff --git a/src/ibl_to_nwb/datainterfaces/_lick_times.py b/src/ibl_to_nwb/datainterfaces/_lick_times.py index b971a13..e38e310 100644 --- a/src/ibl_to_nwb/datainterfaces/_lick_times.py +++ b/src/ibl_to_nwb/datainterfaces/_lick_times.py @@ -13,12 +13,10 @@ def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session)[-1] if revision is None else revision - # self.session_loader = SessionLoader(one=self.one, eid=self.session, revision=self.revision) - # self.session_loader.load_licks() def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): - licks = self.one.load_object(id=self.session, obj="licks", collection="alf", revision=self.revision) - # licks = self.session_loader.licks + # licks = self.one.load_object(id=self.session, obj="licks", collection="alf") + licks = self.one.load_dataset(self.session, "licks.times", collection="alf", revision=self.revision) lick_events_table = DynamicTable( name="LickTimes", @@ -30,7 +28,7 @@ def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): VectorData( name="lick_time", description="Time stamps of licks as detected from tongue dlc traces", - data=licks["times"], + data=licks, ) ], ) diff --git a/src/ibl_to_nwb/datainterfaces/_pose_estimation.py b/src/ibl_to_nwb/datainterfaces/_pose_estimation.py index 5e0e49f..4686199 100644 --- 
a/src/ibl_to_nwb/datainterfaces/_pose_estimation.py +++ b/src/ibl_to_nwb/datainterfaces/_pose_estimation.py @@ -37,17 +37,6 @@ def __init__( self.revision = revision if self.revision is None: self.revision = one.list_revisions(session)[-1] - # session_files = self.one.list_datasets(eid=self.session, filename=f"*{self.camera_name}.dlc*") - # revision_datetime_format = "%Y-%m-%d" - # revisions = [ - # datetime.strptime(session_file.split("#")[1], revision_datetime_format) - # for session_file in session_files - # if "#" in session_file - # ] - - # if any(revisions): - # most_recent = max(revisions) - # self.revision = most_recent.strftime("%Y-%m-%d") def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict) -> None: camera_data = self.one.load_object( diff --git a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py index 5946c02..c0c4972 100644 --- a/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py +++ b/src/ibl_to_nwb/datainterfaces/_pupil_tracking.py @@ -4,7 +4,6 @@ from typing import Optional import numpy as np -from brainbox.io.one import SessionLoader from neuroconv.basedatainterface import BaseDataInterface from neuroconv.tools.nwb_helpers import get_module from neuroconv.utils import load_dict_from_file @@ -19,8 +18,6 @@ def __init__(self, one: ONE, session: str, camera_name: str, revision: Optional[ self.session = session self.camera_name = camera_name self.revision = one.list_revisions(session)[-1] if revision is None else revision - self.session_loader = SessionLoader(one=one, eid=session, revision=revision) - self.session_loader.load_pupil() def get_metadata(self) -> dict: metadata = super().get_metadata() diff --git a/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py b/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py index a3c6007..13c5c22 100644 --- a/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py +++ b/src/ibl_to_nwb/datainterfaces/_roi_motion_energy.py @@ -18,9 +18,11 @@ def __init__(self, one: 
ONE, session: str, camera_name: str, revision: Optional[ def add_to_nwbfile(self, nwbfile, metadata: dict): left_right_or_body = self.camera_name[:5].rstrip("C") - camera_data = self.one.load_object(id=self.session, obj=self.camera_name, collection="alf") + camera_data = self.one.load_object( + id=self.session, obj=self.camera_name, collection="alf", revision=self.revision + ) motion_energy_video_region = self.one.load_object( - id=self.session, obj=f"{left_right_or_body}ROIMotionEnergy", collection="alf", revision=self.revision + id=self.session, obj=f"{left_right_or_body}ROIMotionEnergy", collection="alf" ) width, height, x, y = motion_energy_video_region["position"] diff --git a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py index 42dd014..cefa455 100644 --- a/src/ibl_to_nwb/datainterfaces/_wheel_movement.py +++ b/src/ibl_to_nwb/datainterfaces/_wheel_movement.py @@ -2,7 +2,6 @@ from typing import Optional from brainbox.behavior import wheel as wheel_methods -from brainbox.io.one import SessionLoader from neuroconv.basedatainterface import BaseDataInterface from neuroconv.tools.nwb_helpers import get_module from neuroconv.utils import load_dict_from_file @@ -17,8 +16,6 @@ def __init__(self, one: ONE, session: str, revision: Optional[str] = None): self.one = one self.session = session self.revision = one.list_revisions(session) if revision is None else revision - self.session_loader = SessionLoader(eid=session, one=one, revision=revision) - self.session_loader.load_wheel() def get_metadata(self) -> dict: metadata = super().get_metadata() diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index 4c94136..0318bba 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -1,7 +1,7 @@ from pathlib import Path import numpy as np -from brainbox.io.one import SpikeSortingLoader +from brainbox.io.one import 
SessionLoader, SpikeSortingLoader from numpy.testing import assert_array_equal, assert_array_less from one.api import ONE from pandas.testing import assert_frame_equal @@ -9,41 +9,31 @@ def check_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): - """ - Check the processed-only NWB file for consistency with the equivalent calls to the ONE API. - - Parameters - ---------- - one : ONE - Initialized ONE client. - nwbfile_path : Path - Path to the NWB file. - """ with NWBHDF5IO(path=nwbfile_path, mode="r") as io: nwbfile = io.read() - eid = nwbfile.session_id # run all consistentcy checks - _check_wheel_data(eid=eid, nwbfile=nwbfile, one=one) - _check_lick_data(eid=eid, nwbfile=nwbfile, one=one) - _check_roi_motion_energy_data(eid=eid, nwbfile=nwbfile, one=one) - _check_pose_estimation_data(eid=eid, nwbfile=nwbfile, one=one) - _check_trials_data(eid=eid, nwbfile=nwbfile, one=one) - _check_pupil_tracking_data(eid=eid, nwbfile=nwbfile, one=one) - _check_spike_sorting_data(eid=eid, nwbfile=nwbfile, one=one) + _check_wheel_data(nwbfile=nwbfile, one=one) + _check_lick_data(nwbfile=nwbfile, one=one) + _check_roi_motion_energy_data(nwbfile=nwbfile, one=one) + _check_pose_estimation_data(nwbfile=nwbfile, one=one) + _check_trials_data(nwbfile=nwbfile, one=one) + _check_pupil_tracking_data(nwbfile=nwbfile, one=one) + _check_spike_sorting_data(nwbfile=nwbfile, one=one) def check_raw_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): with NWBHDF5IO(path=nwbfile_path, mode="r") as io: nwbfile = io.read() - eid = nwbfile.session_id + eid, revision = nwbfile.session_id.split(':') # run checks for raw files _check_raw_ephys_data(eid=eid, one=one, nwbfile=nwbfile) _check_raw_video_data(eid=eid, one=one, nwbfile=nwbfile, nwbfile_path=nwbfile_path) -def _check_wheel_data(*, eid: str, one: ONE, nwbfile: NWBFile, revision: str = None): +def _check_wheel_data(*, one: ONE, nwbfile: NWBFile): + eid, revision = nwbfile.session_id.split(':') processing_module = 
nwbfile.processing["wheel"] wheel_position_series = processing_module.data_interfaces["CompassDirection"].spatial_series["WheelPositionSeries"] wheel_movement_table = processing_module.data_interfaces["WheelMovementIntervals"][:] @@ -69,17 +59,21 @@ def _check_wheel_data(*, eid: str, one: ONE, nwbfile: NWBFile, revision: str = N assert_array_equal(x=data_from_ONE, y=data_from_NWB) -def _check_lick_data(*, eid: str, one: ONE, nwbfile: NWBFile): +def _check_lick_data(*, one: ONE, nwbfile: NWBFile): + eid, revision = nwbfile.session_id.split(':') + processing_module = nwbfile.processing["camera"] lick_times_table = processing_module.data_interfaces["LickTimes"][:] data_from_NWB = lick_times_table["lick_time"].values - data_from_ONE = one.load_dataset(eid, "licks.times") + data_from_ONE = one.load_dataset(eid, "licks.times", revision=revision) assert_array_equal(x=data_from_ONE, y=data_from_NWB) -def _check_roi_motion_energy_data(*, eid: str, one: ONE, nwbfile: NWBFile): +def _check_roi_motion_energy_data(*, one: ONE, nwbfile: NWBFile): processing_module = nwbfile.processing["camera"] + eid, revision = nwbfile.session_id.split(':') + camera_views = ["body", "left", "right"] for view in camera_views: @@ -87,17 +81,19 @@ def _check_roi_motion_energy_data(*, eid: str, one: ONE, nwbfile: NWBFile): # data data_from_NWB = camera_motion_energy.data[:] - data_from_ONE = one.load_dataset(eid, f"{view}Camera.ROIMotionEnergy", collection="alf") + data_from_ONE = one.load_dataset(eid, f"{view}Camera.ROIMotionEnergy", collection="alf", revision=revision) assert_array_equal(x=data_from_ONE, y=data_from_NWB) # timestamps data_from_NWB = camera_motion_energy.timestamps[:] - data_from_ONE = one.load_dataset(eid, f"_ibl_{view}Camera.times", collection="alf") + data_from_ONE = one.load_dataset(eid, f"_ibl_{view}Camera.times", collection="alf", revision=revision) assert_array_equal(x=data_from_ONE, y=data_from_NWB) -def _check_pose_estimation_data(*, eid: str, one: ONE, nwbfile: NWBFile, 
revision: str = None): +def _check_pose_estimation_data(*, one: ONE, nwbfile: NWBFile): processing_module = nwbfile.processing["camera"] + eid, revision = nwbfile.session_id.split(':') + camera_views = ["body", "left", "right"] for view in camera_views: @@ -107,32 +103,43 @@ def _check_pose_estimation_data(*, eid: str, one: ONE, nwbfile: NWBFile, revisio for node in nodes: # x data_from_NWB = pose_estimation_container.pose_estimation_series[node].data[:][:, 0] - data_from_ONE = one.load_dataset(eid, f"_ibl_{view}Camera.dlc.pqt", collection="alf")[f"{node}_x"].values + data_from_ONE = one.load_dataset(eid, f"_ibl_{view}Camera.dlc.pqt", collection="alf", revision=revision)[ + f"{node}_x" + ].values assert_array_equal(x=data_from_ONE, y=data_from_NWB) # y data_from_NWB = pose_estimation_container.pose_estimation_series[node].data[:][:, 1] - data_from_ONE = one.load_dataset(eid, f"_ibl_{view}Camera.dlc.pqt", collection="alf")[f"{node}_y"].values + data_from_ONE = one.load_dataset(eid, f"_ibl_{view}Camera.dlc.pqt", collection="alf", revision=revision)[ + f"{node}_y" + ].values assert_array_equal(x=data_from_ONE, y=data_from_NWB) # confidence data_from_NWB = pose_estimation_container.pose_estimation_series[node].confidence[:] - data_from_ONE = one.load_dataset(eid, f"_ibl_{view}Camera.dlc.pqt", collection="alf")[ + data_from_ONE = one.load_dataset(eid, f"_ibl_{view}Camera.dlc.pqt", collection="alf", revision=revision)[ f"{node}_likelihood" ].values assert_array_equal(x=data_from_ONE, y=data_from_NWB) # timestamps data_from_NWB = pose_estimation_container.pose_estimation_series[node].timestamps[:] - data_from_ONE = one.load_dataset(eid, f"_ibl_{view}Camera.times", collection="alf") + data_from_ONE = one.load_dataset(eid, f"_ibl_{view}Camera.times", collection="alf", revision=revision) assert_array_equal(x=data_from_ONE, y=data_from_NWB) -def _check_trials_data(*, eid: str, one: ONE, nwbfile: NWBFile): - data_from_NWB = nwbfile.trials[:] - data_from_ONE = 
one.load_dataset(eid, "_ibl_trials.table", collection="alf") - data_from_ONE["stimOff_times"] = one.load_dataset(eid, "_ibl_trials.stimOff_times", collection="alf") - data_from_ONE.index.name = "id" +def _check_trials_data(*, one: ONE, nwbfile: NWBFile): + eid, revision = nwbfile.session_id.split(':') + + + data_from_NWB = nwbfile.trials[:].reset_index(drop=True) + session_loader = SessionLoader(one=one, eid=eid, revision=revision) + session_loader.load_trials() + data_from_ONE = session_loader.trials.reset_index(drop=True) + + # data_from_ONE = one.load_dataset(eid, "_ibl_trials.table", collection="alf") + # data_from_ONE["stimOff_times"] = one.load_dataset(eid, "_ibl_trials.stimOff_times", collection="alf") + # data_from_ONE.index.name = "id" naming_map = { "start_time": "intervals_0", @@ -158,7 +165,9 @@ def _check_trials_data(*, eid: str, one: ONE, nwbfile: NWBFile): assert_frame_equal(left=data_from_NWB, right=data_from_ONE) -def _check_pupil_tracking_data(*, eid: str, one: ONE, nwbfile: NWBFile): +def _check_pupil_tracking_data(*, one: ONE, nwbfile: NWBFile): + eid, revision = nwbfile.session_id.split(':') + processing_module = nwbfile.processing["camera"] camera_views = ["left", "right"] @@ -167,30 +176,34 @@ def _check_pupil_tracking_data(*, eid: str, one: ONE, nwbfile: NWBFile): # raw data_from_NWB = pupil_tracking_container.time_series[f"{view.capitalize()}RawPupilDiameter"].data[:] - data_from_ONE = one.load_dataset(eid, f"_ibl_{view}Camera.features.pqt", collection="alf")[ + data_from_ONE = one.load_dataset(eid, f"_ibl_{view}Camera.features.pqt", collection="alf", revision=revision)[ "pupilDiameter_raw" ].values assert_array_equal(x=data_from_ONE, y=data_from_NWB) # smooth data_from_NWB = pupil_tracking_container.time_series[f"{view.capitalize()}SmoothedPupilDiameter"].data[:] - data_from_ONE = one.load_dataset(eid, f"_ibl_{view}Camera.features.pqt", collection="alf")[ + data_from_ONE = one.load_dataset(eid, f"_ibl_{view}Camera.features.pqt", 
collection="alf", revision=revision)[ "pupilDiameter_smooth" ].values assert_array_equal(x=data_from_ONE, y=data_from_NWB) -def _check_spike_sorting_data(*, eid: str, one: ONE, nwbfile: NWBFile, revision: str = None): - units_table = nwbfile.units[:] - probe_names = units_table["probe_name"].unique() +def _check_spike_sorting_data(*, one: ONE, nwbfile: NWBFile): + eid, revision = nwbfile.session_id.split(':') - if revision is None: - revision = one.list_revisions(eid)[-1] + pids, probe_names = one.eid2pid(eid) + pids = dict(zip(probe_names, pids)) - spike_times = {} - spike_clusters = {} - cluster_uuids = {} + units_table = nwbfile.units[:] + # probe_names = units_table["probe_name"].unique() + + # spike_times = {} + # spike_clusters = {} + # cluster_uuids = {} + spikes = {} + clusters = {} # for fast spike extraction def get_spikes_for_cluster(spike_clusters, spike_times, cluster): @@ -198,25 +211,26 @@ def get_spikes_for_cluster(spike_clusters, spike_times, cluster): start_ix, stop_ix = np.searchsorted(spike_clusters, [cluster, cluster + 1]) return np.sort(spike_times[start_ix:stop_ix]) - # get and prep data once + # get and prep data for probe_name in probe_names: - collection = f"alf/{probe_name}/pykilosort" - spike_times[probe_name] = one.load_dataset(eid, "spikes.times", collection=collection, revision=revision) - spike_clusters[probe_name] = one.load_dataset(eid, "spikes.clusters", collection=collection, revision=revision) - cluster_uuids[probe_name] = one.load_dataset(eid, "clusters.uuids", collection=collection, revision=revision) + spike_sorting_loader = SpikeSortingLoader(pid=pids[probe_name], one=one) + spikes_, clusters_, _ = spike_sorting_loader.load_spike_sorting(revision=revision) + spikes[probe_name] = spikes_ + clusters[probe_name] = clusters_ # pre-sort for fast access - sort_ix = np.argsort(spike_clusters[probe_name]) - spike_clusters[probe_name] = spike_clusters[probe_name][sort_ix] - spike_times[probe_name] = spike_times[probe_name][sort_ix] + 
sort_ix = np.argsort(spikes[probe_name]['clusters']) + spikes[probe_name]['times'] = spikes[probe_name]['times'][sort_ix] + spikes[probe_name]['clusters'] = spikes[probe_name]['clusters'][sort_ix] for ix in units_table.index: - probe_name = units_table.loc[ix, "probe_name"] - uuid = units_table.loc[ix, "cluster_uuid"] + probe_name, uuid = units_table.loc[ix, ["probe_name", "cluster_uuid"]] + assert uuid in clusters[probe_name]['uuids'].values spike_times_from_NWB = units_table.loc[ix, "spike_times"] - cluster_id = np.where(cluster_uuids[probe_name] == uuid)[0][0] - spike_times_from_ONE = get_spikes_for_cluster(spike_clusters[probe_name], spike_times[probe_name], cluster_id) + cluster_id = np.where(clusters[probe_name]['uuids'] == uuid)[0][0] + spikes[probe_name]['clusters'] + spike_times_from_ONE = get_spikes_for_cluster(spikes[probe_name]['clusters'], spikes[probe_name]['times'], cluster_id) # more verbose but slower for more than ~20 checks # spike_times_from_ONE = spike_times[probe_name][spike_clusters[probe_name] == cluster_id] @@ -225,7 +239,9 @@ def get_spikes_for_cluster(spike_clusters, spike_times, cluster): assert_array_less(np.max((spike_times_from_ONE - spike_times_from_NWB) * 30000), 1) -def _check_raw_ephys_data(*, eid: str, one: ONE, nwbfile: NWBFile, pname: str = None, band: str = "ap"): +def _check_raw_ephys_data(*, one: ONE, nwbfile: NWBFile, pname: str = None, band: str = "ap"): + eid, revision = nwbfile.session_id.split(':') + # data_one pids, pnames_one = one.eid2pid(eid) pidname_map = dict(zip(pnames_one, pids)) @@ -276,7 +292,9 @@ def _check_raw_ephys_data(*, eid: str, one: ONE, nwbfile: NWBFile, pname: str = np.testing.assert_array_equal(nwb_timestamps, brainbox_timestamps) -def _check_raw_video_data(*, eid: str, one: ONE, nwbfile: NWBFile, nwbfile_path: str): +def _check_raw_video_data(*, one: ONE, nwbfile: NWBFile, nwbfile_path: str): + eid, revision = nwbfile.session_id.split(':') + # timestamps datasets = one.list_datasets(eid, 
"*Camera.times*", collection="alf") cameras = [key for key in nwbfile.acquisition.keys() if key.endswith("Camera")] From 631dec9cf46e22206958d4a78ff6c9fa9d109dd1 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Wed, 8 Jan 2025 13:15:08 +0000 Subject: [PATCH 33/50] ruff happiness --- .../_convert_brainwide_map_processed.py | 2 +- .../_scripts/_convert_brainwide_map_raw.py | 2 +- .../_scripts/post_conversion_check.py | 4 +- .../_scripts/post_conversion_check_nwbfile.py | 6 ++- .../datainterfaces/_brainwide_map_trials.py | 2 +- src/ibl_to_nwb/testing/_consistency_checks.py | 45 +++++++++---------- 6 files changed, 32 insertions(+), 29 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py index 8d5137f..8071cb9 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py @@ -29,7 +29,7 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str): # Run conversion session_converter = BrainwideMapConverter(one=one, session=eid, data_interfaces=data_interfaces, verbose=True) metadata = session_converter.get_metadata() - metadata["NWBFile"]["session_id"] = f'{eid}:{revision}' # FIXME this hack has to go + metadata["NWBFile"]["session_id"] = f"{eid}:{revision}" # FIXME this hack has to go subject_id = metadata["Subject"]["subject_id"] subject_folder_path = output_folder / f"sub-{subject_id}" diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_raw.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_raw.py index f5a8272..c3626f0 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_raw.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_raw.py @@ -21,7 +21,7 @@ def convert(eid: str, one: ONE, data_interfaces: list, raw: bool, revision: str) # Run conversion session_converter = BrainwideMapConverter(one=one, session=eid, data_interfaces=data_interfaces, verbose=True) metadata = 
session_converter.get_metadata() - metadata["NWBFile"]["session_id"] = f'{eid}:{revision}' # FIXME this hack has to go + metadata["NWBFile"]["session_id"] = f"{eid}:{revision}" # FIXME this hack has to go subject_id = metadata["Subject"]["subject_id"] subject_folder_path = output_folder / f"sub-{subject_id}" diff --git a/src/ibl_to_nwb/_scripts/post_conversion_check.py b/src/ibl_to_nwb/_scripts/post_conversion_check.py index 80d9b2a..bcdad28 100644 --- a/src/ibl_to_nwb/_scripts/post_conversion_check.py +++ b/src/ibl_to_nwb/_scripts/post_conversion_check.py @@ -1,6 +1,8 @@ +from pathlib import Path + from one.api import ONE + from ibl_to_nwb.testing._consistency_checks import check_nwbfile_for_consistency -from pathlib import Path nwbfile_path = "" diff --git a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py index 76bb7ea..c923f46 100644 --- a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py +++ b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py @@ -7,10 +7,12 @@ from ibl_to_nwb.testing._consistency_checks import check_nwbfile_for_consistency # path setup -nwbfile_path = Path("/home/georg/ibl_scratch/nwbfiles/sub-NR_0031/sub-NR_0031_ses-caa5dddc-9290-4e27-9f5e-575ba3598614_desc-processed-debug.nwb") +nwbfile_path = Path( + "/home/georg/ibl_scratch/nwbfiles/sub-NR_0031/sub-NR_0031_ses-caa5dddc-9290-4e27-9f5e-575ba3598614_desc-processed-debug.nwb" +) nwbfile = NWBHDF5IO.read_nwb(nwbfile_path) -eid, revision = nwbfile.session_id.split(':') # this is the hack that has to be removed eventually +eid, revision = nwbfile.session_id.split(":") # this is the hack that has to be removed eventually # path setup base_path = Path.home() / "ibl_scratch" diff --git a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py index f66aca7..ebd3b09 100644 --- a/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py +++ 
b/src/ibl_to_nwb/datainterfaces/_brainwide_map_trials.py @@ -1,13 +1,13 @@ from pathlib import Path from typing import Optional +from brainbox.io.one import SessionLoader from hdmf.common import VectorData from neuroconv.basedatainterface import BaseDataInterface from neuroconv.utils import load_dict_from_file from one.api import ONE from pynwb import NWBFile from pynwb.epoch import TimeIntervals -from brainbox.io.one import SessionLoader class BrainwideMapTrialsInterface(BaseDataInterface): diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index 0318bba..247fd4f 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -25,7 +25,7 @@ def check_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): def check_raw_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): with NWBHDF5IO(path=nwbfile_path, mode="r") as io: nwbfile = io.read() - eid, revision = nwbfile.session_id.split(':') + eid, revision = nwbfile.session_id.split(":") # run checks for raw files _check_raw_ephys_data(eid=eid, one=one, nwbfile=nwbfile) @@ -33,7 +33,7 @@ def check_raw_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): def _check_wheel_data(*, one: ONE, nwbfile: NWBFile): - eid, revision = nwbfile.session_id.split(':') + eid, revision = nwbfile.session_id.split(":") processing_module = nwbfile.processing["wheel"] wheel_position_series = processing_module.data_interfaces["CompassDirection"].spatial_series["WheelPositionSeries"] wheel_movement_table = processing_module.data_interfaces["WheelMovementIntervals"][:] @@ -60,8 +60,8 @@ def _check_wheel_data(*, one: ONE, nwbfile: NWBFile): def _check_lick_data(*, one: ONE, nwbfile: NWBFile): - eid, revision = nwbfile.session_id.split(':') - + eid, revision = nwbfile.session_id.split(":") + processing_module = nwbfile.processing["camera"] lick_times_table = processing_module.data_interfaces["LickTimes"][:] @@ -72,8 +72,7 @@ 
def _check_lick_data(*, one: ONE, nwbfile: NWBFile): def _check_roi_motion_energy_data(*, one: ONE, nwbfile: NWBFile): processing_module = nwbfile.processing["camera"] - eid, revision = nwbfile.session_id.split(':') - + eid, revision = nwbfile.session_id.split(":") camera_views = ["body", "left", "right"] for view in camera_views: @@ -92,8 +91,7 @@ def _check_roi_motion_energy_data(*, one: ONE, nwbfile: NWBFile): def _check_pose_estimation_data(*, one: ONE, nwbfile: NWBFile): processing_module = nwbfile.processing["camera"] - eid, revision = nwbfile.session_id.split(':') - + eid, revision = nwbfile.session_id.split(":") camera_views = ["body", "left", "right"] for view in camera_views: @@ -129,8 +127,7 @@ def _check_pose_estimation_data(*, one: ONE, nwbfile: NWBFile): def _check_trials_data(*, one: ONE, nwbfile: NWBFile): - eid, revision = nwbfile.session_id.split(':') - + eid, revision = nwbfile.session_id.split(":") data_from_NWB = nwbfile.trials[:].reset_index(drop=True) session_loader = SessionLoader(one=one, eid=eid, revision=revision) @@ -166,8 +163,8 @@ def _check_trials_data(*, one: ONE, nwbfile: NWBFile): def _check_pupil_tracking_data(*, one: ONE, nwbfile: NWBFile): - eid, revision = nwbfile.session_id.split(':') - + eid, revision = nwbfile.session_id.split(":") + processing_module = nwbfile.processing["camera"] camera_views = ["left", "right"] @@ -191,9 +188,9 @@ def _check_pupil_tracking_data(*, one: ONE, nwbfile: NWBFile): def _check_spike_sorting_data(*, one: ONE, nwbfile: NWBFile): - eid, revision = nwbfile.session_id.split(':') + eid, revision = nwbfile.session_id.split(":") - pids, probe_names = one.eid2pid(eid) + pids, probe_names = one.eid2pid(eid) pids = dict(zip(probe_names, pids)) units_table = nwbfile.units[:] @@ -219,18 +216,20 @@ def get_spikes_for_cluster(spike_clusters, spike_times, cluster): clusters[probe_name] = clusters_ # pre-sort for fast access - sort_ix = np.argsort(spikes[probe_name]['clusters']) - spikes[probe_name]['times'] = 
spikes[probe_name]['times'][sort_ix] - spikes[probe_name]['clusters'] = spikes[probe_name]['clusters'][sort_ix] + sort_ix = np.argsort(spikes[probe_name]["clusters"]) + spikes[probe_name]["times"] = spikes[probe_name]["times"][sort_ix] + spikes[probe_name]["clusters"] = spikes[probe_name]["clusters"][sort_ix] for ix in units_table.index: probe_name, uuid = units_table.loc[ix, ["probe_name", "cluster_uuid"]] - assert uuid in clusters[probe_name]['uuids'].values + assert uuid in clusters[probe_name]["uuids"].values spike_times_from_NWB = units_table.loc[ix, "spike_times"] - cluster_id = np.where(clusters[probe_name]['uuids'] == uuid)[0][0] - spikes[probe_name]['clusters'] - spike_times_from_ONE = get_spikes_for_cluster(spikes[probe_name]['clusters'], spikes[probe_name]['times'], cluster_id) + cluster_id = np.where(clusters[probe_name]["uuids"] == uuid)[0][0] + spikes[probe_name]["clusters"] + spike_times_from_ONE = get_spikes_for_cluster( + spikes[probe_name]["clusters"], spikes[probe_name]["times"], cluster_id + ) # more verbose but slower for more than ~20 checks # spike_times_from_ONE = spike_times[probe_name][spike_clusters[probe_name] == cluster_id] @@ -240,7 +239,7 @@ def get_spikes_for_cluster(spike_clusters, spike_times, cluster): def _check_raw_ephys_data(*, one: ONE, nwbfile: NWBFile, pname: str = None, band: str = "ap"): - eid, revision = nwbfile.session_id.split(':') + eid, revision = nwbfile.session_id.split(":") # data_one pids, pnames_one = one.eid2pid(eid) @@ -293,7 +292,7 @@ def _check_raw_ephys_data(*, one: ONE, nwbfile: NWBFile, pname: str = None, band def _check_raw_video_data(*, one: ONE, nwbfile: NWBFile, nwbfile_path: str): - eid, revision = nwbfile.session_id.split(':') + eid, revision = nwbfile.session_id.split(":") # timestamps datasets = one.list_datasets(eid, "*Camera.times*", collection="alf") From 225a0f8d99944a875dfcb5eb2ad13d1b8daa20c6 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 8 Jan 2025 13:16:58 +0000 Subject: [PATCH 34/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ibl_to_nwb/_metadata/brainwide_map_general.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ibl_to_nwb/_metadata/brainwide_map_general.yml b/src/ibl_to_nwb/_metadata/brainwide_map_general.yml index ec9ba5e..1c6dc83 100644 --- a/src/ibl_to_nwb/_metadata/brainwide_map_general.yml +++ b/src/ibl_to_nwb/_metadata/brainwide_map_general.yml @@ -9,4 +9,4 @@ NWBFile: Subject: description: | Mice were housed under a 12/12 h light/dark cycle (normal or inverted depending on the laboratory) with food and water 112 available ad libitum, except during behavioural training days. Electrophysiological recordings and behavioural training were performed during either the dark or light phase of the subject cycle depending on the laboratory. Subjects were obtained from either the Jackson Laboratory or Charles River. 
- strain: C57BL/6 \ No newline at end of file + strain: C57BL/6 From 497ba319f4cf066e6f502292d27158093bd44801 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Wed, 8 Jan 2025 14:19:11 +0000 Subject: [PATCH 35/50] one scripts to convert both raw and processed, and also to to run on SDSC and locally --- .../_scripts/_convert_brainwide_map.py | 155 ++++++++++++++++++ .../_convert_brainwide_map_processed.py | 14 +- .../_scripts/_convert_brainwide_map_raw.py | 14 +- 3 files changed, 175 insertions(+), 8 deletions(-) create mode 100644 src/ibl_to_nwb/_scripts/_convert_brainwide_map.py diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py new file mode 100644 index 0000000..9be14b9 --- /dev/null +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py @@ -0,0 +1,155 @@ +import os +import sys +from datetime import datetime +from pathlib import Path + +# if running on SDSC, use the OneSdsc, else normal +if "USE_SDSC_ONE" in os.environ: + from deploy.iblsdsc import OneSdsc as ONE +else: + from one.api import ONE + +from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter +from ibl_to_nwb.datainterfaces import ( + BrainwideMapTrialsInterface, + IblPoseEstimationInterface, + IblSortingInterface, + LickInterface, + PupilTrackingInterface, + RawVideoInterface, + RoiMotionEnergyInterface, + WheelInterface, +) + + +def get_last_before(eid: str, one: ONE, revision: str): + revisions = one.list_revisions(eid, revision="*") + revisions = [datetime.strptime(revision, "%Y-%m-%d") for revision in revisions] + revision = datetime.strptime(revision, "%Y-%m-%d") + revisions = sorted(revisions) + ix = sum([not (rev > revision) for rev in revisions]) - 1 + return revisions[ix].strftime("%Y-%m-%d") + + +def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str): + # Run conversion + session_converter = BrainwideMapConverter(one=one, session=eid, data_interfaces=data_interfaces, 
verbose=True) + metadata = session_converter.get_metadata() + metadata["NWBFile"]["session_id"] = f"{eid}:{revision}" # FIXME this hack has to go + subject_id = metadata["Subject"]["subject_id"] + + subject_folder_path = output_folder / f"sub-{subject_id}" + subject_folder_path.mkdir(exist_ok=True) + if mode == "raw": + fname = f"sub-{subject_id}_ses-{eid}_desc-raw_ecephys+image.nwb" + if mode == "processed": + fname = f"sub-{subject_id}_ses-{eid}_desc-processed_behavior+ecephys.nwb" + + nwbfile_path = subject_folder_path / fname + session_converter.run_conversion( + nwbfile_path=nwbfile_path, + metadata=metadata, + overwrite=True, + ) + return nwbfile_path + + +if __name__ == "__main__": + if len(sys.argv) == 1: + eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" + mode = "raw" + else: + eid = sys.argv[1] + mode = sys.argv[2] # raw or processed + + # path setup + base_path = Path.home() / "ibl_scratch" + output_folder = base_path / "nwbfiles" + output_folder.mkdir(exist_ok=True, parents=True) + + one_kwargs = dict( + base_url="https://openalyx.internationalbrainlab.org", + password="international", + mode="remote", + ) + + # if not running on SDSC adding the cache folder explicitly + if "USE_SDSC_ONE" not in os.environ: + # Initialize IBL (ONE) client to download processed data for this session + one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" + one_kwargs["cache_dir"] = one_cache_folder_path + + # instantiate one + one = ONE(**one_kwargs) + + # correct revision + revision = get_last_before(eid=eid, one=one, revision="2024-07-10") + + # Initialize as many of each interface as we need across the streams + data_interfaces = [] + + if mode == "raw": + # ephys + session_folder = one.eid2path(eid) + spikeglx_source_folder_path = session_folder / "raw_ephys_data" + + # Specify the path to the SpikeGLX files on the server but use ONE API for timestamps + spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=one, 
eid=eid) + data_interfaces.append(spikeglx_subconverter) + + # video + metadata_retrieval = BrainwideMapConverter(one=one, session=eid, data_interfaces=[], verbose=False) + subject_id = metadata_retrieval.get_metadata()["Subject"]["subject_id"] + + pose_estimation_files = one.list_datasets(eid=eid, filename="*.dlc*") + for pose_estimation_file in pose_estimation_files: + camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") + + video_interface = RawVideoInterface( + nwbfiles_folder_path=output_folder, + subject_id=subject_id, + one=one, + session=eid, + camera_name=camera_name, + ) + data_interfaces.append(video_interface) + + if mode == "processed": + # These interfaces should always be present in source data + data_interfaces.append(IblSortingInterface(one=one, session=eid, revision=revision)) + data_interfaces.append(BrainwideMapTrialsInterface(one=one, session=eid, revision=revision)) + data_interfaces.append(WheelInterface(one=one, session=eid, revision=revision)) + + # # These interfaces may not be present; check if they are before adding to list + pose_estimation_files = one.list_datasets(eid=eid, filename="*.dlc*") + for pose_estimation_file in pose_estimation_files: + camera_name = pose_estimation_file.replace("alf/_ibl_", "").replace(".dlc.pqt", "") + data_interfaces.append( + IblPoseEstimationInterface(one=one, session=eid, camera_name=camera_name, revision=revision) + ) + + pupil_tracking_files = one.list_datasets(eid=eid, filename="*features*") + for pupil_tracking_file in pupil_tracking_files: + camera_name = pupil_tracking_file.replace("alf/_ibl_", "").replace(".features.pqt", "") + data_interfaces.append( + PupilTrackingInterface(one=one, session=eid, camera_name=camera_name, revision=revision) + ) + + roi_motion_energy_files = one.list_datasets(eid=eid, filename="*ROIMotionEnergy.npy*") + for roi_motion_energy_file in roi_motion_energy_files: + camera_name = roi_motion_energy_file.replace("alf/", 
"").replace(".ROIMotionEnergy.npy", "") + data_interfaces.append( + RoiMotionEnergyInterface(one=one, session=eid, camera_name=camera_name, revision=revision) + ) + + if one.list_datasets(eid=eid, collection="alf", filename="licks*"): + data_interfaces.append(LickInterface(one=one, session=eid, revision=revision)) + + # run the conversion + nwbfile_path = convert( + eid=eid, + one=one, + data_interfaces=data_interfaces, + revision=revision, + mode=mode, + ) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py index 8071cb9..0853d9b 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_processed.py @@ -1,8 +1,12 @@ +import os +import sys from datetime import datetime from pathlib import Path -# from deploy.iblsdsc import OneSdsc as ONE -from one.api import ONE +if "USE_SDSC_ONE" in os.envion: + from deploy.iblsdsc import OneSdsc as ONE +else: + from one.api import ONE from ibl_to_nwb.converters import BrainwideMapConverter from ibl_to_nwb.datainterfaces import ( @@ -46,8 +50,10 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str): if __name__ == "__main__": - # eid = sys.argv[1] - eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" + if len(sys.argv) == 1: + eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" + else: + eid = sys.argv[1] # path setup base_path = Path.home() / "ibl_scratch" diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_raw.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_raw.py index c3626f0..e16180b 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map_raw.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map_raw.py @@ -1,8 +1,12 @@ +import os +import sys from datetime import datetime from pathlib import Path -# from deploy.iblsdsc import OneSdsc as ONE -from one.api import ONE +if "USE_SDSC_ONE" in os.envion: + from deploy.iblsdsc import OneSdsc as ONE +else: + from 
one.api import ONE from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter from ibl_to_nwb.datainterfaces import RawVideoInterface @@ -38,8 +42,10 @@ def convert(eid: str, one: ONE, data_interfaces: list, raw: bool, revision: str) if __name__ == "__main__": - # eid = sys.argv[1] - eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" + if len(sys.argv) == 1: + eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" + else: + eid = sys.argv[1] # path setup base_path = Path.home() / "ibl_scratch" From b72fd85088cb0fbeabb5c611359d9a4329aa9a27 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Wed, 8 Jan 2025 14:27:57 +0000 Subject: [PATCH 36/50] rest caching disabled --- src/ibl_to_nwb/_scripts/_convert_brainwide_map.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py index 9be14b9..f141886 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py @@ -67,6 +67,7 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) output_folder = base_path / "nwbfiles" output_folder.mkdir(exist_ok=True, parents=True) + # common one_kwargs = dict( base_url="https://openalyx.internationalbrainlab.org", password="international", @@ -74,7 +75,9 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) ) # if not running on SDSC adding the cache folder explicitly - if "USE_SDSC_ONE" not in os.environ: + if "USE_SDSC_ONE" in os.environ: + one_kwargs["cache_rest"] = None # disables rest caching (write permission errors on popeye) + else: # Initialize IBL (ONE) client to download processed data for this session one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" one_kwargs["cache_dir"] = one_cache_folder_path From a8811a6a1cf585501ff8108752ee99e220f9e622 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Thu, 9 Jan 2025 14:55:06 +0000 
Subject: [PATCH 37/50] decompression and cleaup added --- .../_scripts/_convert_brainwide_map.py | 31 +++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py index f141886..9c3bccb 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py @@ -2,11 +2,15 @@ import sys from datetime import datetime from pathlib import Path +import spikeglx +import shutil # if running on SDSC, use the OneSdsc, else normal if "USE_SDSC_ONE" in os.environ: + print("using SDSC ONE") from deploy.iblsdsc import OneSdsc as ONE else: + print("using regular ONE") from one.api import ONE from ibl_to_nwb.converters import BrainwideMapConverter, IblSpikeGlxConverter @@ -54,6 +58,8 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) return nwbfile_path +cleanup = False + if __name__ == "__main__": if len(sys.argv) == 1: eid = "caa5dddc-9290-4e27-9f5e-575ba3598614" @@ -62,6 +68,9 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) eid = sys.argv[1] mode = sys.argv[2] # raw or processed + print(eid) + print(mode) + # path setup base_path = Path.home() / "ibl_scratch" output_folder = base_path / "nwbfiles" @@ -96,6 +105,19 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) session_folder = one.eid2path(eid) spikeglx_source_folder_path = session_folder / "raw_ephys_data" + # check and decompress + # get paths + cbin_paths = [] + for root, dirs, files in os.walk(spikeglx_source_folder_path): + for file in files: + if file.endswith(".cbin"): + cbin_paths.append(Path(root) / file) + + for path in cbin_paths: + if not path.with_suffix(".bin").exists(): + print(f"decompressing {path}") + spikeglx.reader(path).decompress_to_scratch() + # Specify the path to the SpikeGLX files on the server but use ONE API for timestamps spikeglx_subconverter 
= IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=one, eid=eid) data_interfaces.append(spikeglx_subconverter) @@ -156,3 +178,12 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) revision=revision, mode=mode, ) + + # cleanup + if cleanup: + if mode == "raw": + for path in cbin_paths: + bin_path = path.with_suffix(".bin") + if bin_path.exists(): + print(f"removing {bin_path}") + os.remove(bin_path) From fed267951dcb9722f41b7076d6b9e73ceb2eb0a5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 9 Jan 2025 14:55:17 +0000 Subject: [PATCH 38/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ibl_to_nwb/_scripts/_convert_brainwide_map.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py index 9c3bccb..bcf1bb4 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py @@ -2,8 +2,8 @@ import sys from datetime import datetime from pathlib import Path + import spikeglx -import shutil # if running on SDSC, use the OneSdsc, else normal if "USE_SDSC_ONE" in os.environ: From b894d6a14817a815dfd94b16eb774923ec36804c Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Thu, 9 Jan 2025 16:51:47 +0000 Subject: [PATCH 39/50] decompression to scratch added --- .../_scripts/_convert_brainwide_map.py | 30 ++++++++++++++----- 1 file changed, 22 insertions(+), 8 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py index 9c3bccb..a2b7ec3 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py @@ -26,6 +26,21 @@ ) +def create_symlinks(source_dir, target_dir): + """replicates the tree under source_dir at 
target dir in the form of symlinks""" + for root, dirs, files in os.walk(source_dir): + for dir in dirs: + folder = target_dir / (Path(root) / dir).relative_to(source_dir) + folder.mkdir(parents=True, exist_ok=True) + + for root, dirs, files in os.walk(source_dir): + for file in files: + source_file_path = Path(root) / file + target_file_path = target_dir / source_file_path.relative_to(source_dir) + if not target_file_path.exists(): + target_file_path.symlink_to(source_file_path) + + def get_last_before(eid: str, one: ONE, revision: str): revisions = one.list_revisions(eid, revision="*") revisions = [datetime.strptime(revision, "%Y-%m-%d") for revision in revisions] @@ -75,6 +90,7 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) base_path = Path.home() / "ibl_scratch" output_folder = base_path / "nwbfiles" output_folder.mkdir(exist_ok=True, parents=True) + local_scratch_folder = base_path / eid # common one_kwargs = dict( @@ -105,10 +121,12 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) session_folder = one.eid2path(eid) spikeglx_source_folder_path = session_folder / "raw_ephys_data" + # create symlinks at local scratch + create_symlinks(spikeglx_source_folder_path, local_scratch_folder) + # check and decompress - # get paths cbin_paths = [] - for root, dirs, files in os.walk(spikeglx_source_folder_path): + for root, dirs, files in os.walk(local_scratch_folder): for file in files: if file.endswith(".cbin"): cbin_paths.append(Path(root) / file) @@ -116,7 +134,7 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) for path in cbin_paths: if not path.with_suffix(".bin").exists(): print(f"decompressing {path}") - spikeglx.reader(path).decompress_to_scratch() + spikeglx.Reader(path).decompress_to_scratch() # Specify the path to the SpikeGLX files on the server but use ONE API for timestamps spikeglx_subconverter = 
IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=one, eid=eid) @@ -182,8 +200,4 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) # cleanup if cleanup: if mode == "raw": - for path in cbin_paths: - bin_path = path.with_suffix(".bin") - if bin_path.exists(): - print(f"removing {bin_path}") - os.remove(bin_path) + shutil.rmtree(local_scratch_folder) From cf853a2c770a346e2c79623c8c2cc6ffa3af066e Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Thu, 9 Jan 2025 16:52:46 +0000 Subject: [PATCH 40/50] ruff --- src/ibl_to_nwb/_scripts/_convert_brainwide_map.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py index 30b4136..902ac2a 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py @@ -1,4 +1,5 @@ import os +import shutil import sys from datetime import datetime from pathlib import Path From 221f6b71747b0b4ff8a4f43440127a5615fce2ca Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Thu, 9 Jan 2025 17:04:30 +0000 Subject: [PATCH 41/50] removing uuids from filenames of symbolic links --- src/ibl_to_nwb/_scripts/_convert_brainwide_map.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py index 902ac2a..b69c73e 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py @@ -27,7 +27,7 @@ ) -def create_symlinks(source_dir, target_dir): +def create_symlinks(source_dir, target_dir, remove_uuid=True): """replicates the tree under source_dir at target dir in the form of symlinks""" for root, dirs, files in os.walk(source_dir): for dir in dirs: @@ -36,6 +36,10 @@ def create_symlinks(source_dir, target_dir): for root, dirs, files in os.walk(source_dir): for file in files: + if remove_uuid: 
+ parts = file.split(".") + parts.remove(parts[-2]) + file = "".join(parts) source_file_path = Path(root) / file target_file_path = target_dir / source_file_path.relative_to(source_dir) if not target_file_path.exists(): From ca472a102a61de00b6b9f442e79f7787f501421f Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Thu, 9 Jan 2025 17:18:33 +0000 Subject: [PATCH 42/50] cleanup updated --- src/ibl_to_nwb/_scripts/_convert_brainwide_map.py | 1 + 1 file changed, 1 insertion(+) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py index b69c73e..2525696 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py @@ -205,4 +205,5 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) # cleanup if cleanup: if mode == "raw": + os.system(f"find {local_scratch_folder} -type l -exec unlink {{}} \;") shutil.rmtree(local_scratch_folder) From b74359d6f541956e94483da1d2e490371a233e17 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Thu, 9 Jan 2025 17:26:09 +0000 Subject: [PATCH 43/50] bugfix in symlink creater (uuid removal) --- src/ibl_to_nwb/_scripts/_convert_brainwide_map.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py index 2525696..fab3a20 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py @@ -36,12 +36,13 @@ def create_symlinks(source_dir, target_dir, remove_uuid=True): for root, dirs, files in os.walk(source_dir): for file in files: - if remove_uuid: - parts = file.split(".") - parts.remove(parts[-2]) - file = "".join(parts) source_file_path = Path(root) / file target_file_path = target_dir / source_file_path.relative_to(source_dir) + if remove_uuid: + parent, name = target_file_path.parent, target_file_path.name + name_parts = 
name.split(".") + name_parts.remove(name_parts[-2]) + target_file_path = parent / ".".join(name_parts) if not target_file_path.exists(): target_file_path.symlink_to(source_file_path) From 3a3d323ff196bd7258e18d95d31cf8567aa28562 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Mon, 13 Jan 2025 10:12:09 +0000 Subject: [PATCH 44/50] post conversion check update and bugfix --- .../_scripts/_convert_brainwide_map.py | 2 +- .../_scripts/post_conversion_check_nwbfile.py | 17 ++++++++++------- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py index fab3a20..b2dfb7c 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py @@ -143,7 +143,7 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) spikeglx.Reader(path).decompress_to_scratch() # Specify the path to the SpikeGLX files on the server but use ONE API for timestamps - spikeglx_subconverter = IblSpikeGlxConverter(folder_path=spikeglx_source_folder_path, one=one, eid=eid) + spikeglx_subconverter = IblSpikeGlxConverter(folder_path=local_scratch_folder, one=one, eid=eid) data_interfaces.append(spikeglx_subconverter) # video diff --git a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py index c923f46..cb13292 100644 --- a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py +++ b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py @@ -1,15 +1,16 @@ # %% +import sys from pathlib import Path from one.api import ONE from pynwb import NWBHDF5IO -from ibl_to_nwb.testing._consistency_checks import check_nwbfile_for_consistency +from ibl_to_nwb.testing._consistency_checks import check_nwbfile_for_consistency, check_raw_nwbfile_for_consistency + +nwbfile_path = sys.argv[1] +if "raw" in nwbfile_path: + raw = True -# path setup -nwbfile_path = Path( - 
"/home/georg/ibl_scratch/nwbfiles/sub-NR_0031/sub-NR_0031_ses-caa5dddc-9290-4e27-9f5e-575ba3598614_desc-processed-debug.nwb" -) nwbfile = NWBHDF5IO.read_nwb(nwbfile_path) eid, revision = nwbfile.session_id.split(":") # this is the hack that has to be removed eventually @@ -19,7 +20,6 @@ output_folder = base_path / "nwbfiles" output_folder.mkdir(exist_ok=True, parents=True) -# %% # Initialize IBL (ONE) client to download processed data for this session one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" one = ONE( @@ -29,5 +29,8 @@ cache_dir=one_cache_folder_path, ) -check_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) +if raw: + check_raw_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) +else: + check_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) # %% From 1f3cf2533581fc3c9aca07216de63c9f1a5d1967 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Mon, 13 Jan 2025 10:58:21 +0000 Subject: [PATCH 45/50] checking all bands all probes --- .../_scripts/post_conversion_check_nwbfile.py | 27 ++++-- src/ibl_to_nwb/testing/_consistency_checks.py | 82 ++++++++++--------- 2 files changed, 66 insertions(+), 43 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py index cb13292..c504327 100644 --- a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py +++ b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py @@ -1,8 +1,16 @@ # %% +import os import sys from pathlib import Path -from one.api import ONE +# if running on SDSC, use the OneSdsc, else normal +if "USE_SDSC_ONE" in os.environ: + print("using SDSC ONE") + from deploy.iblsdsc import OneSdsc as ONE +else: + print("using regular ONE") + from one.api import ONE + from pynwb import NWBHDF5IO from ibl_to_nwb.testing._consistency_checks import check_nwbfile_for_consistency, check_raw_nwbfile_for_consistency @@ -20,15 +28,24 @@ output_folder = base_path / "nwbfiles" 
output_folder.mkdir(exist_ok=True, parents=True) -# Initialize IBL (ONE) client to download processed data for this session -one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" -one = ONE( +# common +one_kwargs = dict( base_url="https://openalyx.internationalbrainlab.org", password="international", mode="remote", - cache_dir=one_cache_folder_path, ) +# if not running on SDSC adding the cache folder explicitly +if "USE_SDSC_ONE" in os.environ: + one_kwargs["cache_rest"] = None # disables rest caching (write permission errors on popeye) +else: + # Initialize IBL (ONE) client to download processed data for this session + one_cache_folder_path = base_path / "ibl_conversion" / eid / "cache" + one_kwargs["cache_dir"] = one_cache_folder_path + +# instantiate one +one = ONE(**one_kwargs) + if raw: check_raw_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) else: diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index 247fd4f..d598efc 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -25,11 +25,10 @@ def check_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): def check_raw_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): with NWBHDF5IO(path=nwbfile_path, mode="r") as io: nwbfile = io.read() - eid, revision = nwbfile.session_id.split(":") # run checks for raw files - _check_raw_ephys_data(eid=eid, one=one, nwbfile=nwbfile) - _check_raw_video_data(eid=eid, one=one, nwbfile=nwbfile, nwbfile_path=nwbfile_path) + _check_raw_ephys_data(one=one, nwbfile=nwbfile) + _check_raw_video_data(one=one, nwbfile=nwbfile, nwbfile_path=nwbfile_path) def _check_wheel_data(*, one: ONE, nwbfile: NWBFile): @@ -241,54 +240,61 @@ def get_spikes_for_cluster(spike_clusters, spike_times, cluster): def _check_raw_ephys_data(*, one: ONE, nwbfile: NWBFile, pname: str = None, band: str = "ap"): eid, revision = nwbfile.session_id.split(":") - # 
data_one + # comparing probe names pids, pnames_one = one.eid2pid(eid) pidname_map = dict(zip(pnames_one, pids)) - pid = pidname_map[pname] - spike_sorting_loader = SpikeSortingLoader(pid=pid, one=one) - sglx_streamer = spike_sorting_loader.raw_electrophysiology(band=band, stream=True) - data_one = sglx_streamer._raw pname_to_imec = { "probe00": "Imec0", "probe01": "Imec1", } + imec_to_pname = dict(zip(pname_to_imec.values(), pname_to_imec.keys())) imecs = [key.split(band.upper())[1] for key in list(nwbfile.acquisition.keys()) if band.upper() in key] pnames_nwb = [imec_to_pname[imec] for imec in imecs] assert set(pnames_one) == set(pnames_nwb) - # nwb ephys data - imec = pname_to_imec[pname] - data_nwb = nwbfile.acquisition[f"ElectricalSeries{band.upper()}{imec}"].data - - # compare number of samples in both - n_samples_one = data_one.shape[0] - n_samples_nwb = data_nwb.shape[0] - - assert n_samples_nwb == n_samples_one - - # draw a random set of samples and check if they are equal in value - n_samples, n_channels = data_nwb.shape - - ix = np.column_stack( - [ - np.random.randint(n_samples, size=10), - np.random.randint(n_channels, size=10), - ] - ) - - samples_nwb = np.array([data_nwb[*i] for i in ix]) - samples_one = np.array([data_one[*i] for i in ix]) - np.testing.assert_array_equal(samples_nwb, samples_one) - - # check the time stamps - nwb_timestamps = nwbfile.acquisition[f"ElectricalSeries{band.upper()}{imec}"].timestamps[:] - - # from brainbox.io - brainbox_timestamps = spike_sorting_loader.samples2times(np.arange(0, sglx_streamer.ns), direction="forward") - np.testing.assert_array_equal(nwb_timestamps, brainbox_timestamps) + # comparing ephys samples + for pname in pnames_nwb: + for band in ["ap", "lf"]: + pid = pidname_map[pname] + spike_sorting_loader = SpikeSortingLoader(pid=pid, one=one) + sglx_streamer = spike_sorting_loader.raw_electrophysiology(band=band, stream=True) + data_one = sglx_streamer._raw + + # nwb ephys data + imec = pname_to_imec[pname] 
+ data_nwb = nwbfile.acquisition[f"ElectricalSeries{band.upper()}{imec}"].data + + # compare number of samples in both + n_samples_one = data_one.shape[0] + n_samples_nwb = data_nwb.shape[0] + + assert n_samples_nwb == n_samples_one + + # draw a random set of samples and check if they are equal in value + n_samples, n_channels = data_nwb.shape + + ix = np.column_stack( + [ + np.random.randint(n_samples, size=10), + np.random.randint(n_channels, size=10), + ] + ) + + samples_nwb = np.array([data_nwb[*i] for i in ix]) + samples_one = np.array([data_one[*i] for i in ix]) + np.testing.assert_array_equal(samples_nwb, samples_one) + + # check the time stamps + nwb_timestamps = nwbfile.acquisition[f"ElectricalSeries{band.upper()}{imec}"].timestamps[:] + + # from brainbox.io + brainbox_timestamps = spike_sorting_loader.samples2times( + np.arange(0, sglx_streamer.ns), direction="forward" + ) + np.testing.assert_array_equal(nwb_timestamps, brainbox_timestamps) def _check_raw_video_data(*, one: ONE, nwbfile: NWBFile, nwbfile_path: str): From 4a8c726c36ff80c34a0415bbd6133b76a0569ed6 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Tue, 14 Jan 2025 02:30:43 -0800 Subject: [PATCH 46/50] WIP commit / SDSC updates --- .../_scripts/_convert_brainwide_map.py | 45 ++++++++++++++----- .../_scripts/post_conversion_check_nwbfile.py | 6 ++- src/ibl_to_nwb/helpers.py | 23 ++++++++++ src/ibl_to_nwb/testing/_consistency_checks.py | 32 ++++++------- 4 files changed, 76 insertions(+), 30 deletions(-) create mode 100644 src/ibl_to_nwb/helpers.py diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py index b2dfb7c..f7acf6f 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py @@ -27,17 +27,38 @@ ) -def create_symlinks(source_dir, target_dir, remove_uuid=True): +# def create_symlinks(source_dir, target_dir, remove_uuid=True): +# """replicates the tree under source_dir at 
target dir in the form of symlinks""" +# for root, dirs, files in os.walk(source_dir): +# for dir in dirs: +# folder = target_dir / (Path(root) / dir).relative_to(source_dir) +# folder.mkdir(parents=True, exist_ok=True) + +# for root, dirs, files in os.walk(source_dir): +# for file in files: +# source_file_path = Path(root) / file +# target_file_path = target_dir / source_file_path.relative_to(source_dir) +# if remove_uuid: +# parent, name = target_file_path.parent, target_file_path.name +# name_parts = name.split(".") +# name_parts.remove(name_parts[-2]) +# target_file_path = parent / ".".join(name_parts) +# if not target_file_path.exists(): +# target_file_path.symlink_to(source_file_path) + +def create_symlinks(source_dir, target_dir, remove_uuid=True, filter=None): """replicates the tree under source_dir at target dir in the form of symlinks""" - for root, dirs, files in os.walk(source_dir): - for dir in dirs: - folder = target_dir / (Path(root) / dir).relative_to(source_dir) - folder.mkdir(parents=True, exist_ok=True) for root, dirs, files in os.walk(source_dir): for file in files: source_file_path = Path(root) / file + if filter is not None: + if filter not in str(source_file_path): + continue + target_file_path = target_dir / source_file_path.relative_to(source_dir) + target_file_path.parent.mkdir(parents=True, exist_ok=True) + if remove_uuid: parent, name = target_file_path.parent, target_file_path.name name_parts = name.split(".") @@ -46,7 +67,6 @@ def create_symlinks(source_dir, target_dir, remove_uuid=True): if not target_file_path.exists(): target_file_path.symlink_to(source_file_path) - def get_last_before(eid: str, one: ONE, revision: str): revisions = one.list_revisions(eid, revision="*") revisions = [datetime.strptime(revision, "%Y-%m-%d") for revision in revisions] @@ -96,7 +116,7 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) base_path = Path.home() / "ibl_scratch" output_folder = base_path / "nwbfiles" 
output_folder.mkdir(exist_ok=True, parents=True) - local_scratch_folder = base_path / eid + session_scratch_folder = base_path / eid # common one_kwargs = dict( @@ -128,11 +148,11 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) spikeglx_source_folder_path = session_folder / "raw_ephys_data" # create symlinks at local scratch - create_symlinks(spikeglx_source_folder_path, local_scratch_folder) + create_symlinks(spikeglx_source_folder_path, session_scratch_folder) # check and decompress cbin_paths = [] - for root, dirs, files in os.walk(local_scratch_folder): + for root, dirs, files in os.walk(session_scratch_folder): for file in files: if file.endswith(".cbin"): cbin_paths.append(Path(root) / file) @@ -143,7 +163,7 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) spikeglx.Reader(path).decompress_to_scratch() # Specify the path to the SpikeGLX files on the server but use ONE API for timestamps - spikeglx_subconverter = IblSpikeGlxConverter(folder_path=local_scratch_folder, one=one, eid=eid) + spikeglx_subconverter = IblSpikeGlxConverter(folder_path=session_scratch_folder, one=one, eid=eid) data_interfaces.append(spikeglx_subconverter) # video @@ -206,5 +226,6 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) # cleanup if cleanup: if mode == "raw": - os.system(f"find {local_scratch_folder} -type l -exec unlink {{}} \;") - shutil.rmtree(local_scratch_folder) + os.system(f"find {session_scratch_folder} -type l -exec unlink {{}} \;") + shutil.rmtree(session_scratch_folder) +# find . 
-type l -exec unlink {} \;") \ No newline at end of file diff --git a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py index c504327..d6537f8 100644 --- a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py +++ b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py @@ -15,18 +15,19 @@ from ibl_to_nwb.testing._consistency_checks import check_nwbfile_for_consistency, check_raw_nwbfile_for_consistency -nwbfile_path = sys.argv[1] +# nwbfile_path = sys.argv[1] +nwbfile_path = "/mnt/home/graiser/ibl_scratch/nwbfiles/sub-NR_0031/sub-NR_0031_ses-caa5dddc-9290-4e27-9f5e-575ba3598614_desc-raw_ecephys+image.nwb" if "raw" in nwbfile_path: raw = True nwbfile = NWBHDF5IO.read_nwb(nwbfile_path) - eid, revision = nwbfile.session_id.split(":") # this is the hack that has to be removed eventually # path setup base_path = Path.home() / "ibl_scratch" output_folder = base_path / "nwbfiles" output_folder.mkdir(exist_ok=True, parents=True) +# session_scratch_folder = base_path / eid # common one_kwargs = dict( @@ -51,3 +52,4 @@ else: check_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) # %% +print('all checks passed') \ No newline at end of file diff --git a/src/ibl_to_nwb/helpers.py b/src/ibl_to_nwb/helpers.py new file mode 100644 index 0000000..3714079 --- /dev/null +++ b/src/ibl_to_nwb/helpers.py @@ -0,0 +1,23 @@ +import os +from pathlib import Path + +def create_symlinks(source_dir, target_dir, remove_uuid=True, filter=None): + """replicates the tree under source_dir at target dir in the form of symlinks""" + + for root, dirs, files in os.walk(source_dir): + for file in files: + source_file_path = Path(root) / file + if filter is not None: + if filter not in str(source_file_path): + continue + + target_file_path = target_dir / source_file_path.relative_to(source_dir) + target_file_path.parent.mkdir(parents=True, exist_ok=True) + + if remove_uuid: + parent, name = target_file_path.parent, 
target_file_path.name + name_parts = name.split(".") + name_parts.remove(name_parts[-2]) + target_file_path = parent / ".".join(name_parts) + if not target_file_path.exists(): + target_file_path.symlink_to(source_file_path) \ No newline at end of file diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py b/src/ibl_to_nwb/testing/_consistency_checks.py index d598efc..03a033b 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -23,12 +23,12 @@ def check_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): def check_raw_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): - with NWBHDF5IO(path=nwbfile_path, mode="r") as io: - nwbfile = io.read() - - # run checks for raw files - _check_raw_ephys_data(one=one, nwbfile=nwbfile) - _check_raw_video_data(one=one, nwbfile=nwbfile, nwbfile_path=nwbfile_path) + # with NWBHDF5IO(path=nwbfile_path, mode="r") as io: + # nwbfile = io.read() + nwbfile = NWBHDF5IO(path=nwbfile_path, mode="r").read() + # run checks for raw files + _check_raw_ephys_data(one=one, nwbfile=nwbfile) + _check_raw_video_data(one=one, nwbfile=nwbfile, nwbfile_path=nwbfile_path) def _check_wheel_data(*, one: ONE, nwbfile: NWBFile): @@ -276,16 +276,15 @@ def _check_raw_ephys_data(*, one: ONE, nwbfile: NWBFile, pname: str = None, band # draw a random set of samples and check if they are equal in value n_samples, n_channels = data_nwb.shape - ix = np.column_stack( - [ - np.random.randint(n_samples, size=10), - np.random.randint(n_channels, size=10), - ] - ) - - samples_nwb = np.array([data_nwb[*i] for i in ix]) - samples_one = np.array([data_one[*i] for i in ix]) - np.testing.assert_array_equal(samples_nwb, samples_one) + ix = np.random.randint(n_samples, size=10) + for i in ix: + samples_nwb = data_nwb[i] + samples_one = data_one[int(i)][:-1] # excluding the digital channel + np.testing.assert_array_equal(samples_nwb, samples_one) + + # samples_nwb = np.array([data_nwb[*i] for i in 
ix]) + # samples_one = np.array([data_one[*i] for i in ix]) + # np.testing.assert_array_equal(samples_nwb, samples_one) # check the time stamps nwb_timestamps = nwbfile.acquisition[f"ElectricalSeries{band.upper()}{imec}"].timestamps[:] @@ -295,6 +294,7 @@ def _check_raw_ephys_data(*, one: ONE, nwbfile: NWBFile, pname: str = None, band np.arange(0, sglx_streamer.ns), direction="forward" ) np.testing.assert_array_equal(nwb_timestamps, brainbox_timestamps) + print(f"passing {pname}, {band}") def _check_raw_video_data(*, one: ONE, nwbfile: NWBFile, nwbfile_path: str): From 53eb6c29186412a25404933c60b65b34b7359199 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 14 Jan 2025 10:32:22 +0000 Subject: [PATCH 47/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ibl_to_nwb/_scripts/_convert_brainwide_map.py | 5 +++-- src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py | 3 +-- src/ibl_to_nwb/helpers.py | 3 ++- src/ibl_to_nwb/testing/_consistency_checks.py | 6 +++--- 4 files changed, 9 insertions(+), 8 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py index f7acf6f..c8daa27 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py @@ -26,7 +26,6 @@ WheelInterface, ) - # def create_symlinks(source_dir, target_dir, remove_uuid=True): # """replicates the tree under source_dir at target dir in the form of symlinks""" # for root, dirs, files in os.walk(source_dir): @@ -46,6 +45,7 @@ # if not target_file_path.exists(): # target_file_path.symlink_to(source_file_path) + def create_symlinks(source_dir, target_dir, remove_uuid=True, filter=None): """replicates the tree under source_dir at target dir in the form of symlinks""" @@ -67,6 +67,7 @@ def create_symlinks(source_dir, target_dir, remove_uuid=True, 
filter=None): if not target_file_path.exists(): target_file_path.symlink_to(source_file_path) + def get_last_before(eid: str, one: ONE, revision: str): revisions = one.list_revisions(eid, revision="*") revisions = [datetime.strptime(revision, "%Y-%m-%d") for revision in revisions] @@ -228,4 +229,4 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) if mode == "raw": os.system(f"find {session_scratch_folder} -type l -exec unlink {{}} \;") shutil.rmtree(session_scratch_folder) -# find . -type l -exec unlink {} \;") \ No newline at end of file +# find . -type l -exec unlink {} \;") diff --git a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py index d6537f8..ec8bf6f 100644 --- a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py +++ b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py @@ -1,6 +1,5 @@ # %% import os -import sys from pathlib import Path # if running on SDSC, use the OneSdsc, else normal @@ -52,4 +51,4 @@ else: check_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) # %% -print('all checks passed') \ No newline at end of file +print("all checks passed") diff --git a/src/ibl_to_nwb/helpers.py b/src/ibl_to_nwb/helpers.py index 3714079..a2132ce 100644 --- a/src/ibl_to_nwb/helpers.py +++ b/src/ibl_to_nwb/helpers.py @@ -1,6 +1,7 @@ import os from pathlib import Path + def create_symlinks(source_dir, target_dir, remove_uuid=True, filter=None): """replicates the tree under source_dir at target dir in the form of symlinks""" @@ -20,4 +21,4 @@ def create_symlinks(source_dir, target_dir, remove_uuid=True, filter=None): name_parts.remove(name_parts[-2]) target_file_path = parent / ".".join(name_parts) if not target_file_path.exists(): - target_file_path.symlink_to(source_file_path) \ No newline at end of file + target_file_path.symlink_to(source_file_path) diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py 
b/src/ibl_to_nwb/testing/_consistency_checks.py index 03a033b..50143f0 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -276,12 +276,12 @@ def _check_raw_ephys_data(*, one: ONE, nwbfile: NWBFile, pname: str = None, band # draw a random set of samples and check if they are equal in value n_samples, n_channels = data_nwb.shape - ix = np.random.randint(n_samples, size=10) + ix = np.random.randint(n_samples, size=10) for i in ix: samples_nwb = data_nwb[i] - samples_one = data_one[int(i)][:-1] # excluding the digital channel + samples_one = data_one[int(i)][:-1] # excluding the digital channel np.testing.assert_array_equal(samples_nwb, samples_one) - + # samples_nwb = np.array([data_nwb[*i] for i in ix]) # samples_one = np.array([data_one[*i] for i in ix]) # np.testing.assert_array_equal(samples_nwb, samples_one) From cc2a508ac02f4a0d3da37fd70b020f37b399a544 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Tue, 14 Jan 2025 02:34:24 -0800 Subject: [PATCH 48/50] wip --- .../_scripts/post_conversion_check_nwbfile.py | 5 +++-- src/ibl_to_nwb/testing/_consistency_checks.py | 12 ++++++------ 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py index d6537f8..64e2303 100644 --- a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py +++ b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py @@ -20,8 +20,9 @@ if "raw" in nwbfile_path: raw = True -nwbfile = NWBHDF5IO.read_nwb(nwbfile_path) -eid, revision = nwbfile.session_id.split(":") # this is the hack that has to be removed eventually +with NWBHDF5IO(path=nwbfile_path, mode="r") as io: + nwbfile = io.read() + eid, revision = nwbfile.session_id.split(":") # this is the hack that has to be removed eventually # path setup base_path = Path.home() / "ibl_scratch" diff --git a/src/ibl_to_nwb/testing/_consistency_checks.py 
b/src/ibl_to_nwb/testing/_consistency_checks.py index 03a033b..19c1b79 100644 --- a/src/ibl_to_nwb/testing/_consistency_checks.py +++ b/src/ibl_to_nwb/testing/_consistency_checks.py @@ -23,12 +23,12 @@ def check_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): def check_raw_nwbfile_for_consistency(*, one: ONE, nwbfile_path: Path): - # with NWBHDF5IO(path=nwbfile_path, mode="r") as io: - # nwbfile = io.read() - nwbfile = NWBHDF5IO(path=nwbfile_path, mode="r").read() - # run checks for raw files - _check_raw_ephys_data(one=one, nwbfile=nwbfile) - _check_raw_video_data(one=one, nwbfile=nwbfile, nwbfile_path=nwbfile_path) + with NWBHDF5IO(path=nwbfile_path, mode="r") as io: + nwbfile = io.read() + + # run checks for raw files + _check_raw_ephys_data(one=one, nwbfile=nwbfile) + _check_raw_video_data(one=one, nwbfile=nwbfile, nwbfile_path=nwbfile_path) def _check_wheel_data(*, one: ONE, nwbfile: NWBFile): From 77be87c8311de5ac4b88863cd4bf471f77d27e99 Mon Sep 17 00:00:00 2001 From: Georg Raiser Date: Tue, 14 Jan 2025 11:08:11 +0000 Subject: [PATCH 49/50] made a mess ... 
cleanup --- .../_scripts/_convert_brainwide_map.py | 46 ++----------------- .../_scripts/post_conversion_check_nwbfile.py | 12 ++--- 2 files changed, 10 insertions(+), 48 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py index c8daa27..1598666 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py @@ -6,6 +6,8 @@ import spikeglx +from ibl_to_nwb.helpers import create_symlinks + # if running on SDSC, use the OneSdsc, else normal if "USE_SDSC_ONE" in os.environ: print("using SDSC ONE") @@ -26,47 +28,6 @@ WheelInterface, ) -# def create_symlinks(source_dir, target_dir, remove_uuid=True): -# """replicates the tree under source_dir at target dir in the form of symlinks""" -# for root, dirs, files in os.walk(source_dir): -# for dir in dirs: -# folder = target_dir / (Path(root) / dir).relative_to(source_dir) -# folder.mkdir(parents=True, exist_ok=True) - -# for root, dirs, files in os.walk(source_dir): -# for file in files: -# source_file_path = Path(root) / file -# target_file_path = target_dir / source_file_path.relative_to(source_dir) -# if remove_uuid: -# parent, name = target_file_path.parent, target_file_path.name -# name_parts = name.split(".") -# name_parts.remove(name_parts[-2]) -# target_file_path = parent / ".".join(name_parts) -# if not target_file_path.exists(): -# target_file_path.symlink_to(source_file_path) - - -def create_symlinks(source_dir, target_dir, remove_uuid=True, filter=None): - """replicates the tree under source_dir at target dir in the form of symlinks""" - - for root, dirs, files in os.walk(source_dir): - for file in files: - source_file_path = Path(root) / file - if filter is not None: - if filter not in str(source_file_path): - continue - - target_file_path = target_dir / source_file_path.relative_to(source_dir) - target_file_path.parent.mkdir(parents=True, exist_ok=True) - - if remove_uuid: - parent, 
name = target_file_path.parent, target_file_path.name - name_parts = name.split(".") - name_parts.remove(name_parts[-2]) - target_file_path = parent / ".".join(name_parts) - if not target_file_path.exists(): - target_file_path.symlink_to(source_file_path) - def get_last_before(eid: str, one: ONE, revision: str): revisions = one.list_revisions(eid, revision="*") @@ -227,6 +188,7 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) # cleanup if cleanup: if mode == "raw": + # find . -type l -exec unlink {} \;") os.system(f"find {session_scratch_folder} -type l -exec unlink {{}} \;") shutil.rmtree(session_scratch_folder) -# find . -type l -exec unlink {} \;") + diff --git a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py index 1f1e490..02010da 100644 --- a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py +++ b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py @@ -15,9 +15,8 @@ from ibl_to_nwb.testing._consistency_checks import check_nwbfile_for_consistency, check_raw_nwbfile_for_consistency # nwbfile_path = sys.argv[1] -nwbfile_path = "/mnt/home/graiser/ibl_scratch/nwbfiles/sub-NR_0031/sub-NR_0031_ses-caa5dddc-9290-4e27-9f5e-575ba3598614_desc-raw_ecephys+image.nwb" -if "raw" in nwbfile_path: - raw = True +nwbfile_path = "/home/georg/ibl_scratch/nwbfiles/sub-NR_0031/sub-NR_0031_ses-caa5dddc-9290-4e27-9f5e-575ba3598614_desc-processed_behavior+ecephys.nwb" +mode = "raw" if "raw" in nwbfile_path else "processed" with NWBHDF5IO(path=nwbfile_path, mode="r") as io: nwbfile = io.read() @@ -47,9 +46,10 @@ # instantiate one one = ONE(**one_kwargs) -if raw: +if mode == 'raw': check_raw_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) -else: +if mode == 'processed': check_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) # %% -print("all checks passed") + +print(f"all checks passed, mode={mode}") From 315fa05d7c1e8d7f80e37cd1fddc200a05cc736d Mon 
Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 14 Jan 2025 11:08:21 +0000 Subject: [PATCH 50/50] [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci --- src/ibl_to_nwb/_scripts/_convert_brainwide_map.py | 1 - src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py | 4 ++-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py index 1598666..2c40582 100644 --- a/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py +++ b/src/ibl_to_nwb/_scripts/_convert_brainwide_map.py @@ -191,4 +191,3 @@ def convert(eid: str, one: ONE, data_interfaces: list, revision: str, mode: str) # find . -type l -exec unlink {} \;") os.system(f"find {session_scratch_folder} -type l -exec unlink {{}} \;") shutil.rmtree(session_scratch_folder) - diff --git a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py index 02010da..a264c7a 100644 --- a/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py +++ b/src/ibl_to_nwb/_scripts/post_conversion_check_nwbfile.py @@ -46,9 +46,9 @@ # instantiate one one = ONE(**one_kwargs) -if mode == 'raw': +if mode == "raw": check_raw_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) -if mode == 'processed': +if mode == "processed": check_nwbfile_for_consistency(one=one, nwbfile_path=nwbfile_path) # %%