Skip to content

Commit

Permalink
Merge pull request #39 from shenshan/master
Browse files — browse the repository at this point in the history
Behavior bug fixes/Break transaction for big ingestions/Histology tables for resolved probe trajectories/Check data completeness of ProbeInsertions/Workshop tutorials
  • Loading branch information
Thinh Nguyen authored Feb 26, 2021
2 parents 579ee26 + b474bcc commit a9bdcd8
Show file tree
Hide file tree
Showing 54 changed files with 8,823 additions and 791 deletions.
1 change: 1 addition & 0 deletions .dockerignore
100755 → 100644
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
data/*
snapshots/*
4 changes: 2 additions & 2 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@

# some unused files
notebooks/notebooks_tutorial/202102_behavior_paper/one_data/*
rasters/*
notebooks/.*
notebooks/notebooks_plotting/*
scripts/update_entries.py
scripts/compare_tables.py
scripts/delete_shadow_tables.py
scripts/report_missing_trialset.py
scripts/update_entries.py
scripts/update_entries.py

# IDEA configs
.idea/
Expand Down
32 changes: 32 additions & 0 deletions Dockerfile.brain
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
# Image for the IBL brain-wide-map pipeline: DataJoint lab base image plus the
# iblenv conda environment and the int-brain-lab tool stack installed from git.
FROM datajoint/djlab:py3.8-debian

RUN pip install --upgrade pip
RUN pip install --upgrade datajoint

COPY --chown=dja:anaconda ./iblenv.yaml /tmp/iblenv.yaml
USER root
RUN . /root/.bashrc
RUN conda init bash
RUN conda install conda-build
RUN conda update -n base -c defaults conda
RUN conda update --all
RUN conda config --set channel_priority false
RUN conda env create --file /tmp/iblenv.yaml
# NOTE(review): each RUN is its own shell, so this activation does not persist
# into later layers; subsequent pip/conda calls run in the base env unless the
# SHELL is changed — TODO confirm intended.
RUN conda activate iblenv
# BUG FIX: the PyPI package is `importlib_resources` (plural); the original
# `importlib_resource` does not exist, so this layer could never install.
RUN pip install importlib_resources imageio
# --no-dependencies is pip's long alias for --no-deps: take each repo as-is
# without resolving its requirements (the conda env already provides them).
RUN pip install --no-dependencies git+https://github.com/int-brain-lab/ibllib
RUN pip install --no-dependencies git+https://github.com/int-brain-lab/iblapps
RUN pip install --no-dependencies git+https://github.com/cortex-lab/phylib

USER dja:anaconda
ADD . /src/IBL-pipeline

USER root
# BUG FIX: `-e` takes the next argument as the editable path, so the original
# order (`-e --no-dependencies /src/IBL-pipeline`) made pip treat the flag as
# the project path and fail; options first, path last.
RUN pip install --no-dependencies -e /src/IBL-pipeline
RUN conda install -c conda-forge opencv -y
COPY --chown=dja:anaconda ./apt_requirements.txt /tmp/apt_requirements.txt
RUN apt update
USER dja:anaconda
RUN \
/entrypoint.sh echo "Requirements updated..." && \
rm "${APT_REQUIREMENTS}"
22 changes: 22 additions & 0 deletions docker-compose-brain.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,22 @@
# Compose definition for the brain-wide-map pipeline container: builds
# Dockerfile.brain, mounts the working tree and data folders, and exposes the
# in-container Jupyter port 8888 on host port 9003.
version: '3'
services:
  datajoint_brain:
    build:
      context: .
      dockerfile: Dockerfile.brain
    container_name: ibl_datajoint_brain
    env_file: .env
    volumes:
      - ./notebooks:/home/dja
      - ./images:/images
      - .:/src/IBL-pipeline
      - ./data:/data
      - ./raster:/raster
      - ./root/.one_params:/home/dja/.one_params
    user: 1000:anaconda
    ports:
      - "9003:8888"
    networks:
      - ibl_brain
networks:
  ibl_brain:
18 changes: 4 additions & 14 deletions ibl_pipeline/analyses/behavior.py
Original file line number Diff line number Diff line change
Expand Up @@ -109,12 +109,7 @@ def make(self, key):
psych_results = {**key, **psych_results}
psych_results['prob_left'] = prob_left[
'trial_stim_prob_left']
if abs(p_left - 0.8) < 0.001:
psych_results['prob_left_block'] = 80
elif abs(p_left - 0.2) < 0.001:
psych_results['prob_left_block'] = 20
elif abs(p_left - 0.5) < 0.001:
psych_results['prob_left_block'] = 50
psych_results['prob_left_block'] = round(p_left*10)*10

self.insert1(psych_results)

Expand Down Expand Up @@ -183,12 +178,7 @@ def make(self, key):
rt['reaction_time_ci_high'] = utils.compute_reaction_time(
trials_sub, compute_ci=True)

if abs(p_left - 0.8) < 0.001:
rt['prob_left_block'] = 80
elif abs(p_left - 0.2) < 0.001:
rt['prob_left_block'] = 20
elif abs(p_left - 0.5) < 0.001:
rt['prob_left_block'] = 50
rt['prob_left_block'] = round(p_left*10)*10

self.insert1(rt)

Expand Down Expand Up @@ -498,10 +488,9 @@ def make(self, key):
bpod_board = (behavior.Settings & sessions_rel).fetch('pybpod_board')
ephys_board = [True for i in list(bpod_board) if 'ephys' in i]

task_protocols = (acquisition.Session & sessions_rel).fetch('task_protocol')
delays = (behavior.SessionDelay & sessions_rel).fetch('session_delay_in_mins')

if len(ephys_board) == 3 and np.any(delays > 15):
if len(ephys_board) == 3 and np.any(delays >= 15):

n_trials = (behavior.TrialSet & sessions_rel).fetch('n_trials')
performance_easy = (PsychResults & sessions_rel).fetch(
Expand Down Expand Up @@ -533,6 +522,7 @@ def make(self, key):
return

# also compute the median reaction time
# to put into
medRT = compute_reaction_time(trials)

# psych_unbiased = utils.compute_psych_pars(trials_unbiased)
Expand Down
32 changes: 14 additions & 18 deletions ibl_pipeline/behavior.py
Original file line number Diff line number Diff line change
Expand Up @@ -649,7 +649,7 @@ class SessionDelayAvailability(dj.Imported):
definition = """
-> acquisition.Session
---
error_type: enum("elapsed time not available", "raw task data not available")
error_type: enum("elapsed time not available", "raw task data not available", "delay not available")
"""


Expand All @@ -669,25 +669,21 @@ class SessionDelay(dj.Imported):
date.strftime('%Y-%m-%d')))

def make(self, key):

eID = (acquisition.Session & key).fetch1('session_uuid')
data = one.load(str(eID), dataset_types=['_iblrig_taskData.raw'])
trial_start, trial_end = (TrialSet.Trial & key & 'trial_id=1').fetch1(
'trial_start_time', 'trial_end_time')
first_trial_duration = trial_end - trial_start

if data[0]:
if 'elapsed_time' in data[0][0].keys():
elapsed_time = data[0][0]['elapsed_time'].split(':')
key['session_delay_in_secs'] = float(elapsed_time[1])*60 + \
float(elapsed_time[2]) - first_trial_duration
key['session_delay_in_mins'] = key['session_delay_in_secs']/60
self.insert1(key)
else:
key['error_type'] = 'raw task data not available'
SessionDelayAvailability.insert1(key, allow_direct_insert=True)
json = one.alyx.get(one.get_details(str(eID))['url'])['json']

if 'SESSION_START_DELAY_SEC' in json.keys():
self.insert1(dict(
**key,
session_delay_in_secs=json['SESSION_START_DELAY_SEC'],
session_delay_in_mins=json['SESSION_START_DELAY_SEC'] / 60
))
else:
key['error_type'] = 'elapsed time not available'
SessionDelayAvailability.insert1(key, allow_direct_insert=True)
key['error_type'] = 'delay not available'
SessionDelayAvailability.insert1(
key, allow_direct_insert=True,
skip_duplicates=True)


@schema
Expand Down
14 changes: 8 additions & 6 deletions ibl_pipeline/ephys.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@
import numpy as np
from os import path, environ
from . import acquisition, reference, behavior, data
from .ingest import ephys as ephys_ingest
from tqdm import tqdm
import numpy as np
import pandas as pd
Expand Down Expand Up @@ -55,7 +54,7 @@ class CompleteClusterSession(dj.Computed):
'clusters.amps.npy',
'clusters.channels.npy',
'clusters.depths.npy',
'clusters.metrics.csv',
'clusters.metrics.pqt',
'clusters.peakToTrough.npy',
'clusters.uuids.csv',
'clusters.waveforms.npy',
Expand Down Expand Up @@ -224,10 +223,13 @@ def make(self, key):

probe_name = (ProbeInsertion & key).fetch1('probe_label')

clusters = alf.io.load_object(
ses_path.joinpath('alf', probe_name), 'clusters')
spikes = alf.io.load_object(
ses_path.joinpath('alf', probe_name), 'spikes')
try:
clusters = alf.io.load_object(
ses_path.joinpath('alf', probe_name), 'clusters')
spikes = alf.io.load_object(
ses_path.joinpath('alf', probe_name), 'spikes')
except:
return

time_fnames = [k for k in spikes.keys() if 'times' in k]

Expand Down
Loading

0 comments on commit a9bdcd8

Please sign in to comment.