Skip to content

Photometry integration #938

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 50 commits into
base: develop
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
50 commits
Select commit Hold shift + click to select a range
06b0478
updated requirements, updated pipelines
grg2rsr Jan 13, 2025
bff5469
fibers are now named fiber_{brain_region} in the extraction process
grg2rsr Jan 17, 2025
57f7164
fixing shifted sync timestamps in the extraction
grg2rsr Feb 24, 2025
b606290
reading digital inputs file via iblphotometry.io (validated)
grg2rsr Feb 24, 2025
16ffbc4
Merge remote-tracking branch 'origin/develop' into photometry-integra…
grg2rsr Mar 4, 2025
8ae4039
fix for reextraction (.pqt file read instead of .csv) for digital_inp…
grg2rsr Mar 4, 2025
3fd7623
bugfix in the extractor after fix in the experiment description file
grg2rsr Mar 6, 2025
54fb016
Merge remote-tracking branch 'origin/develop' into photometry-integra…
grg2rsr Apr 24, 2025
07efbd0
WIP changes for daq sync
grg2rsr Apr 30, 2025
217bd5f
leftovers
grg2rsr May 12, 2025
c050dd9
Merge branch 'develop' into photometry-integration
grg2rsr May 13, 2025
a933cb7
4 rig updates - ready for testing
grg2rsr May 13, 2025
f6a804d
ready for testing
grg2rsr May 14, 2025
60cd99c
4 rig - ready for testing round 2
grg2rsr May 15, 2025
fce9ec2
label the DLC task as running in the dlc env
oliche Mar 19, 2025
1ae68de
the default scratch drive is /scratch
oliche Mar 19, 2025
e5d2dfa
DLC has the option of running within the current env if available
oliche Mar 21, 2025
fd99fff
check dlc env reserved for subprocess call
oliche Mar 21, 2025
fbc69c1
ephys compression is a large task
oliche Mar 22, 2025
7b5127b
the session loader trial loader has a revision parameter
oliche Apr 2, 2025
87bb0ed
DLC: the motion energy can run in the current environment
oliche Apr 4, 2025
26a6ce2
fix deprecation warning for DLC
oliche Apr 29, 2025
938bc33
fix call to motion energy method by removing instance
oliche Apr 30, 2025
c0e6b87
update requirements.txt
oliche May 20, 2025
3c4d1f4
Check for iblsorter availability by means other than a try ... ImportError
oliche May 21, 2025
a5bd4f6
make sure ibl-neuropixel higher than 1.7.1
oliche May 22, 2025
75c3e40
changing the extractor for compatibility with DI only and Frameclock …
grg2rsr May 30, 2025
c1ebda1
added functionality to deal with premature termination of daqami
grg2rsr Jun 2, 2025
15fe6af
tiny bugfix for syncing sessions where daqami was stopped before bonsai
grg2rsr Jun 4, 2025
ed4ea5a
ruff please, (tests are stubs)
grg2rsr Jun 4, 2025
8e69190
flake8
grg2rsr Jun 4, 2025
31eb3bb
Merge remote-tracking branch 'origin/prefect' into photometry-integra…
grg2rsr Jun 4, 2025
f59a954
updated requirements again after prefect merge
grg2rsr Jun 4, 2025
e6e7ae2
another crucial bugfix for daqami frame number check during extractor
grg2rsr Jun 6, 2025
f3a58e8
moving the frameclock back on AI7, included downward compatibility in…
grg2rsr Jun 6, 2025
85eb00a
related to previous commit, better checking for analog/digital sync c…
grg2rsr Jun 6, 2025
720787d
bugfix
grg2rsr Jun 6, 2025
516ddbd
bugfix for wrong frameclock channel
grg2rsr Jun 11, 2025
52c0c6f
extractor fix
grg2rsr Jun 11, 2025
d46d846
very hardcoded fix for frameclock channel that turns obsolete as soon…
grg2rsr Jun 11, 2025
808754b
int / str bugfix
grg2rsr Jun 11, 2025
7f8d652
bugfix for channel int/str
oliche Jun 12, 2025
771048e
fake commit just to check user name
grg2rsr Jun 12, 2025
1a747c3
bugfix for downward compatible frameclock_channel
grg2rsr Jun 13, 2025
d32f54b
more verbose error msg for sync fail
grg2rsr Jun 13, 2025
fd00180
path bugfix for extractor
grg2rsr Jun 16, 2025
4ecb482
splitting sessions by spacers and attempting to sync each
grg2rsr Jun 17, 2025
18a02e2
spacer detection for hot swapping
grg2rsr Jun 18, 2025
4fbdfc9
bugfix for failing job creation due to wrong kwarg handling
grg2rsr Jun 23, 2025
fd3b633
bugfix in job creator when root_path is session_folder
grg2rsr Jun 23, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 20 additions & 11 deletions brainbox/io/one.py
Original file line number Diff line number Diff line change
Expand Up @@ -1204,14 +1204,16 @@ def timesprobe2times(self, values, direction='forward'):
elif direction == 'reverse':
return self._sync['reverse'](values) / self._sync['fs']

def samples2times(self, values, direction='forward', band='ap'):
    """
    Converts ephys sample values to session main clock seconds.

    :param values: numpy array of times in seconds or samples to resync
    :param direction: 'forward' (samples probe time to seconds main time) or 'reverse'
     (seconds main time to samples probe time)
    :param band: 'ap' (default) or 'lf'; LF sample indices are converted to their
     AP-band equivalent before syncing (LF is sampled 12x slower than AP)
    :return: numpy array of converted values
    """
    if band == 'lf':
        # Use an out-of-place multiply: `values *= 12` would silently mutate the
        # caller's array in place.
        # NOTE(review): the x12 scaling only makes sense for sample-valued input;
        # for direction='reverse' the input is in seconds — confirm intended behavior.
        values = values * 12
    self._get_probe_info()
    return self._sync[direction](values)

Expand All @@ -1237,8 +1239,8 @@ def raster(self, spikes, channels, save_dir=None, br=None, label='raster', time_
:param **kwargs: kwargs passed to `driftmap()` (optional)
:return:
"""
br = br or BrainRegions()
time_series = time_series or {}
br = BrainRegions() if br is None else br
time_series = {} if time_series is None else time_series
fig, axs = plt.subplots(2, 2, gridspec_kw={
'width_ratios': [.95, .05], 'height_ratios': [.1, .9]}, figsize=(16, 9), sharex='col')
axs[0, 1].set_axis_off()
Expand Down Expand Up @@ -1281,13 +1283,20 @@ def plot_rawdata_snippet(self, sr, spikes, clusters, t0,
save_dir=None,
label='raster',
gain=-93,
title=None):
title=None,
alpha=0.3,
processing='destripe'):

# compute the raw data offset and destripe, we take 400ms around t0
first_sample, last_sample = (int((t0 - 0.2) * sr.fs), int((t0 + 0.2) * sr.fs))
raw = sr[first_sample:last_sample, :-sr.nsync].T
channel_labels = channels['labels'] if (channels is not None) and ('labels' in channels) else True
destriped = ibldsp.voltage.destripe(raw, sr.fs, channel_labels=channel_labels)
if processing == 'destripe':
samples = ibldsp.voltage.destripe(raw, sr.fs, channel_labels=channel_labels)
else:
import scipy.signal
sos = scipy.signal.butter(**{"N": 3, "Wn": 300 / sr.fs * 2, "btype": "highpass"}, output="sos")
samples = scipy.signal.sosfiltfilt(sos, raw)
# filter out the spikes according to good/bad clusters and to the time slice
spike_sel = slice(*np.searchsorted(spikes['samples'], [first_sample, last_sample]))
ss = spikes['samples'][spike_sel]
Expand All @@ -1297,9 +1306,9 @@ def plot_rawdata_snippet(self, sr, spikes, clusters, t0,
title = self._default_plot_title(spikes)
# display the raw data snippet with spikes overlaid
fig, axs = plt.subplots(1, 2, gridspec_kw={'width_ratios': [.95, .05]}, figsize=(16, 9), sharex='col')
Density(destriped, fs=sr.fs, taxis=1, gain=gain, ax=axs[0], t0=t0 - 0.2, unit='s')
axs[0].scatter(ss[sok] / sr.fs, sc[sok], color="green", alpha=0.5)
axs[0].scatter(ss[~sok] / sr.fs, sc[~sok], color="red", alpha=0.5)
Density(samples, fs=sr.fs, taxis=1, gain=gain, ax=axs[0], t0=t0 - 0.2, unit='s')
axs[0].scatter(ss[sok] / sr.fs, sc[sok], color="green", alpha=alpha)
axs[0].scatter(ss[~sok] / sr.fs, sc[~sok], color="red", alpha=alpha)
axs[0].set(title=title, xlim=[t0 - 0.035, t0 + 0.035])
# adds the channel locations if available
if (channels is not None) and ('atlas_id' in channels):
Expand Down Expand Up @@ -1501,7 +1510,7 @@ def _find_behaviour_collection(self, obj):
f'e.g sl.load_{obj}(collection="{collections[0]}")')
raise ALFMultipleCollectionsFound

def load_trials(self, collection=None, revision=None):
    """
    Function to load trials data into SessionLoader.trials

    Parameters
    ----------
    collection: str
        Alf collection of trials data
    revision: str
        Revision (dataset version tag) to load; falls back to ``self.revision``
        when not given
    """
    # Per-call revision overrides the loader-level default set on the instance.
    revision = self.revision if revision is None else revision
    if not collection:
        collection = self._find_behaviour_collection('trials')
    # itiDuration frequently has a mismatched dimension, and we don't need it, exclude using regex
    self.one.wildcards = False
    self.trials = self.one.load_object(
        self.eid, 'trials', collection=collection, attribute=r'(?!itiDuration).*', revision=revision or None).to_df()
    self.one.wildcards = True
    self.data_info.loc[self.data_info['name'] == 'trials', 'is_loaded'] = True
Expand Down
Loading