@@ -808,6 +808,7 @@ class SpikeSortingLoader:
     spike_sorter: str = 'pykilosort'
     spike_sorting_path: Path = None
     _sync: dict = None
+    revision: str = None
 
     def __post_init__(self):
         # pid gets precedence
@@ -886,7 +887,7 @@ def _get_spike_sorting_collection(self, spike_sorter=None):
         _logger.debug(f"selecting: {collection} to load amongst candidates: {self.collections}")
         return collection
 
-    def load_spike_sorting_object(self, obj, *args, **kwargs):
+    def load_spike_sorting_object(self, obj, *args, revision=None, **kwargs):
         """
         Loads an ALF object
         :param obj: object name, str between 'spikes', 'clusters' or 'channels'
@@ -895,8 +896,10 @@ def load_spike_sorting_object(self, obj, *args, **kwargs):
         :param collection: string specifying the collection, for example 'alf/probe01/pykilosort'
         :param kwargs: additional arguments to be passed to one.api.One.load_object
         :param missing: 'raise' (default) or 'ignore'
+        :param revision: the dataset revision to load
         :return:
         """
+        revision = revision if revision is not None else self.revision
         self.download_spike_sorting_object(obj, *args, **kwargs)
         return self._load_object(self.files[obj])
@@ -907,7 +910,7 @@ def get_version(self, spike_sorter=None):
         return dset[0]['version'] if len(dset) else 'unknown'
 
     def download_spike_sorting_object(self, obj, spike_sorter=None, dataset_types=None, collection=None,
-                                      attribute=None, missing='raise', **kwargs):
+                                      attribute=None, missing='raise', revision=None, **kwargs):
         """
         Downloads an ALF object
         :param obj: object name, str between 'spikes', 'clusters' or 'channels'
@@ -917,8 +920,10 @@ def download_spike_sorting_object(self, obj, spike_sorter=None, dataset_types=None,
         :param kwargs: additional arguments to be passed to one.api.One.load_object
         :param attribute: list of attributes to load for the object
         :param missing: 'raise' (default) or 'ignore'
+        :param revision: the dataset revision to load
         :return:
         """
+        revision = revision if revision is not None else self.revision
         if spike_sorter is None:
             spike_sorter = self.spike_sorter if self.spike_sorter is not None else 'iblsorter'
         if len(self.collections) == 0:
@@ -1170,12 +1175,13 @@ def url(self):
         webclient = getattr(self.one, '_web_client', None)
         return webclient.rel_path2url(get_alf_path(self.session_path)) if webclient else None
 
-    def _get_probe_info(self):
+    def _get_probe_info(self, revision=None):
+        revision = revision if revision is not None else self.revision
         if self._sync is None:
             timestamps = self.one.load_dataset(
-                self.eid, dataset='_spikeglx_*.timestamps.npy', collection=f'raw_ephys_data/{self.pname}')
+                self.eid, dataset='_spikeglx_*.timestamps.npy', collection=f'raw_ephys_data/{self.pname}', revision=revision)
             _ = self.one.load_dataset(  # this is not used here but we want to trigger the download for potential tasks
-                self.eid, dataset='_spikeglx_*.sync.npy', collection=f'raw_ephys_data/{self.pname}')
+                self.eid, dataset='_spikeglx_*.sync.npy', collection=f'raw_ephys_data/{self.pname}', revision=revision)
             try:
                 ap_meta = spikeglx.read_meta_data(self.one.load_dataset(
                     self.eid, dataset='_spikeglx_*.ap.meta', collection=f'raw_ephys_data/{self.pname}'))
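For context, a minimal usage sketch of the new revision support (not part of this diff). It assumes SpikeSortingLoader is imported from brainbox.io.one as elsewhere in ibllib; the pid and revision strings below are hypothetical example values.

# Minimal usage sketch of the new `revision` support; import path, pid and
# revision strings are assumptions for illustration, not part of this diff.
from one.api import ONE
from brainbox.io.one import SpikeSortingLoader  # assumed import path

one = ONE()
pid = 'da8dfec1-d265-44e8-84ce-6ae9c109b8bd'  # hypothetical probe insertion id

# Pin the loader to a dataset revision: with the new dataclass field, load and
# download calls fall back to self.revision when no revision is passed.
ssl = SpikeSortingLoader(pid=pid, one=one, revision='2024-05-06')
spikes = ssl.load_spike_sorting_object('spikes')

# Per the new signatures, a revision can also be passed per call, overriding
# the instance-level default.
channels = ssl.load_spike_sorting_object('channels', revision='2024-03-22')

The `revision if revision is not None else self.revision` fallback keeps existing callers working unchanged: with no revision argument and the field left at None, the loaders behave exactly as before.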